(git archive of Wikidata-Toolkit 0.14.6, commit d9ab19b9741a96c756a1f39b6b24986ea8ca975d)

===== Wikidata-Toolkit-0.14.6/.github/dependabot.yml =====

# Documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  # For Wikidata Toolkit Java deps
  - package-ecosystem: "maven"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 10
  # For GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"

===== Wikidata-Toolkit-0.14.6/.github/workflows/ci.yml =====

name: Java CI

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        java: [ 8, 11, 17 ]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-java@v3
        with:
          distribution: zulu
          java-version: ${{ matrix.java }}
      - uses: actions/cache@v3
        with:
          path: ~/.m2
          key: ${{ runner.os }}-${{ matrix.java }}-m2-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-${{ matrix.java }}-m2
      - run: mvn --batch-mode --update-snapshots -Dgpg.skip=true jacoco:prepare-agent verify jacoco:report
      - uses: codecov/codecov-action@v3
        with:
          file: ./**/target/site/jacoco/jacoco.xml
          name: codecov

===== Wikidata-Toolkit-0.14.6/.github/workflows/codeql-analysis.yml =====

name: "CodeQL"

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]
  schedule:
    - cron: '32 21 * * 4'

jobs:
  analyze:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: github/codeql-action/init@v2
        with:
          languages: java
      - uses: github/codeql-action/autobuild@v2
      - uses: github/codeql-action/analyze@v2

===== Wikidata-Toolkit-0.14.6/.github/workflows/deploy.yml =====

name: Maven Release

on:
  release:
    types: [published]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-java@v3
        with:
          distribution: zulu
          java-version: 8
          server-id: ossrh
          server-username: MAVEN_USERNAME
          server-password: MAVEN_PASSWORD
      - name: Import GPG Key
        run: |
          mkdir -p ~/.gnupg/
          echo "$MAVEN_SIGNING_KEY" | gpg --import -
        env:
          MAVEN_SIGNING_KEY: ${{ secrets.MAVEN_SIGNING_KEY }}
      - name: Maven Deploy
        run: mvn -B -V deploy -Ddeploy
        env:
          MAVEN_USERNAME: ${{ secrets.MAVEN_USERNAME }}
          MAVEN_PASSWORD: ${{ secrets.MAVEN_PASSWORD }}
      - name: Generate javadocs
        run: mvn compile javadoc:aggregate
      - name: Publish javadocs
        uses: JamesIves/github-pages-deploy-action@v4.4.2
        with:
          branch: gh-pages
          folder: target/site/apidocs

===== Wikidata-Toolkit-0.14.6/.gitignore =====

*.class

# Package Files #
*.jar
*.war
*.ear

# IntelliJ
.idea/
*.iml
*.ipr
*.iws

# Eclipse
.classpath
.project
.metadata
.settings/
local.properties

# NetBeans
nbbuild/
nbdist/
nbproject/
nb-configuration.xml
nbactions.xml

# Maven, etc.
out/
target/

# WDTK dumpfile download directory
dumpfiles/

# WDTK example result directory
results/

# Don't apply the above to src/ where Java requires
# subdirectories named according to package names.
# We do not want to forbid things like "dumpfiles" in
# package names.
!src/

# Use as directory for local testing code
**/src/local/

# General
*.pyc
*~
*.bak
*.sw[o,p]
*.tmp
.DS_Store
Thumbs.db

===== Wikidata-Toolkit-0.14.6/LICENSE.txt =====

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

===== Wikidata-Toolkit-0.14.6/README.md =====

Wikidata Toolkit
================

![Build status](https://github.com/Wikidata/Wikidata-Toolkit/workflows/Java%20CI/badge.svg)
[![Coverage status](https://codecov.io/gh/Wikidata/Wikidata-Toolkit/branch/master/graph/badge.svg?token=QtTNJdTAbO)](https://codecov.io/gh/Wikidata/Wikidata-Toolkit)
[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.wikidata.wdtk/wdtk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.wikidata.wdtk%22)
[![Project Stats](https://www.openhub.net/p/Wikidata-Toolkit/widgets/project_thin_badge.gif)](https://www.openhub.net/p/Wikidata-Toolkit)

Wikidata Toolkit is a Java library for accessing Wikidata and other Wikibase installations. It can be used to create bots, to perform data extraction tasks (e.g., convert all data in Wikidata to a new format), and to do large-scale analyses that are too complex for a simple SPARQL query service.
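For example, fetching an item from wikidata.org takes only a few lines. This is a minimal sketch in the style of the Examples project linked below; method names such as `getWikidataDataFetcher` and `findLabel` are taken from the current API and may differ in older releases:

```java
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;

public class FetchQ42 {
	public static void main(String[] args) throws Exception {
		// a fetcher preconfigured for wikidata.org
		WikibaseDataFetcher fetcher = WikibaseDataFetcher.getWikidataDataFetcher();
		// returns null if the entity does not exist
		EntityDocument document = fetcher.getEntityDocument("Q42");
		if (document instanceof ItemDocument) {
			System.out.println("The English label of Q42 is "
					+ ((ItemDocument) document).findLabel("en"));
		}
	}
}
```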
Documentation
-------------

* [Wikidata Toolkit homepage](https://www.mediawiki.org/wiki/Wikidata_Toolkit): project homepage with basic user documentation, including guidelines on how to set up your Java IDE for using Maven and git.
* [Wikidata Toolkit examples](https://github.com/Wikidata/Wikidata-Toolkit-Examples): stand-alone Java project that shows how to use Wikidata Toolkit as a library in your own code.
* [Wikidata Toolkit Javadocs](http://wikidata.github.io/Wikidata-Toolkit/): API documentation

License and Credits
-------------------

Authors: [Markus Kroetzsch](http://korrekt.org), [Julian Mendez](https://julianmendez.github.io/), [Fredo Erxleben](https://github.com/fer-rum), [Michael Guenther](https://github.com/guenthermi), [Markus Damm](https://github.com/mardam), [Antonin Delpeuch](http://antonin.delpeuch.eu/), [Thomas Pellissier Tanon](https://thomas.pellissier-tanon.fr/) and [other contributors](https://github.com/Wikidata/Wikidata-Toolkit/graphs/contributors)

License: [Apache 2.0](LICENSE.txt)

The development of Wikidata Toolkit has been partially funded by the Wikimedia Foundation under the [Wikibase Toolkit Individual Engagement Grant](https://meta.wikimedia.org/wiki/Grants:IEG/Wikidata_Toolkit), and by the German Research Foundation (DFG) under [Emmy Noether grant KR 4381/1-1 "DIAMOND"](https://ddll.inf.tu-dresden.de/web/DIAMOND/en).

How to make a release
---------------------

During development, the version number in the `pom.xml` files should be the next version number assuming that the next version is a patch release, followed by `-SNAPSHOT`. For instance, if the last version to have been released was `1.2.3`, then the `pom.xml` files should contain `1.2.4-SNAPSHOT`.

1. Pick the version number for the new release you want to publish, following SemVer. If this is going to be a patch release, it should be the version currently in `pom.xml` without the `-SNAPSHOT` suffix. In the following steps, we will assume this new version is `1.2.4`.
2. Write the new version number in the `pom.xml` files with `mvn versions:set -DnewVersion=1.2.4`.
3. Add some release notes in the `RELEASE-NOTES.md` file at the root of the repository.
4. Commit the changes: `git commit -am "Set version to 1.2.4"`.
5. Add a tag for the version: `git tag -a v1.2.4 -m "Version 1.2.4"`.
6. Write the next version number in the `pom.xml` files, by incrementing the patch release number: `mvn versions:set -DnewVersion=1.2.5-SNAPSHOT`.
7. Commit the changes: `git commit -am "Set version to 1.2.5-SNAPSHOT"`.
8. Push commits and tags: `git push --tags && git push`.
9. In GitHub's UI, create a release by going to https://github.com/Wikidata/Wikidata-Toolkit/releases/new. Pick the tag you just created, give a title to the release, and briefly describe the changes since the previous release (see existing releases for examples).
10. Update the version number mentioned in https://www.mediawiki.org/wiki/Wikidata_Toolkit.

The library is automatically packaged and uploaded to Maven Central by the continuous deployment setup (GitHub Actions), and so is the HTML version of the Javadoc (published to GitHub Pages).

===== Wikidata-Toolkit-0.14.6/RELEASE-NOTES.md =====

Wikidata Toolkit Release Notes
==============================

Version 0.14.6
--------------

Small improvement:

* The new Wikibase editing API returns the id of the revision of the last edit made (#795)

Version 0.14.5
--------------

Bug fixes:

* Fix fetching of MediaInfo entities by title, when they contain a dash (#777)
* Clear the CSRF editing token if it is no longer valid (#442)

Version 0.14.4
--------------

Bug fixes:

* Fix deserialization of lexemes, as a workaround for https://phabricator.wikimedia.org/T305660

Version 0.14.3
--------------

Bug fixes:

* Fix fetching of Mids from filenames in the case where multiple filenames do not exist (#745)

Version 0.14.2
--------------

Bug fixes:

* Add `uselang` as an optional parameter for the wbsearchentities action (#239)

Version 0.14.1
--------------

Bug fixes:

* Fix error handling in the newly supported clientLogin method
* Fix error in deserialization of properties with unknown datatypes

Deprecations:

* The IRI representation of datatypes in `wdtk-datamodel` is deprecated. If you rely on it, use the version in `wdtk-rdf` instead, since it is specific to the RDF serialization of the datamodel. Use the strings found in the JSON serialization of properties to identify datatypes instead.

Version 0.14.0
--------------

New feature:

* Login to Wikibase via the recommended API for normal username/password login

Bug fixes:

* Add an explicit dependency on okhttp, to avoid linkage issues
* Upgrade to okhttp 4.10.0

Version 0.13.5
--------------

* Downgraded okhttp to 4.2.2 to solve a linkage error (issue #600). We anticipate that this will be reverted once a stable version of okhttp 5 is available and a viable way to avoid such a linkage error is found.

Version 0.13.4
--------------

* Updated okhttp to 5.0.0-alpha.10 in the hope that it solves a linkage error
* Registered the EDTF datatype

Version 0.13.3
--------------

* Fixed the media type when uploading files to a MediaWiki API endpoint

Version 0.13.2
--------------

* New method to create an EntityDocument independently of its type
* New utility method to execute an authenticated HTTP method which posts files

Version 0.13.1
--------------

Minor changes to the CI configuration for artifact deployment to Maven Central; no changes in the library itself.

Version 0.13.0
--------------

New features:

* New API to edit Wikibase entities

Bug fixes:

* Fetching of non-existent Mids on Commons
* Support for missing entity types in DatamodelConverter
* Store QuantityValue units as ItemIdValue instead of String
* Allow the lexeme lemma list to be empty
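As an illustration, creating a fresh item through this editing API can look roughly as follows. This is a hedged sketch modelled on the Examples project: the trailing `null` stands for the optional edit tags, and the exact signature of `createItemDocument` may vary between releases:

```java
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.wikibaseapi.BasicApiConnection;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataEditor;

public class CreateItemSketch {
	public static void main(String[] args) throws Exception {
		BasicApiConnection connection = BasicApiConnection.getWikidataApiConnection();
		connection.login("username", "password"); // placeholder credentials
		WikibaseDataEditor editor = new WikibaseDataEditor(connection, Datamodel.SITE_WIKIDATA);
		// ItemIdValue.NULL is a placeholder: the API assigns the real id on creation
		ItemDocument newItem = ItemDocumentBuilder.forItemId(ItemIdValue.NULL)
				.withLabel("an example item", "en")
				.build();
		ItemDocument created = editor.createItemDocument(newItem, "Item created for testing", null);
		System.out.println("Created " + created.getEntityId().getId());
	}
}
```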
Version 0.12.1
--------------

Bug fixes:

* Allow an empty representation list in `FormDocument`, to parse the most recent Wikidata dumps

Version 0.12.0
--------------

Bug fixes:

* Allow an empty gloss list in `SenseDocument`, to parse the most recent Wikidata dumps

New features:

* Allow fetching MediaInfo entities using `WikibaseDataFetcher`
* `WikibaseRevisionProcessor` now parses and exposes redirections between entities
* `OAuthApiConnection` to connect to the Wikibase API using OAuth
* Allow fetching the Wikibase edit lag
* Dump file compression is automatically guessed from the file name extension

Incompatible changes:

* More API client errors are now exposed as exceptions, allowing API users to act on them
* OkHttp is now used in the wikibaseapi client, together with a revamp of the client internals that brings small breaking changes
* Removal of deprecated methods across the codebase

Dependency upgrades:

* Dropped the unused Apache HTTP client dependency
* Bumped RDF4J to 3.6.4, Jackson to 2.12.3, Apache Commons IO to 2.8, and Apache Commons Lang3 to 3.12
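A connection sketch for `OAuthApiConnection`, assuming an owner-only OAuth 1.0a consumer and a five-argument constructor (the API URL plus the four credential strings; all credentials below are placeholders, so check the Javadoc of your version before relying on this):

```java
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.wikibaseapi.ApiConnection;
import org.wikidata.wdtk.wikibaseapi.OAuthApiConnection;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;

public class OAuthSketch {
	public static void main(String[] args) throws Exception {
		// all four credential strings are placeholders for a registered consumer
		ApiConnection connection = new OAuthApiConnection(
				"https://www.wikidata.org/w/api.php",
				"consumer key", "consumer secret",
				"access token", "access secret");
		WikibaseDataFetcher fetcher = new WikibaseDataFetcher(connection, Datamodel.SITE_WIKIDATA);
		EntityDocument document = fetcher.getEntityDocument("Q42");
		System.out.println(document.getEntityId());
	}
}
```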
Version 0.11.1
--------------

Bug fixes:

* Fix an API connection bug due to the lower-case set-cookie header sent by Wikidata
* Upgrade dependencies to their latest versions

Version 0.11.0
--------------

New features:

* Adds a basic MediaInfo representation, with API retrieval and editing
* Adds support for tags when editing using the API
* Adds UnsupportedValue and UnsupportedEntityIdValue to properly represent unsupported values and entity ids
* RDF: fixes datatype lookup for entity ids
* RDF: adds support for Quantity and MonolingualText in the SomeValueSnak and NoValueSnak converters
* Wikibase API: throws an exception when credentials have expired
* Updates RDF4J to 2.5.2, Apache Commons Lang to 3.9 and Apache Commons Compress to 1.18
* Properly deserializes and stores the Reference hash
* Adds edit methods to Lexeme, Form and Sense
* Adds timeout options to HTTP calls
* Adds exponential back-off for maxlag errors

Incompatible changes:

* Removes the wikibase-client package
* Makes Statement.getBestStatements return null if there are no best statements
* Makes the RDF output format closer to the one used by Wikibase
* Throws MediaWikiApiErrorException instead of NullPointerException if the edit token is not found

Bug fixes:

* Removes main snak value serialization from statement serialization
* Uses a CSRF token for logout, following a MediaWiki API change

Version 0.10.0
--------------

Security fixes:

* Update Jackson to 2.9.9, fixing [vulnerabilities that might lead to remote code execution](https://www.cvedetails.com/vulnerability-list.php?vendor_id=15866&product_id=42991&version_id=238358&page=1&hasexp=0&opdos=0&opec=0&opov=0&opcsrf=0&opgpriv=0&opsqli=0&opxss=0&opdirt=0&opmemc=0&ophttprs=0&opbyp=0&opfileinc=0&opginf=0&cvssscoremin=0&cvssscoremax=0&year=0&cweid=0&order=1&trc=12&sha=1a71cae633886fb92e024fafb20c582c9e5b072d)

New features:

* RDF: adds support for Quantity and MonolingualText in the SomeValueSnak and NoValueSnak converters
* Wikibase API: throws an exception when credentials have expired
* Updates RDF4J to 2.5.2, Apache Commons Lang to 3.9 and Apache Commons Compress to 1.18

Incompatible changes:

* Propagate IOException properly in the Wikibase API module

Version 0.9.0
-------------

New features:

* Compatibility with JDK 10
* Compatibility with Android, except for the RDF component; it requires Gradle Android plugin 3.0+
* Addition of basic support for Wikibase lexemes (including forms and senses)
* The RDF default output is now the same as query.wikidata.org and specified at https://www.mediawiki.org/wiki/Wikibase/Indexing/RDF_Dump_Format, except for normalized values, which are not supported
* Migration from Sesame to RDF4J
* Most of the data model classes now have with* methods to easily modify objects while keeping immutability
* parentRevisionId is now provided by the XML dump file reader
* WikimediaLanguageCodes.fixLanguageCodeIfDeprecated allows fixing deprecated language codes
* StatementGroup (resp. SnakGroup) implements `Collection<Statement>` (resp. `Collection<Snak>`)
* EntityRedirectDocument object in order to easily represent redirections between entities
* StatementGroup::getBestStatements utility method to quickly retrieve the best statements of the group
* GuidGenerator and an implementation to generate statement UUIDs easily
* When editing entities, the implementation attempts to use the most granular API call to perform the edit, which makes for more informative edit summaries
* Addition of QuantityValue.getUnitItemId, TimeValue.getCalendarItemId and GlobeCoordinatesValue.getGlobeItemId to easily get ItemIdValue objects for these three fields
* Introduction of DatamodelFilter to split out the filtering capabilities of DatamodelConverter
* ApiConnection was changed to an interface, implemented by BasicApiConnection for normal login and (in the future) OAuthApiConnection for OAuth. BasicApiConnection was made serializable with Jackson so that a connection can be saved and restored.

Bug fixes:

* Retrieval of redirected entities using WbGetEntitiesAction should work
* StatementUpdate avoids null edits, except if intentionally asked to perform them
* The WikimediaLanguageCodes lists have been updated
* Proper RDF serialization of the Commons GeoShape and Commons Data datatypes

Incompatible changes:

* Support for JDK 7 is dropped.
* The simple data model implementation has been dropped. The Jackson-based implementation is now the only one provided by the library. This avoids maintaining two implementations and the cost of conversion between the two representations. The Jackson implementation has been moved to the former one's package.
* Migration from Sesame to RDF4J in the RDF component interface
* Updates of various dependencies
* The utility classes related to JSON (de)serialization are now private
* SiteLink badges are now ItemIdValue and not String
* The internal encoding of a unitless QuantityValue unit is now "1" and not "", for consistency with Wikibase
* The default value for the "after" parameter of TimeValues is now 0, for compatibility with Wikibase
* DatatypeIdValue no longer implements Value. This improves type safety, because Wikibase does not allow using DatatypeIdValue as a snak value.
* The DatamodelConverter class no longer does shallow copies. Please use DatamodelFilter for filtering.
* The constraint TemplateParser related code has been removed. Constraints are now encoded as statements in Wikidata, making this code only usable on old dumps.
* TimeValue timestamps now serialize years with at least 4 digits, not 11. This replicates a change in Wikibase and makes the output timestamps more similar to ISO/XSD ones.
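Two of the 0.9.0 additions in a short sketch (assuming an `ItemDocument` is already at hand, e.g. from a dump or from the API; `findStatementGroup` is one of the convenience accessors, and the exact null-handling of `getBestStatements` follows the notes above):

```java
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;

public class NewApiSketch {
	static void demo(ItemDocument itemDocument) {
		// with* methods return a modified copy, keeping documents immutable
		ItemDocument relabeled = itemDocument.withLabel(
				Datamodel.makeMonolingualTextValue("new English label", "en"));

		// best statements: preferred-rank statements if any, otherwise normal rank
		StatementGroup group = relabeled.findStatementGroup("P31");
		if (group != null && group.getBestStatements() != null) {
			group.getBestStatements().forEach(System.out::println);
		}
	}
}
```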
Version 0.8.0
-------------

New features:

* Compatibility with JDK 9
* Allow editing labels, descriptions and aliases using the WikibaseDataEditor (this is a work in progress that is likely to change)
* Allow using the wbEntitySearch API action through WikibaseDataFetcher
* Quantity bounds are now optional, following the change in Wikibase
* Add the "id" field to the entity id JSON serialization, following the change in Wikibase

Bug fixes:

* Do not fail when logging in
* Do not fail when reading redirections in daily XML dumps
* Do not fail when new datatypes are introduced in Wikibase
* Make sure that API warnings are read for all requests
* Do not fail when reading a bz2-compressed dump when a gzip dump was expected
* WikibaseDataFetcher is now able to retrieve more than 50 entities at once
* Switch to the new way of retrieving MediaWiki API tokens

Version 0.7.0
-------------

New features:

* Add a new client action "sqid" that analyses dumps to create the statistics JSON files that are the basis for the SQID Wikidata Browser found at https://tools.wmflabs.org/sqid/

Bug fixes:

* Fix JavaDoc errors to enable builds using Java 8 (with doclint)
* Make the JSON parser more tolerant towards unknown keys; avoids breaking on recent API changes
* Update the Wikimedia dump location to https so that dump downloads work again

Version 0.6.0
-------------

A new stand-alone example project now shows how to use WDTK as a library: https://github.com/Wikidata/Wikidata-Toolkit-Examples

New features:

* Support for the new Wikidata property type "external identifier"
* Support for the new Wikidata property type "math"
* Bots: support for the maxlag parameter and edit-rate throttling
* Bots: better Wikidata API error handling
* Bots: several real-world bot examples
* New convenience methods for accessing Wikidata Java objects, for simpler code
* Full compatibility with Java 8

Bug fixes:

* Fix NullPointerException when trying to establish an API connection (issue #217)
* Avoid test failures on some platforms (based on too strict assumptions)

Version 0.5.0
-------------

New features:

* Support for reading and writing live entity data from wikidata.org or any other Wikibase site (issue #162)
* New examples illustrating read/write API support
* Support for quantities with units of measurement (new feature in Wikibase; still beta)
* New builder classes to simplify the construction of EntityDocuments, Statements, and References
* Support processing of local dump files by file name, in code and in the command-line client (issue #136)
* New example WorldMapProcessor that shows the generation of maps from geographic data
* Improved output file naming for examples, taking the dump date into account
* The RDF export uses a property register for fewer web requests during export
* The RDF export supports P1921 URI patterns to create links to external RDF datasets

Bug fixes:

* The JSON conversion action of the command-line client was forgetting the start of the entity list
* Update URLs to use https instead of http
* Support URLs in the sites table that are not protocol-relative (issue #163)

Incompatible changes:

* EntityDocumentProcessorFilter has a modified constructor that requires a filter object to be given. The direct set methods to define the filter are no longer available.
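A sketch of the 0.5.0 builder classes from the `helpers` package (the names follow the Examples project; the property and item ids are only placeholders):

```java
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder;
import org.wikidata.wdtk.datamodel.helpers.ReferenceBuilder;
import org.wikidata.wdtk.datamodel.helpers.StatementBuilder;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Reference;
import org.wikidata.wdtk.datamodel.interfaces.Statement;

public class BuilderSketch {
	public static void main(String[] args) {
		ItemIdValue subject = Datamodel.makeWikidataItemIdValue("Q42");
		PropertyIdValue p31 = Datamodel.makeWikidataPropertyIdValue("P31");
		PropertyIdValue p143 = Datamodel.makeWikidataPropertyIdValue("P143");

		// a reference: "imported from" English Wikipedia
		Reference reference = ReferenceBuilder.newInstance()
				.withPropertyValue(p143, Datamodel.makeWikidataItemIdValue("Q328"))
				.build();

		// a statement: Q42 instance-of Q5, with the reference attached
		Statement statement = StatementBuilder.forSubjectAndProperty(subject, p31)
				.withValue(Datamodel.makeWikidataItemIdValue("Q5"))
				.withReference(reference)
				.build();

		ItemDocument document = ItemDocumentBuilder.forItemId(subject)
				.withLabel("Douglas Adams", "en")
				.withStatement(statement)
				.build();
		System.out.println(document);
	}
}
```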
Version 0.4.0
-------------

New features:

* Support statements on property documents
* More robust JSON parsing: recover after errors to process the remaining file
* Improved JSON serialization + an example program showing how to do it
* The standard (POJO) datamodel implementation is now Serializable
* Deep copy functionality for changing between datamodel implementations (DatamodelConverter)
* Support for filtering data during copying (e.g., to keep only some languages/properties/sites)
* Support arbitrary-precision values in globe coordinates
* The dependency on JSON.org has been removed in favour of the faster Jackson library everywhere

Bug fixes:

* Support RDF export of MonolingualTextValue data in statements
* Significant performance improvements in the RDF export of taxonomy data
* Support the new Wikimedia Foundation dump file index HTML format (issue #114)

Incompatible changes:

* The datatype of all values in GlobeCoordinateValue (latitude, longitude, precision) has changed from long (fixed-precision number) to double (floating-point number) to match the JSON.
* The JSON serializer class org.wikidata.wdtk.datamodel.json.JsonSerializer has vanished. It is replaced by org.wikidata.wdtk.datamodel.json.jackson.JsonSerializer (almost the same interface).

Version 0.3.0
-------------

New features:

* Added full support for reading data from the API JSON format (now used in all dumps); reading JSON dumps also became much faster with this change
* Improved examples (more, faster, easier-to-read programs); documentation for each example is now found in the Readme.md file in the example package
* Added iterator access to all statements of an item document, all statements in a statement group, all qualifiers in a claim, all snaks in a snak group, and all snaks in a reference
* Dump files are downloaded to temporary files first, to prevent incomplete downloads from causing errors
* Datamodel objects can now be constructed using the static methods of Datamodel. This makes object creation more convenient.

Minor changes:

* ItemIdValue and PropertyIdValue objects now have a "site IRI" that can be retrieved. This was called "base IRI" in earlier releases and was only used to construct the full IRI. The new concept is that this IRI is actually the identifier for the site that the entity comes from. It is important to make it retrievable since it is needed (like in previous versions) to construct the object using the factory.
* A new helper package in the datamodel module contains common hashCode(), equals(), and toString() methods that can be used by any datamodel implementation.

Bug fixes:

* Fix grouping of Statements when reading data from dumps (issue #78)
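The `Datamodel` factory methods and the site IRI concept from 0.3.0 in a short sketch (the site IRI string shown for Wikidata is an assumption matching `Datamodel.SITE_WIKIDATA`):

```java
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;

public class DatamodelFactorySketch {
	public static void main(String[] args) {
		// generic factory method: the site IRI identifies the Wikibase site
		ItemIdValue q42 = Datamodel.makeItemIdValue("Q42", "http://www.wikidata.org/entity/");
		// convenience variant preconfigured with the Wikidata site IRI
		ItemIdValue sameId = Datamodel.makeWikidataItemIdValue("Q42");
		System.out.println(q42.getSiteIri()); // the site the entity comes from
		System.out.println(sameId.getIri());  // full IRI built from site IRI + id
	}
}
```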
Version 0.2.0
-------------

New features:

* Support for serializing Wikibase data in RDF (as illustrated in a new example); see http://korrekt.org/page/Introducing_Wikidata_to_the_Linked_Data_Web for details
* Simplified code for dump file processing: new helper class DumpProcessingController
* Support for resolving site links, based on information from the sites table dump (as demonstrated in a new example program)
* Support for SnakGroups (data model updated to group snaks by property in all lists)
* Support for serializing Wikibase data in JSON (as illustrated in a new example)

Bug fixes:

* Support the changed Wikimedia dump HTML page format, which caused downloads to fail (issue #70)
* Support processing of property documents when parsing dumps (issue #67)
* Support SomeValueSnak and NoValueSnak in references (issue #44)
* Use correct site links when importing data from dumps (issue #37)
* Do not attempt to download unfinished dump files (issue #63)

Incompatible changes:

* The processing of dump files was simplified, using a new class DumpProcessingController. The former method WmfDumpFileManager#processRecentRevisionDumps() was replaced by DumpProcessingController#processAllRecentRevisionDumps(). See the examples for example code.
* Dump files no longer support the retrieval of the maximal revision id, since this information is no longer published for the main dumps on the Wikimedia site.

Version 0.1.0
-------------

New features:

* Initial Java implementation of the Wikibase datamodel
* Support for downloading Wikimedia dump files
* Support for parsing MediaWiki XML dumps
* Support for parsing Wikibase dump contents to get entity data
* Example Java program that shows how to process Wikidata dump files

Bug fixes:

* Not applicable; this is the very first release

Known issues:

* Entities loaded from dumps get the wrong base IRI (issue #43)
* URLs for sitelinks are missing (issue #37)

===== Wikidata-Toolkit-0.14.6/pom.xml =====

[The XML tags of this file were lost in extraction; the recoverable content is outlined below.]

Project: org.wikidata.wdtk:wdtk-parent:0.14.6, packaging pom (modelVersion 4.0.0)
Name: Wikidata Toolkit
Description: A Java-based library for working with Wikidata and Wikibase
URL: https://www.mediawiki.org/wiki/Wikidata_Toolkit

Modules: wdtk-datamodel, wdtk-dumpfiles, wdtk-storage, wdtk-wikibaseapi,
wdtk-util, wdtk-testing, wdtk-examples, wdtk-distribution, wdtk-rdf

License: Apache License, Version 2.0 (http://www.apache.org/licenses/LICENSE-2.0.txt)
Inception year: 2014; organization: Wikidata Toolkit Developers

Developers:
* markus: Markus Kroetzsch (markus@semantic-mediawiki.org)
* julian: Julian Mendez (julian.mendez@tu-dresden.de)
* fer-rum: Fredo Erxleben (fredo.erxleben@tu-dresden.de)
* michael: Michael Günther (guenthermi50@yahoo.de)
* tpt: Thomas Pellissier Tanon (thomas@pellissier-tanon.fr)
* wetneb: Antonin Delpeuch (antonin@delpeuch.eu)

Properties (the property names were lost; the surviving values, in order):
UTF-8, 1.23.0, 3.12.0, 2.13.0, 2.15.2, 4.13.2, 2.2, 4.11.0, 3.7.7, 2.0.7,
1.7.2, 1.1.0, 4.11.0
(the references below imply names such as junitVersion=4.13.2,
hamcrestVersion=2.2, apacheCommonsLangVersion=3.12.0)

Shared dependencies:
* junit:junit:${junitVersion} (test)
* org.hamcrest:hamcrest-core:${hamcrestVersion} (test)
* org.mockito:mockito-core:${mockitoVersion} (test)
* org.slf4j:slf4j-api:${slf4jVersion}
* org.apache.commons:commons-lang3:${apacheCommonsLangVersion}

Build plugins:
* org.codehaus.mojo:license-maven-plugin:2.1.0, execution "first" with goals
  update-file-header and update-project-license in phase process-sources,
  configured for the apache_v2 license over src/main/java and src/test/java
* org.eclipse.m2e:lifecycle-mapping:1.0.0, mapping the license-maven-plugin
  (version range [1.2,)) goals update-project-license and update-file-header
* org.apache.maven.plugins:maven-compiler-plugin:3.11.0 (source and target 8)
* org.eluder.coveralls:coveralls-maven-plugin:4.3.0, with plugin dependency
  javax.xml.bind:jaxb-api:2.3.1
* org.jacoco:jacoco-maven-plugin:0.8.10, execution "prepare-agent" with goal
  prepare-agent
* org.apache.maven.plugins:maven-source-plugin:3.3.0, execution
  "attach-sources" with goal jar-no-fork
* org.apache.maven.plugins:maven-javadoc-plugin:3.5.0, with a footer linking
  to the "Wikidata Toolkit homepage", source level 8, and execution
  "attach-javadocs" with goal jar
* org.sonatype.plugins:nexus-staging-maven-plugin:1.6.13 (extensions: true),
  server id ossrh, nexus URL https://oss.sonatype.org/, and a second "true"
  flag (presumably autoReleaseAfterClose)
* org.apache.maven.plugins:maven-gpg-plugin:3.1.0, execution "sign-artifacts"
  with goal sign in phase verify
* org.apache.maven.plugins:maven-scm-publish-plugin:3.2.1, checkout directory
  ${project.build.directory}/scmpublish, checkin comment "Publishing javadoc
  for ${project.artifactId}:${project.version}", content
  ${project.reporting.outputDirectory}/apidocs, publishing to
  scm:git:https://github.com/Wikidata/Wikidata-Toolkit.git on branch gh-pages

SCM: https://github.com/Wikidata/Wikidata-Toolkit.git (connection and
developerConnection scm:git:https://github.com/Wikidata/Wikidata-Toolkit.git)

Distribution management:
* snapshots: ossrh, https://oss.sonatype.org/content/repositories/snapshots
* releases: ossrh, "Central Repository OSSRH",
  https://oss.sonatype.org/service/local/staging/deploy/maven2/
===== Wikidata-Toolkit-0.14.6/wdtk-datamodel/LICENSE.txt =====

(identical copy of the Apache License 2.0 text in the repository root LICENSE.txt above)
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
===== Wikidata-Toolkit-0.14.6/wdtk-datamodel/pom.xml =====

[The XML tags of this file were lost in extraction; the recoverable content is outlined below.]

Project: wdtk-datamodel, packaging jar (modelVersion 4.0.0)
Parent: org.wikidata.wdtk:wdtk-parent:0.14.6
Name: Wikidata Toolkit Data Model
Description: Java implementation of the Wikibase data model

Dependencies:
* ${project.groupId}:wdtk-testing:${project.version} (test)
* ${project.groupId}:wdtk-util:${project.version}
* com.fasterxml.jackson.core:jackson-annotations:${jacksonVersion}
* com.fasterxml.jackson.core:jackson-core:${jacksonVersion}
* com.fasterxml.jackson.core:jackson-databind:${jacksonVersion}
* com.fasterxml.jackson.datatype:jackson-datatype-jdk8:${jacksonVersion}
* org.threeten:threeten-extra:${threetenVersion}
* commons-io:commons-io:${apacheCommonsIOVersion} (test)

Build: test resources taken from src/test/resources

===== Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/AbstractDataObjectBuilder.java =====

package org.wikidata.wdtk.datamodel.helpers;

import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory;

/**
 * Abstract base class for all builder objects that create data model objects.
 *
 * @author Markus Kroetzsch
 *
 * @param <T>
 *            the type of the eventual concrete builder implementation
 * @param <O>
 *            the type of the object that is being built
 */
public abstract class AbstractDataObjectBuilder<T extends AbstractDataObjectBuilder<T, O>, O> {

	static DataObjectFactory factory = new DataObjectFactoryImpl();

	private boolean isBuilt = false;

	/**
	 * Returns the object that has been built.
	 *
	 * @return constructed object
	 * @throws IllegalStateException
	 *             if the object was built already
	 */
	public abstract O build();

	/**
	 * Checks if the object has already been built, and throws an exception if
	 * yes. If no, then the object is recorded as having been built.
	 *
	 * @throws IllegalStateException
	 *             if the object was built already
	 */
	protected void prepareBuild() {
		if (this.isBuilt) {
			throw new IllegalStateException("The entity has been built");
		}
		this.isBuilt = true;
	}

	/**
	 * Returns the current object with the correct builder type.
	 *
	 * @return this
	 */
	abstract protected T getThis();
}
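A small illustration of the build-once contract that `prepareBuild()` enforces, using the concrete `ItemDocumentBuilder` from this package (a sketch: it assumes the concrete `build()` calls `prepareBuild()`, and the exact exception message is implementation-defined):

```java
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;

public class BuildOnceDemo {
	public static void main(String[] args) {
		ItemDocumentBuilder builder = ItemDocumentBuilder
				.forItemId(Datamodel.makeWikidataItemIdValue("Q42"));
		ItemDocument first = builder.build();  // succeeds
		ItemDocument second = builder.build(); // throws IllegalStateException
	}
}
```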
* * @return constructed object * @throws IllegalStateException * if the object was built already */ public abstract O build(); /** * Checks if the object has already been built, and throws an exception if * yes. If no, then the object is recorded as having been built. * * @throws IllegalStateException * if the object was built already */ protected void prepareBuild() { if (this.isBuilt) { throw new IllegalStateException("The entity has been built"); } this.isBuilt = true; } /** * Returns the current object with the correct builder type. * * @return this */ abstract protected T getThis(); } AliasUpdateBuilder.java000066400000000000000000000205201444772566300355600ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; /** * Builder for incremental construction of {@link AliasUpdate} objects. */ public class AliasUpdateBuilder { private String languageCode; private final List base; private List recreated; private final List added = new ArrayList<>(); private final Set removed = new HashSet<>(); private AliasUpdateBuilder(List base) { if (base != null) { for (MonolingualTextValue alias : base) { Objects.requireNonNull(alias, "Base document aliases cannot be null."); } Validate.isTrue( base.stream().map(v -> v.getLanguageCode()).distinct().count() <= 1, "Base document aliases must have the same language code."); Validate.isTrue(base.stream().distinct().count() == base.size(), "Base document aliases must be unique."); this.base = new ArrayList<>(base); languageCode = base.stream().map(v -> v.getLanguageCode()).findFirst().orElse(null); } else this.base = null; } /** * Creates new builder object for constructing alias update. * * @return update builder object */ public static AliasUpdateBuilder create() { return new AliasUpdateBuilder(null); } /** * Creates new builder object for constructing update of given base revision * aliases. Provided aliases will be used to check correctness of changes. *

* Since all changes will be checked after the {@link AliasUpdate} is passed to * {@link TermedDocumentUpdateBuilder} anyway, it is usually unnecessary to use * this method. It is simpler to initialize the builder with {@link #create()}. * * @param aliases * aliases from base revision of the document * @return update builder object * @throws NullPointerException * if {@code aliases} or any of its items is {@code null} * @throws IllegalArgumentException * if there are duplicate items in {@code aliases} */ public static AliasUpdateBuilder forAliases(List aliases) { Objects.requireNonNull(aliases, "Base document alias collection cannot be null."); return new AliasUpdateBuilder(aliases); } /** * Adds new alias. This operation can be repeated to add multiple aliases in one * update. It can be combined with {@link #remove(MonolingualTextValue)}. * Attempt to add the same alias twice or to add alias already present in base * document (if available) is silently ignored. Adding previously removed alias * cancels the removal. If {@link #recreate(List)} was called before, this * method will add the alias to the end of the new alias list. * * @param alias * new alias * @return {@code this} (fluent method) * @throws NullPointerException * if {@code alias} is {@code null} * @throws IllegalArgumentException * if the alias has language code inconsistent with other aliases */ public AliasUpdateBuilder add(MonolingualTextValue alias) { Objects.requireNonNull(alias, "Alias cannot be null."); if (languageCode != null) { Validate.isTrue(languageCode.equals(alias.getLanguageCode()), "Inconsistent language codes."); } if (recreated != null) { if (!recreated.contains(alias)) { recreated.add(alias); if (recreated.equals(base)) { recreated = null; } } } else if (removed.contains(alias)) { removed.remove(alias); } else if (!added.contains(alias) && (base == null || !base.contains(alias))) { added.add(alias); } languageCode = alias.getLanguageCode(); return this; } /** * Removed existing alias. This operation can be repeated to remove multiple * aliases in one update. It can be combined with * {@link #add(MonolingualTextValue)}. Attempt to remove the same alias twice or * to remove alias not present in base document (if available) is silently * ignored. Removing previously added alias cancels the addition. If * {@link #recreate(List)} was called before, this method will remove the alias * from the new alias list. * * @param alias * removed alias * @return {@code this} (fluent method) * @throws NullPointerException * if {@code alias} is {@code null} * @throws IllegalArgumentException * if the alias has language code inconsistent with other aliases */ public AliasUpdateBuilder remove(MonolingualTextValue alias) { Objects.requireNonNull(alias, "Alias cannot be null."); if (languageCode != null) { Validate.isTrue(languageCode.equals(alias.getLanguageCode()), "Inconsistent language codes."); } if (recreated != null) { recreated.remove(alias); if (recreated.equals(base)) { recreated = null; } } else if (added.contains(alias)) { added.remove(alias); } else if (!removed.contains(alias) && (base == null || base.contains(alias))) { removed.add(alias); } languageCode = alias.getLanguageCode(); return this; } /** * Replaces current alias list with completely new alias list. Any previous * changes are discarded. To remove all aliases, pass empty list to this method. * If the new alias list is identical (including order) to base document alias * list (if provided), the update will be empty. 
	 *
	 * @param aliases
	 *            new list of aliases
	 * @return {@code this} (fluent method)
	 * @throws NullPointerException
	 *             if {@code aliases} or any of its items is {@code null}
	 * @throws IllegalArgumentException
	 *             if some alias has an inconsistent language code or there are
	 *             duplicates
	 */
	public AliasUpdateBuilder recreate(List<MonolingualTextValue> aliases) {
		Objects.requireNonNull(aliases, "Alias list cannot be null.");
		for (MonolingualTextValue alias : aliases) {
			Objects.requireNonNull(alias, "Aliases cannot be null.");
		}
		Validate.isTrue(
				aliases.stream().map(v -> v.getLanguageCode()).distinct().count() <= 1,
				"Aliases must have the same language code.");
		Validate.isTrue(
				aliases.stream().map(v -> v.getText()).distinct().count() == aliases.size(),
				"All aliases must be unique.");
		if (languageCode != null && !aliases.isEmpty()) {
			Validate.isTrue(languageCode.equals(aliases.get(0).getLanguageCode()), "Inconsistent language codes.");
		}
		added.clear();
		removed.clear();
		if (!aliases.equals(base)) {
			recreated = new ArrayList<>(aliases);
		} else {
			recreated = null;
		}
		if (!aliases.isEmpty()) {
			languageCode = aliases.get(0).getLanguageCode();
		}
		return this;
	}

	/**
	 * Replays all changes in the provided update into this builder object.
	 * Changes are performed as if by calling {@link #add(MonolingualTextValue)},
	 * {@link #remove(MonolingualTextValue)}, and {@link #recreate(List)} methods.
	 *
	 * @param update
	 *            alias update to replay
	 * @return {@code this} (fluent method)
	 * @throws NullPointerException
	 *             if {@code update} is {@code null}
	 */
	public AliasUpdateBuilder append(AliasUpdate update) {
		Objects.requireNonNull(update, "Alias update cannot be null.");
		update.getRecreated().ifPresent(this::recreate);
		for (MonolingualTextValue alias : update.getRemoved()) {
			remove(alias);
		}
		for (MonolingualTextValue alias : update.getAdded()) {
			add(alias);
		}
		return this;
	}

	/**
	 * Creates a new {@link AliasUpdate} object with the contents of this builder
	 * object.
	 *
	 * @return constructed object
	 */
	public AliasUpdate build() {
		return Datamodel.makeAliasUpdate(recreated, added, removed);
	}
}
DataFormatter.java000066400000000000000000000044661444772566300346250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.math.BigDecimal;
import java.text.DecimalFormat;

import org.wikidata.wdtk.datamodel.interfaces.TimeValue;

/**
 * This class contains static methods to create string notations for values of
 * several datatypes and classes.
 *
 * @author Michael Günther
 */
public class DataFormatter {

	final static String FORMAT_YEAR = "00000000000";
	final static String FORMAT_OTHER = "00";

	/**
	 * Returns an ISO 8601 representation of the date and time stored in the
	 * given {@link TimeValue}.
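	 * <p>
	 * For illustration, a sketch with a hypothetical value (the expected output
	 * follows from the {@code FORMAT_YEAR} pattern above):
	 *
	 * <pre>
	 * TimeValue time = Datamodel.makeTimeValue(2014, (byte) 2, (byte) 17,
	 * 		(byte) 12, (byte) 30, (byte) 0, 0, TimeValue.CM_GREGORIAN_PRO);
	 * DataFormatter.formatTimeISO8601(time); // "+00000002014-02-17T12:30:00Z"
	 * </pre>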
* * @param value * @return ISO 8601 value (String) */ public static String formatTimeISO8601(TimeValue value) { StringBuilder builder = new StringBuilder(); DecimalFormat yearForm = new DecimalFormat(FORMAT_YEAR); DecimalFormat timeForm = new DecimalFormat(FORMAT_OTHER); if (value.getYear() > 0) { builder.append("+"); } builder.append(yearForm.format(value.getYear())); builder.append("-"); builder.append(timeForm.format(value.getMonth())); builder.append("-"); builder.append(timeForm.format(value.getDay())); builder.append("T"); builder.append(timeForm.format(value.getHour())); builder.append(":"); builder.append(timeForm.format(value.getMinute())); builder.append(":"); builder.append(timeForm.format(value.getSecond())); builder.append("Z"); return builder.toString(); } /** * Returns a signed string representation of the given number. * * @param number * @return String for BigDecimal value */ public static String formatBigDecimal(BigDecimal number) { if (number.signum() != -1) { return "+" + number.toString(); } else { return number.toString(); } } } Datamodel.java000066400000000000000000001343771444772566300337670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.math.BigDecimal; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyUpdate; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; /** * This class contains static methods to create WDTK data objects. This is the * preferred way of creating data objects in WDTK. * * @author Markus Kroetzsch */ public class Datamodel { /** * The site IRI of Wikidata. */ static final public String SITE_WIKIDATA = "http://www.wikidata.org/entity/"; /** * The site IRI of Wikimedia Commons. */ static final public String SITE_WIKIMEDIA_COMMONS = "http://commons.wikimedia.org/entity/"; private final static DataObjectFactory factory = new DataObjectFactoryImpl(); /** * Creates an {@link ItemIdValue}. * * @param id * a string of the form Qn... where n... 
is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return an {@link ItemIdValue} corresponding to the input */ public static ItemIdValue makeItemIdValue(String id, String siteIri) { return factory.getItemIdValue(id, siteIri); } /** * Creates an {@link ItemIdValue} for Wikidata. * * @param id * a string of the form Qn... where n... is the string * representation of a positive integer number * @return an {@link ItemIdValue} corresponding to the input */ public static ItemIdValue makeWikidataItemIdValue(String id) { return factory.getItemIdValue(id, SITE_WIKIDATA); } /** * Creates a {@link PropertyIdValue}. * * @param id * a string of the form Pn... where n... is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return a {@link PropertyIdValue} corresponding to the input */ public static PropertyIdValue makePropertyIdValue(String id, String siteIri) { return factory.getPropertyIdValue(id, siteIri); } /** * Creates a {@link PropertyIdValue}. * * @param id * a string of the form Pn... where n... is the string * representation of a positive integer number * @return a {@link PropertyIdValue} corresponding to the input */ public static PropertyIdValue makeWikidataPropertyIdValue(String id) { return factory.getPropertyIdValue(id, SITE_WIKIDATA); } /** * Creates an {@link LexemeIdValue}. * * @param id * a string of the form Ln... where n... is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return an {@link LexemeIdValue} corresponding to the input */ public static LexemeIdValue makeLexemeIdValue(String id, String siteIri) { return factory.getLexemeIdValue(id, siteIri); } /** * Creates an {@link LexemeIdValue} for Wikidata. * * @param id * a string of the form Ln... where n... is the string * representation of a positive integer number * @return an {@link LexemeIdValue} corresponding to the input */ public static LexemeIdValue makeWikidataLexemeIdValue(String id) { return factory.getLexemeIdValue(id, SITE_WIKIDATA); } /** * Creates an {@link FormIdValue}. * * @param id * a string of the form Ln...-Fm... where n... and m... are the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return an {@link FormIdValue} corresponding to the input */ public static FormIdValue makeFormIdValue(String id, String siteIri) { return factory.getFormIdValue(id, siteIri); } /** * Creates an {@link FormIdValue} for Wikidata. * * @param id * a string of the form Ln...-F... where n... and m... are the string * representation of a positive integer number * @return an {@link FormIdValue} corresponding to the input */ public static FormIdValue makeWikidataFormIdValue(String id) { return factory.getFormIdValue(id, SITE_WIKIDATA); } /** * Creates an {@link SenseIdValue}. * * @param id * a string of the form Ln...-Sm... where n... and m... 
are the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return an {@link SenseIdValue} corresponding to the input */ public static SenseIdValue makeSenseIdValue(String id, String siteIri) { return factory.getSenseIdValue(id, siteIri); } /** * Creates an {@link SenseIdValue} for Wikidata. * * @param id * a string of the form Ln...-S... where n... and m... are the string * representation of a positive integer number * @return an {@link SenseIdValue} corresponding to the input */ public static SenseIdValue makeWikidataSenseIdValue(String id) { return factory.getSenseIdValue(id, SITE_WIKIDATA); } /** * Creates an {@link MediaInfoIdValue}. * * @param id * a string of the form Mn... where n... is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return an {@link MediaInfoIdValue} corresponding to the input */ public static MediaInfoIdValue makeMediaInfoIdValue(String id, String siteIri) { return factory.getMediaInfoIdValue(id, siteIri); } /** * Creates an {@link MediaInfoIdValue} for Wikimedia Commons. * * @param id * a string of the form Mn... where n... is the string * representation of a positive integer number * @return an {@link MediaInfoIdValue} corresponding to the input */ public static MediaInfoIdValue makeWikimediaCommonsMediaInfoIdValue(String id) { return factory.getMediaInfoIdValue(id, SITE_WIKIMEDIA_COMMONS); } /** * Creates a {@link DatatypeIdValue}. The datatype IRI is usually one of the * constants defined in {@link DatatypeIdValue}, but this is not enforced, * since there might be extensions that provide additional types. * * @param id * the IRI string that identifies the datatype * @return a {@link DatatypeIdValue} corresponding to the input * @deprecated use {@link #makeDatatypeIdValueFromJsonString(String)} */ public static DatatypeIdValue makeDatatypeIdValue(String id) { return factory.getDatatypeIdValue(id); } /** * Creates a {@link DatatypeIdValue}. The string is an identifier used in * the JSON serialization of properties to identify the datatype, and is one * of the constants defined in {@link DatatypeIdValue}, but this is not enforced, * since there are extensions that provide additional types. * * @param jsonString * the string that identifies the datatype * @return a {@link DatatypeIdValue} corresponding to the input */ public static DatatypeIdValue makeDatatypeIdValueFromJsonString(String jsonString) { return factory.getDatatypeIdValueFromJsonId(jsonString); } /** * Creates a {@link TimeValue}. 
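	 * <p>
	 * For illustration, a sketch with hypothetical values (note that
	 * {@code afterTolerance} must be positive):
	 *
	 * <pre>
	 * TimeValue time = Datamodel.makeTimeValue(2024, (byte) 5, (byte) 1,
	 * 		(byte) 0, (byte) 0, (byte) 0, TimeValue.PREC_DAY, 0, 1, 0,
	 * 		TimeValue.CM_GREGORIAN_PRO);
	 * </pre>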
	 *
	 * @param year
	 *            a year number, where 0 refers to 1 BCE
	 * @param month
	 *            a month number between 1 and 12
	 * @param day
	 *            a day number between 1 and 31
	 * @param hour
	 *            an hour number between 0 and 23
	 * @param minute
	 *            a minute number between 0 and 59
	 * @param second
	 *            a second number between 0 and 60 (possible leap second)
	 * @param precision
	 *            a value in the range of {@link TimeValue#PREC_DAY}, ...,
	 *            {@link TimeValue#PREC_1GY}
	 * @param beforeTolerance
	 *            non-negative integer tolerance before the value; see
	 *            {@link TimeValue#getBeforeTolerance()}
	 * @param afterTolerance
	 *            non-zero, positive integer tolerance after the value; see
	 *            {@link TimeValue#getAfterTolerance()}
	 * @param timezoneOffset
	 *            offset in minutes that should be applied when displaying this
	 *            time
	 * @param calendarModel
	 *            the IRI of the calendar model preferred when displaying the
	 *            date; usually {@link TimeValue#CM_GREGORIAN_PRO} or
	 *            {@link TimeValue#CM_JULIAN_PRO}
	 * @return a {@link TimeValue} corresponding to the input
	 */
	public static TimeValue makeTimeValue(long year, byte month, byte day,
			byte hour, byte minute, byte second, byte precision,
			int beforeTolerance, int afterTolerance, int timezoneOffset,
			String calendarModel) {
		return factory.getTimeValue(year, month, day, hour, minute, second,
				precision, beforeTolerance, afterTolerance, timezoneOffset,
				calendarModel);
	}

	/**
	 * Creates a {@link TimeValue} for a given date and time. The precision is
	 * automatically set to {@link TimeValue#PREC_SECOND}.
	 *
	 * @param year
	 *            a year number, where 0 refers to 1 BCE
	 * @param month
	 *            a month number between 1 and 12
	 * @param day
	 *            a day number between 1 and 31
	 * @param hour
	 *            an hour number between 0 and 23
	 * @param minute
	 *            a minute number between 0 and 59
	 * @param second
	 *            a second number between 0 and 60 (possible leap second)
	 * @param timezoneOffset
	 *            offset in minutes that should be applied when displaying this
	 *            time
	 * @param calendarModel
	 *            the IRI of the calendar model preferred when displaying the
	 *            date; usually {@link TimeValue#CM_GREGORIAN_PRO} or
	 *            {@link TimeValue#CM_JULIAN_PRO}
	 * @return a {@link TimeValue} corresponding to the input
	 */
	public static TimeValue makeTimeValue(long year, byte month, byte day,
			byte hour, byte minute, byte second, int timezoneOffset,
			String calendarModel) {
		return factory.getTimeValue(year, month, day, hour, minute, second,
				TimeValue.PREC_SECOND, 0, 0, timezoneOffset, calendarModel);
	}

	/**
	 * Creates a {@link TimeValue} for a given date. The precision is
	 * automatically set to {@link TimeValue#PREC_DAY}.
	 *
	 * @param year
	 *            a year number, where 0 refers to 1 BCE
	 * @param month
	 *            a month number between 1 and 12
	 * @param day
	 *            a day number between 1 and 31
	 * @param calendarModel
	 *            the IRI of the calendar model preferred when displaying the
	 *            date; usually {@link TimeValue#CM_GREGORIAN_PRO} or
	 *            {@link TimeValue#CM_JULIAN_PRO}
	 * @return a {@link TimeValue} corresponding to the input
	 */
	public static TimeValue makeTimeValue(long year, byte month, byte day,
			String calendarModel) {
		return factory.getTimeValue(year, month, day, (byte) 0, (byte) 0,
				(byte) 0, TimeValue.PREC_DAY, 0, 0, 0, calendarModel);
	}

	/**
	 * Creates a {@link GlobeCoordinatesValue}.
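	 * <p>
	 * For illustration, hypothetical coordinates (the globe IRI shown is the
	 * Wikidata item for Earth):
	 *
	 * <pre>
	 * GlobeCoordinatesValue coordinates = Datamodel.makeGlobeCoordinatesValue(
	 * 		51.0333, 13.7333, 0.0001, "http://www.wikidata.org/entity/Q2");
	 * </pre>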
* * @param latitude * the latitude of the coordinates in degrees * @param longitude * the longitude of the coordinates in degrees * @param precision * the precision of the coordinates in degrees * @param globeIri * IRI specifying the celestial objects of the coordinates * @return a {@link GlobeCoordinatesValue} corresponding to the input */ public static GlobeCoordinatesValue makeGlobeCoordinatesValue( double latitude, double longitude, double precision, String globeIri) { return factory.getGlobeCoordinatesValue(latitude, longitude, precision, globeIri); } /** * Creates a {@link StringValue}. * * @param string * @return a {@link StringValue} corresponding to the input */ public static StringValue makeStringValue(String string) { return factory.getStringValue(string); } /** * Creates a {@link MonolingualTextValue}. * * @param text * the text of the value * @param languageCode * the language code of the value * @return a {@link MonolingualTextValue} corresponding to the input */ public static MonolingualTextValue makeMonolingualTextValue(String text, String languageCode) { return factory.getMonolingualTextValue(text, languageCode); } /** * Creates a {@link QuantityValue}. * * @param numericValue * the numeric value of this quantity * @param lowerBound * the lower bound of the numeric value of this quantity * @param upperBound * the upper bound of the numeric value of this quantity * @param unit * the unit identifier to use for this quantity * @return a {@link QuantityValue} corresponding to the input */ public static QuantityValue makeQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound, ItemIdValue unit) { return factory.getQuantityValue(numericValue, lowerBound, upperBound, unit); } /** * Creates a {@link QuantityValue} without bounds. * * @param numericValue * the numeric value of this quantity * @param unit * the unit identifier to use for this quantity * @return a {@link QuantityValue} corresponding to the input */ public static QuantityValue makeQuantityValue(BigDecimal numericValue, ItemIdValue unit) { return factory.getQuantityValue(numericValue, unit); } /** * Creates a {@link QuantityValue} with an empty unit. * * @param numericValue * the numeric value of this quantity * @param lowerBound * the lower bound of the numeric value of this quantity * @param upperBound * the upper bound of the numeric value of this quantity * @return a {@link QuantityValue} corresponding to the input */ public static QuantityValue makeQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound) { return factory.getQuantityValue(numericValue, lowerBound, upperBound); } /** * Creates a {@link QuantityValue} with an empty unit and without bounds. * * @param numericValue * the numeric value of this quantity * @return a {@link QuantityValue} corresponding to the input */ public static QuantityValue makeQuantityValue(BigDecimal numericValue) { return factory.getQuantityValue(numericValue); } /** * Creates a {@link QuantityValue} from long numbers. 
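	 * <p>
	 * For illustration, the quantity 42 with bounds [41, 43] and no unit:
	 *
	 * <pre>
	 * QuantityValue quantity = Datamodel.makeQuantityValue(42, 41, 43);
	 * </pre>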
	 *
	 * @param numericValue
	 *            the numeric value of this quantity
	 * @param lowerBound
	 *            the lower bound of the numeric value of this quantity
	 * @param upperBound
	 *            the upper bound of the numeric value of this quantity
	 * @return a {@link QuantityValue} corresponding to the input
	 */
	public static QuantityValue makeQuantityValue(long numericValue,
			long lowerBound, long upperBound) {
		return factory.getQuantityValue(new BigDecimal(numericValue),
				new BigDecimal(lowerBound), new BigDecimal(upperBound));
	}

	/**
	 * Creates a {@link ValueSnak}.
	 *
	 * @param propertyId
	 * @param value
	 * @return a {@link ValueSnak} corresponding to the input
	 */
	public static ValueSnak makeValueSnak(PropertyIdValue propertyId, Value value) {
		return factory.getValueSnak(propertyId, value);
	}

	/**
	 * Creates a {@link SomeValueSnak}.
	 *
	 * @param propertyId
	 * @return a {@link SomeValueSnak} corresponding to the input
	 */
	public static SomeValueSnak makeSomeValueSnak(PropertyIdValue propertyId) {
		return factory.getSomeValueSnak(propertyId);
	}

	/**
	 * Creates a {@link NoValueSnak}.
	 *
	 * @param propertyId
	 * @return a {@link NoValueSnak} corresponding to the input
	 */
	public static NoValueSnak makeNoValueSnak(PropertyIdValue propertyId) {
		return factory.getNoValueSnak(propertyId);
	}

	/**
	 * Creates a {@link SnakGroup}.
	 *
	 * @param snaks
	 *            a non-empty list of snaks that use the same property
	 * @return a {@link SnakGroup} corresponding to the input
	 */
	public static SnakGroup makeSnakGroup(List<Snak> snaks) {
		return factory.getSnakGroup(snaks);
	}

	/**
	 * Creates a {@link Claim}. It might be more convenient to use
	 * {@link #makeStatement} directly if you want to build a statement.
	 *
	 * @param subject
	 *            the subject the Statement refers to
	 * @param mainSnak
	 *            the main Snak of the Statement
	 * @param qualifiers
	 *            the qualifiers of the Statement, grouped in SnakGroups
	 * @return a {@link Claim} corresponding to the input
	 */
	public static Claim makeClaim(EntityIdValue subject, Snak mainSnak,
			List<SnakGroup> qualifiers) {
		return factory.getClaim(subject, mainSnak, qualifiers);
	}

	/**
	 * Creates a {@link Reference}. It might be more convenient to use the
	 * {@link ReferenceBuilder} instead.
	 *
	 * @param snakGroups
	 *            list of snak groups
	 * @return a {@link Reference} corresponding to the input
	 */
	public static Reference makeReference(List<SnakGroup> snakGroups) {
		return factory.getReference(snakGroups);
	}

	/**
	 * Creates a {@link Statement}. It might be more convenient to use the
	 * {@link StatementBuilder} instead.
	 * <p>
	 * The string id is used mainly for communication with a Wikibase site, in
	 * order to refer to statements of that site. When creating new statements
	 * that are not on any site, the empty string can be used.
	 *
	 * @param subject
	 *            the subject the Statement refers to
	 * @param mainSnak
	 *            the main Snak of the Statement
	 * @param qualifiers
	 *            the qualifiers of the Statement, grouped in SnakGroups
	 * @param references
	 *            the references for the Statement
	 * @param rank
	 *            the rank of the Statement
	 * @param statementId
	 *            the string id of the Statement
	 * @return a {@link Statement} corresponding to the input
	 */
	public static Statement makeStatement(EntityIdValue subject, Snak mainSnak,
			List<SnakGroup> qualifiers, List<Reference> references,
			StatementRank rank, String statementId) {
		return factory.getStatement(subject, mainSnak, qualifiers, references,
				rank, statementId);
	}

	/**
	 * Creates a {@link Statement}. It might be more convenient to use the
	 * {@link StatementBuilder} instead.
	 * <p>
	 * The string id is used mainly for communication with a Wikibase site, in
	 * order to refer to statements of that site. When creating new statements
	 * that are not on any site, the empty string can be used.
	 *
	 * @param claim
	 *            the main claim the Statement refers to
	 * @param references
	 *            the references for the Statement
	 * @param rank
	 *            the rank of the Statement
	 * @param statementId
	 *            the string id of the Statement
	 * @return a {@link Statement} corresponding to the input
	 */
	public static Statement makeStatement(Claim claim, List<Reference> references,
			StatementRank rank, String statementId) {
		return factory.getStatement(claim, references, rank, statementId);
	}

	/**
	 * Creates a {@link StatementGroup}.
	 *
	 * @param statements
	 *            a non-empty list of statements that use the same subject and
	 *            main-snak property in their claim
	 * @return a {@link StatementGroup} corresponding to the input
	 */
	public static StatementGroup makeStatementGroup(List<Statement> statements) {
		return factory.getStatementGroup(statements);
	}

	/**
	 * Creates a {@link SiteLink}.
	 *
	 * @param title
	 *            the title string of the linked page, including namespace
	 *            prefixes if any
	 * @param siteKey
	 *            the string key of the site of the linked article
	 * @param badges
	 *            the list of badges of the linked article
	 * @return a {@link SiteLink} corresponding to the input
	 */
	public static SiteLink makeSiteLink(String title, String siteKey,
			List<ItemIdValue> badges) {
		return factory.getSiteLink(title, siteKey, badges);
	}

	/**
	 * Creates a {@link SiteLink} without badges.
	 *
	 * @param title
	 *            the title string of the linked page, including namespace
	 *            prefixes if any
	 * @param siteKey
	 *            the string key of the site of the linked article
	 * @return a {@link SiteLink} corresponding to the input
	 */
	public static SiteLink makeSiteLink(String title, String siteKey) {
		return factory.getSiteLink(title, siteKey, Collections.emptyList());
	}

	/**
	 * Creates an empty {@link PropertyDocument}.
	 *
	 * @param propertyId
	 *            the id of the property that data is about
	 * @param datatypeId
	 *            the datatype of that property
	 * @return a {@link PropertyDocument} corresponding to the input
	 */
	public static PropertyDocument makePropertyDocument(PropertyIdValue propertyId,
			DatatypeIdValue datatypeId) {
		return makePropertyDocument(propertyId, Collections.emptyList(),
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), datatypeId);
	}

	/**
	 * Creates a {@link PropertyDocument} with empty revision id. It might be
	 * more convenient to use the {@link PropertyDocumentBuilder} instead.
	 *
	 * @param propertyId
	 *            the id of the property that data is about
	 * @param labels
	 *            the list of labels of this property, with at most one label
	 *            for each language code
	 * @param descriptions
	 *            the list of descriptions of this property, with at most one
	 *            description for each language code
	 * @param aliases
	 *            the list of aliases of this property
	 * @param statementGroups
	 *            the list of statement groups of this property; all of them
	 *            must have the given propertyId as their subject
	 * @param datatypeId
	 *            the datatype of that property
	 * @return a {@link PropertyDocument} corresponding to the input
	 */
	public static PropertyDocument makePropertyDocument(
			PropertyIdValue propertyId,
			List<MonolingualTextValue> labels,
			List<MonolingualTextValue> descriptions,
			List<MonolingualTextValue> aliases,
			List<StatementGroup> statementGroups,
			DatatypeIdValue datatypeId) {
		return makePropertyDocument(propertyId, labels, descriptions, aliases,
				statementGroups, datatypeId, 0);
	}

	/**
	 * Creates a {@link PropertyDocument}. It might be more convenient to use
	 * the {@link PropertyDocumentBuilder} instead.
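	 * <p>
	 * For illustration, a minimal sketch (the label is hypothetical, and
	 * "wikibase-item" is assumed to be the JSON datatype string of item-valued
	 * properties):
	 *
	 * <pre>
	 * PropertyDocument document = Datamodel.makePropertyDocument(
	 * 		Datamodel.makeWikidataPropertyIdValue("P31"),
	 * 		Collections.singletonList(Datamodel.makeMonolingualTextValue("instance of", "en")),
	 * 		Collections.emptyList(), Collections.emptyList(), Collections.emptyList(),
	 * 		Datamodel.makeDatatypeIdValueFromJsonString("wikibase-item"), 0);
	 * </pre>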
	 *
	 * @param propertyId
	 *            the id of the property that data is about
	 * @param labels
	 *            the list of labels of this property, with at most one label
	 *            for each language code
	 * @param descriptions
	 *            the list of descriptions of this property, with at most one
	 *            description for each language code
	 * @param aliases
	 *            the list of aliases of this property
	 * @param statementGroups
	 *            the list of statement groups of this property; all of them
	 *            must have the given propertyId as their subject
	 * @param datatypeId
	 *            the datatype of that property
	 * @param revisionId
	 *            the revision ID or 0 if not known; see
	 *            {@link EntityDocument#getRevisionId()}
	 * @return a {@link PropertyDocument} corresponding to the input
	 */
	public static PropertyDocument makePropertyDocument(
			PropertyIdValue propertyId,
			List<MonolingualTextValue> labels,
			List<MonolingualTextValue> descriptions,
			List<MonolingualTextValue> aliases,
			List<StatementGroup> statementGroups,
			DatatypeIdValue datatypeId,
			long revisionId) {
		return factory.getPropertyDocument(propertyId, labels, descriptions,
				aliases, statementGroups, datatypeId, revisionId);
	}

	/**
	 * Creates an empty {@link ItemDocument}.
	 *
	 * @param itemIdValue
	 *            the id of the item that data is about
	 * @return an {@link ItemDocument} corresponding to the input
	 */
	public static ItemDocument makeItemDocument(ItemIdValue itemIdValue) {
		return makeItemDocument(itemIdValue, Collections.emptyList(),
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), Collections.emptyMap(), 0);
	}

	/**
	 * Creates an {@link ItemDocument} with empty revision id. It might be more
	 * convenient to use the {@link ItemDocumentBuilder} instead.
	 *
	 * @param itemIdValue
	 *            the id of the item that data is about
	 * @param labels
	 *            the list of labels of this item, with at most one label for
	 *            each language code
	 * @param descriptions
	 *            the list of descriptions of this item, with at most one
	 *            description for each language code
	 * @param aliases
	 *            the list of aliases of this item
	 * @param statementGroups
	 *            the list of statement groups of this item; all of them must
	 *            have the given itemIdValue as their subject
	 * @param siteLinks
	 *            the sitelinks of this item by site key
	 * @return an {@link ItemDocument} corresponding to the input
	 */
	public static ItemDocument makeItemDocument(ItemIdValue itemIdValue,
			List<MonolingualTextValue> labels,
			List<MonolingualTextValue> descriptions,
			List<MonolingualTextValue> aliases,
			List<StatementGroup> statementGroups,
			Map<String, SiteLink> siteLinks) {
		return makeItemDocument(itemIdValue, labels, descriptions, aliases,
				statementGroups, siteLinks, 0);
	}

	/**
	 * Creates an {@link ItemDocument}. It might be more convenient to use the
	 * {@link ItemDocumentBuilder} instead.
	 *
	 * @param itemIdValue
	 *            the id of the item that data is about
	 * @param labels
	 *            the list of labels of this item, with at most one label for
	 *            each language code
	 * @param descriptions
	 *            the list of descriptions of this item, with at most one
	 *            description for each language code
	 * @param aliases
	 *            the list of aliases of this item
	 * @param statementGroups
	 *            the list of statement groups of this item; all of them must
	 *            have the given itemIdValue as their subject
	 * @param siteLinks
	 *            the sitelinks of this item by site key
	 * @param revisionId
	 *            the revision ID or 0 if not known; see
	 *            {@link EntityDocument#getRevisionId()}
	 * @return an {@link ItemDocument} corresponding to the input
	 */
	public static ItemDocument makeItemDocument(ItemIdValue itemIdValue,
			List<MonolingualTextValue> labels,
			List<MonolingualTextValue> descriptions,
			List<MonolingualTextValue> aliases,
			List<StatementGroup> statementGroups,
			Map<String, SiteLink> siteLinks,
			long revisionId) {
		return factory.getItemDocument(itemIdValue, labels, descriptions,
				aliases, statementGroups, siteLinks, revisionId);
	}

	/**
	 * Creates an empty {@link LexemeDocument}.
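	 * <p>
	 * For illustration, a minimal sketch (the entity ids are hypothetical; the
	 * lexical category and language are given as items):
	 *
	 * <pre>
	 * LexemeDocument lexeme = Datamodel.makeLexemeDocument(
	 * 		Datamodel.makeWikidataLexemeIdValue("L3872"),
	 * 		Datamodel.makeWikidataItemIdValue("Q1084"), // lexical category, e.g. noun
	 * 		Datamodel.makeWikidataItemIdValue("Q1860"), // language, e.g. English
	 * 		Collections.singletonList(Datamodel.makeMonolingualTextValue("duck", "en")));
	 * </pre>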
	 *
	 * @param lexemeIdValue
	 *            the id of the lexeme that data is about
	 * @param lexicalCategory
	 *            the lexical category to which the lexeme belongs
	 *            (noun, verb...)
	 * @param language
	 *            the language to which the lexeme belongs
	 *            (French, British English...)
	 * @param lemmas
	 *            the human readable representations of the lexeme
	 * @return a {@link LexemeDocument} corresponding to the input
	 */
	public static LexemeDocument makeLexemeDocument(LexemeIdValue lexemeIdValue,
			ItemIdValue lexicalCategory, ItemIdValue language,
			List<MonolingualTextValue> lemmas) {
		return makeLexemeDocument(lexemeIdValue, lexicalCategory, language,
				lemmas, Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList());
	}

	/**
	 * Creates a {@link LexemeDocument}.
	 *
	 * @param lexemeIdValue
	 *            the id of the lexeme that data is about
	 * @param lexicalCategory
	 *            the lexical category to which the lexeme belongs
	 *            (noun, verb...)
	 * @param language
	 *            the language to which the lexeme belongs
	 *            (French, British English...)
	 * @param lemmas
	 *            the human readable representations of the lexeme
	 * @param statementGroups
	 *            the list of statement groups of this lexeme; all of them must
	 *            have the given lexemeIdValue as their subject
	 * @param forms
	 *            the forms of the lexeme
	 * @param senses
	 *            the senses of the lexeme
	 * @return a {@link LexemeDocument} corresponding to the input
	 */
	public static LexemeDocument makeLexemeDocument(LexemeIdValue lexemeIdValue,
			ItemIdValue lexicalCategory, ItemIdValue language,
			List<MonolingualTextValue> lemmas,
			List<StatementGroup> statementGroups,
			List<FormDocument> forms,
			List<SenseDocument> senses) {
		return factory.getLexemeDocument(lexemeIdValue, lexicalCategory,
				language, lemmas, statementGroups, forms, senses, 0);
	}

	/**
	 * Creates a {@link FormDocument}.
	 *
	 * If you plan to add this form to a specific lexeme,
	 * it might be easier to use {@link LexemeDocument#createForm(List)}.
	 *
	 * @param formIdValue
	 *            the id of the form that data is about
	 * @param representations
	 *            the list of representations of this form, with at most one
	 *            representation for each language code
	 * @param grammaticalFeatures
	 *            the grammatical features of the form
	 * @param statementGroups
	 *            the list of statement groups of this form; all of them must
	 *            have the given formIdValue as their subject
	 * @return a {@link FormDocument} corresponding to the input
	 */
	public static FormDocument makeFormDocument(FormIdValue formIdValue,
			List<MonolingualTextValue> representations,
			List<ItemIdValue> grammaticalFeatures,
			List<StatementGroup> statementGroups) {
		return factory.getFormDocument(formIdValue, representations,
				grammaticalFeatures, statementGroups, 0);
	}

	/**
	 * Creates a {@link SenseDocument}.
	 *
	 * If you plan to add this sense to a specific lexeme,
	 * it might be easier to use {@link LexemeDocument#createSense(List)}.
	 *
	 * @param senseIdValue
	 *            the id of the sense that data is about
	 * @param glosses
	 *            the list of glosses of this sense, with at most one
	 *            gloss for each language code
	 * @param statementGroups
	 *            the list of statement groups of this sense; all of them must
	 *            have the given senseIdValue as their subject
	 * @return a {@link SenseDocument} corresponding to the input
	 */
	public static SenseDocument makeSenseDocument(SenseIdValue senseIdValue,
			List<MonolingualTextValue> glosses,
			List<StatementGroup> statementGroups) {
		return factory.getSenseDocument(senseIdValue, glosses, statementGroups, 0);
	}

	/**
	 * Creates an empty {@link MediaInfoDocument}.
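	 * <p>
	 * For illustration (a hypothetical media info id):
	 *
	 * <pre>
	 * MediaInfoDocument document = Datamodel.makeMediaInfoDocument(
	 * 		Datamodel.makeWikimediaCommonsMediaInfoIdValue("M42"));
	 * </pre>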
	 *
	 * @param mediaInfoIdValue
	 *            the id of the media that data is about
	 * @return an {@link MediaInfoDocument} corresponding to the input
	 */
	public static MediaInfoDocument makeMediaInfoDocument(MediaInfoIdValue mediaInfoIdValue) {
		return makeMediaInfoDocument(mediaInfoIdValue, Collections.emptyList(),
				Collections.emptyList());
	}

	/**
	 * Creates a {@link MediaInfoDocument}.
	 *
	 * @param mediaInfoIdValue
	 *            the id of the media that data is about
	 * @param labels
	 *            the list of labels of this media, with at most one label for
	 *            each language code
	 * @param statementGroups
	 *            the list of statement groups of this media info; all of them
	 *            must have the given mediaInfoIdValue as their subject
	 * @return an {@link MediaInfoDocument} corresponding to the input
	 */
	public static MediaInfoDocument makeMediaInfoDocument(
			MediaInfoIdValue mediaInfoIdValue,
			List<MonolingualTextValue> labels,
			List<StatementGroup> statementGroups) {
		return factory.getMediaInfoDocument(mediaInfoIdValue, labels,
				statementGroups, 0);
	}

	/**
	 * Creates a new {@link TermUpdate}. It might be more convenient to use
	 * {@link TermUpdateBuilder}.
	 *
	 * @param modified
	 *            added or changed values
	 * @param removed
	 *            language codes of removed values
	 * @return new {@link TermUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static TermUpdate makeTermUpdate(
			Collection<MonolingualTextValue> modified,
			Collection<String> removed) {
		return factory.getTermUpdate(modified, removed);
	}

	/**
	 * Creates a new {@link AliasUpdate}. Callers should specify either the
	 * {@code recreated} parameter or the {@code added} and {@code removed}
	 * parameters, because combining the two update approaches is not possible.
	 * To remove all aliases, pass an empty list in the {@code recreated}
	 * parameter.
	 * <p>
	 * In most cases, it is more convenient to use {@link #makeAliasUpdate(List)},
	 * {@link #makeAliasUpdate(List, Collection)}, or {@link AliasUpdateBuilder}.
	 *
	 * @param recreated
	 *            new list of aliases that completely replaces the old ones or
	 *            {@code null} to not recreate aliases
	 * @param added
	 *            aliases added in this update or empty collection for no additions
	 * @param removed
	 *            aliases removed in this update or empty collection for no removals
	 * @return new {@link AliasUpdate}
	 * @throws NullPointerException
	 *             if {@code added}, {@code removed}, or any alias is {@code null}
	 * @throws IllegalArgumentException
	 *             if given an invalid combination of parameters
	 */
	public static AliasUpdate makeAliasUpdate(
			List<MonolingualTextValue> recreated,
			List<MonolingualTextValue> added,
			Collection<MonolingualTextValue> removed) {
		return factory.getAliasUpdate(recreated, added, removed);
	}

	/**
	 * Creates a new {@link AliasUpdate} that completely replaces all aliases.
	 *
	 * @param recreated
	 *            new list of aliases that completely replaces the old ones
	 * @return new {@link AliasUpdate}
	 * @throws NullPointerException
	 *             if the parameter or any alias is {@code null}
	 * @throws IllegalArgumentException
	 *             if language codes are inconsistent or there are alias duplicates
	 */
	public static AliasUpdate makeAliasUpdate(List<MonolingualTextValue> recreated) {
		Objects.requireNonNull(recreated, "New list of aliases must be provided.");
		return factory.getAliasUpdate(recreated, Collections.emptyList(),
				Collections.emptyList());
	}

	/**
	 * Creates a new {@link AliasUpdate} that adds and/or removes some of the
	 * aliases. It might be more convenient to use {@link AliasUpdateBuilder}.
	 *
	 * @param added
	 *            aliases to add
	 * @param removed
	 *            aliases to remove
	 * @return new {@link AliasUpdate}
	 * @throws NullPointerException
	 *             if any parameter or any alias is {@code null}
	 * @throws IllegalArgumentException
	 *             if language codes are inconsistent or there are alias duplicates
	 */
	public static AliasUpdate makeAliasUpdate(
			List<MonolingualTextValue> added,
			Collection<MonolingualTextValue> removed) {
		return factory.getAliasUpdate(null, added, removed);
	}

	/**
	 * Creates a new {@link StatementUpdate}. It might be more convenient to use
	 * {@link StatementUpdateBuilder}.
	 *
	 * @param added
	 *            added statements
	 * @param replaced
	 *            replaced statements
	 * @param removed
	 *            IDs of removed statements
	 * @return new {@link StatementUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static StatementUpdate makeStatementUpdate(
			Collection<Statement> added,
			Collection<Statement> replaced,
			Collection<String> removed) {
		return factory.getStatementUpdate(added, replaced, removed);
	}

	/**
	 * Creates a new {@link SenseUpdate}. It might be more convenient to use
	 * {@link SenseUpdateBuilder}.
	 *
	 * @param entityId
	 *            ID of the sense that is to be updated
	 * @param revisionId
	 *            base sense revision to be updated or zero if not available
	 * @param glosses
	 *            changes in sense glosses or {@code null} for no change
	 * @param statements
	 *            changes in entity statements, possibly empty
	 * @return new {@link SenseUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static SenseUpdate makeSenseUpdate(
			SenseIdValue entityId,
			long revisionId,
			TermUpdate glosses,
			StatementUpdate statements) {
		return factory.getSenseUpdate(entityId, revisionId, glosses, statements);
	}

	/**
	 * Creates a new {@link FormUpdate}. It might be more convenient to use
	 * {@link FormUpdateBuilder}.
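	 * <p>
	 * For illustration, a sketch that changes one representation and leaves
	 * everything else untouched (the ids and values are hypothetical):
	 *
	 * <pre>
	 * FormUpdate update = Datamodel.makeFormUpdate(
	 * 		Datamodel.makeWikidataFormIdValue("L3872-F2"), 0,
	 * 		Datamodel.makeTermUpdate(
	 * 				Collections.singletonList(Datamodel.makeMonolingualTextValue("ducks", "en")),
	 * 				Collections.emptyList()),
	 * 		null, // grammatical features unchanged
	 * 		Datamodel.makeStatementUpdate(Collections.emptyList(),
	 * 				Collections.emptyList(), Collections.emptyList()));
	 * </pre>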
	 *
	 * @param entityId
	 *            ID of the form that is to be updated
	 * @param revisionId
	 *            base form revision to be updated or zero if not available
	 * @param representations
	 *            changes in form representations or {@code null} for no change
	 * @param grammaticalFeatures
	 *            new grammatical features of the form or {@code null} for no change
	 * @param statements
	 *            changes in entity statements, possibly empty
	 * @return new {@link FormUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static FormUpdate makeFormUpdate(
			FormIdValue entityId,
			long revisionId,
			TermUpdate representations,
			Collection<ItemIdValue> grammaticalFeatures,
			StatementUpdate statements) {
		return factory.getFormUpdate(entityId, revisionId, representations,
				grammaticalFeatures, statements);
	}

	/**
	 * Creates a new {@link LexemeUpdate}. It might be more convenient to use
	 * {@link LexemeUpdateBuilder}.
	 *
	 * @param entityId
	 *            ID of the lexeme that is to be updated
	 * @param revisionId
	 *            base lexeme revision to be updated or zero if not available
	 * @param language
	 *            new lexeme language or {@code null} for no change
	 * @param lexicalCategory
	 *            new lexical category of the lexeme or {@code null} for no change
	 * @param lemmas
	 *            changes in lemmas or {@code null} for no change
	 * @param statements
	 *            changes in entity statements, possibly empty
	 * @param addedSenses
	 *            added senses
	 * @param updatedSenses
	 *            updated senses
	 * @param removedSenses
	 *            IDs of removed senses
	 * @param addedForms
	 *            added forms
	 * @param updatedForms
	 *            updated forms
	 * @param removedForms
	 *            IDs of removed forms
	 * @return new {@link LexemeUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static LexemeUpdate makeLexemeUpdate(
			LexemeIdValue entityId,
			long revisionId,
			ItemIdValue language,
			ItemIdValue lexicalCategory,
			TermUpdate lemmas,
			StatementUpdate statements,
			Collection<SenseDocument> addedSenses,
			Collection<SenseUpdate> updatedSenses,
			Collection<SenseIdValue> removedSenses,
			Collection<FormDocument> addedForms,
			Collection<FormUpdate> updatedForms,
			Collection<FormIdValue> removedForms) {
		return factory.getLexemeUpdate(entityId, revisionId, language,
				lexicalCategory, lemmas, statements, addedSenses, updatedSenses,
				removedSenses, addedForms, updatedForms, removedForms);
	}

	/**
	 * Creates a new {@link MediaInfoUpdate}. It might be more convenient to use
	 * {@link MediaInfoUpdateBuilder}.
	 *
	 * @param entityId
	 *            ID of the media that is to be updated
	 * @param revisionId
	 *            base media revision to be updated or zero if not available
	 * @param labels
	 *            changes in entity labels or {@code null} for no change
	 * @param statements
	 *            changes in entity statements, possibly empty
	 * @return new {@link MediaInfoUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static MediaInfoUpdate makeMediaInfoUpdate(
			MediaInfoIdValue entityId,
			long revisionId,
			TermUpdate labels,
			StatementUpdate statements) {
		return factory.getMediaInfoUpdate(entityId, revisionId, labels, statements);
	}

	/**
	 * Creates a new {@link ItemUpdate}. It might be more convenient to use
	 * {@link ItemUpdateBuilder}.
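	 * <p>
	 * For illustration, a sketch that sets one label and leaves everything else
	 * untouched (the id and label are hypothetical):
	 *
	 * <pre>
	 * ItemUpdate update = Datamodel.makeItemUpdate(
	 * 		Datamodel.makeWikidataItemIdValue("Q42"), 0,
	 * 		Datamodel.makeTermUpdate(
	 * 				Collections.singletonList(Datamodel.makeMonolingualTextValue("Douglas Adams", "en")),
	 * 				Collections.emptyList()),
	 * 		null, // descriptions unchanged
	 * 		Collections.emptyMap(),
	 * 		Datamodel.makeStatementUpdate(Collections.emptyList(),
	 * 				Collections.emptyList(), Collections.emptyList()),
	 * 		Collections.emptyList(), Collections.emptyList());
	 * </pre>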
	 *
	 * @param entityId
	 *            ID of the item that is to be updated
	 * @param revisionId
	 *            base item revision to be updated or zero if not available
	 * @param labels
	 *            changes in entity labels or {@code null} for no change
	 * @param descriptions
	 *            changes in entity descriptions or {@code null} for no change
	 * @param aliases
	 *            changes in entity aliases, possibly empty
	 * @param statements
	 *            changes in entity statements, possibly empty
	 * @param modifiedSiteLinks
	 *            added or replaced site links
	 * @param removedSiteLinks
	 *            site keys of removed site links
	 * @return new {@link ItemUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static ItemUpdate makeItemUpdate(
			ItemIdValue entityId,
			long revisionId,
			TermUpdate labels,
			TermUpdate descriptions,
			Map<String, AliasUpdate> aliases,
			StatementUpdate statements,
			Collection<SiteLink> modifiedSiteLinks,
			Collection<String> removedSiteLinks) {
		return factory.getItemUpdate(entityId, revisionId, labels, descriptions,
				aliases, statements, modifiedSiteLinks, removedSiteLinks);
	}

	/**
	 * Creates a new {@link PropertyUpdate}. It might be more convenient to use
	 * {@link PropertyUpdateBuilder}.
	 *
	 * @param entityId
	 *            ID of the property entity that is to be updated
	 * @param revisionId
	 *            base property revision to be updated or zero if not available
	 * @param labels
	 *            changes in entity labels or {@code null} for no change
	 * @param descriptions
	 *            changes in entity descriptions or {@code null} for no change
	 * @param aliases
	 *            changes in entity aliases, possibly empty
	 * @param statements
	 *            changes in entity statements, possibly empty
	 * @return new {@link PropertyUpdate}
	 * @throws NullPointerException
	 *             if any required parameter is {@code null}
	 * @throws IllegalArgumentException
	 *             if any parameters or their combination is invalid
	 */
	public static PropertyUpdate makePropertyUpdate(
			PropertyIdValue entityId,
			long revisionId,
			TermUpdate labels,
			TermUpdate descriptions,
			Map<String, AliasUpdate> aliases,
			StatementUpdate statements) {
		return factory.getPropertyUpdate(entityId, revisionId, labels,
				descriptions, aliases, statements);
	}
}
DatamodelConverter.java000066400000000000000000000470311444772566300356450ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SnakVisitor; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.UnsupportedValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; /** * Class to re-create data model objects using a specified factory. This is * provided in place of having copy constructors in each and every * implementation of the data model. Note that data model objects are usually * immutable and do not need to be copied. The use of this class is to convert * data objects to a specific implementation, as might be needed for some * specialized purpose (e.g., for JSON export). * * @author Markus Kroetzsch * */ public class DatamodelConverter implements SnakVisitor, ValueVisitor { static final Logger logger = LoggerFactory .getLogger(DatamodelConverter.class); /** * The factory to use for copying. */ private final DataObjectFactory dataObjectFactory; /** * Constructor. * * @param dataObjectFactory * the factory to use for creating new objects */ public DatamodelConverter(DataObjectFactory dataObjectFactory) { this.dataObjectFactory = dataObjectFactory; } /** * Copies an {@link ItemIdValue}. * * @param object * object to copy * @return copied object */ public ItemIdValue copy(ItemIdValue object) { return dataObjectFactory.getItemIdValue(object.getId(), object.getSiteIri()); } /** * Copies a {@link PropertyIdValue}. 
* * @param object * object to copy * @return copied object */ public PropertyIdValue copy(PropertyIdValue object) { return dataObjectFactory.getPropertyIdValue(object.getId(), object.getSiteIri()); } /** * Copies a {@link LexemeIdValue}. * * @param object * object to copy * @return copied object */ public LexemeIdValue copy(LexemeIdValue object) { return dataObjectFactory.getLexemeIdValue(object.getId(), object.getSiteIri()); } /** * Copies a {@link FormIdValue}. * * @param object * object to copy * @return copied object */ public FormIdValue copy(FormIdValue object) { return dataObjectFactory.getFormIdValue(object.getId(), object.getSiteIri()); } /** * Copies a {@link SenseIdValue}. * * @param object * object to copy * @return copied object */ public SenseIdValue copy(SenseIdValue object) { return dataObjectFactory.getSenseIdValue(object.getId(), object.getSiteIri()); } /** * Copies a {@link MediaInfoIdValue}. * * @param object * object to copy * @return copied object */ public MediaInfoIdValue copy(MediaInfoIdValue object) { return dataObjectFactory.getMediaInfoIdValue(object.getId(), object.getSiteIri()); } /** * Copies a {@link DatatypeIdValue}. * * @param object * object to copy * @return copied object */ public DatatypeIdValue copy(DatatypeIdValue object) { return dataObjectFactory.getDatatypeIdValueFromJsonId(object.getJsonString()); } /** * Copies a {@link TimeValue}. * * @param object * object to copy * @return copied object */ public TimeValue copy(TimeValue object) { return dataObjectFactory.getTimeValue(object.getYear(), object.getMonth(), object.getDay(), object.getHour(), object.getMinute(), object.getSecond(), object.getPrecision(), object.getBeforeTolerance(), object.getAfterTolerance(), object.getTimezoneOffset(), object.getPreferredCalendarModel()); } /** * Copies a {@link GlobeCoordinatesValue}. * * @param object * object to copy * @return copied object */ public GlobeCoordinatesValue copy(GlobeCoordinatesValue object) { return dataObjectFactory.getGlobeCoordinatesValue( object.getLatitude(), object.getLongitude(), object.getPrecision(), object.getGlobe()); } /** * Copies a {@link StringValue}. * * @param object * object to copy * @return copied object */ public StringValue copy(StringValue object) { return dataObjectFactory.getStringValue(object.getString()); } /** * Copies a {@link MonolingualTextValue} * @param object * object to copy * @return copied object */ public MonolingualTextValue copy(MonolingualTextValue object) { return dataObjectFactory.getMonolingualTextValue(object.getText(), object.getLanguageCode()); } /** * Copies a {@link QuantityValue}. * * @param object * object to copy * @return copied object */ public QuantityValue copy(QuantityValue object) { return dataObjectFactory.getQuantityValue( object.getNumericValue(), object.getLowerBound(), object.getUpperBound(), object.getUnitItemId()); } /** * Copies an {@link UnsupportedValue}. * * @param object * object to copy * @return copied object */ public UnsupportedValue copy(UnsupportedValue object) { // unsupported values cannot be copied! return object; } /** * Copies a {@link Snak}. 
	 *
	 * @param snak
	 *            object to copy
	 * @return the copied object
	 */
	private Snak copy(Snak snak) {
		if (snak instanceof ValueSnak) {
			return copy((ValueSnak) snak);
		} else if (snak instanceof NoValueSnak) {
			return copy((NoValueSnak) snak);
		} else if (snak instanceof SomeValueSnak) {
			return copy((SomeValueSnak) snak);
		} else {
			throw new IllegalArgumentException(
					"I don't know how to copy snaks of type " + snak.getClass());
		}
	}

	/**
	 * Copies a {@link ValueSnak}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public ValueSnak copy(ValueSnak object) {
		return dataObjectFactory.getValueSnak(copy(object.getPropertyId()),
				copyValue(object.getValue()));
	}

	/**
	 * Copies a {@link SomeValueSnak}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public SomeValueSnak copy(SomeValueSnak object) {
		return dataObjectFactory.getSomeValueSnak(copy(object.getPropertyId()));
	}

	/**
	 * Copies a {@link NoValueSnak}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public NoValueSnak copy(NoValueSnak object) {
		return dataObjectFactory.getNoValueSnak(copy(object.getPropertyId()));
	}

	/**
	 * Copies a {@link SnakGroup}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public SnakGroup copy(SnakGroup object) {
		List<Snak> snaks = new ArrayList<>(object.size());
		for (Snak snak : object) {
			snaks.add(copy(snak));
		}
		return dataObjectFactory.getSnakGroup(snaks);
	}

	/**
	 * Copies a {@link Claim}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public Claim copy(Claim object) {
		return dataObjectFactory.getClaim(
				(EntityIdValue) visit(object.getSubject()),
				copy(object.getMainSnak()), copy(object.getQualifiers()));
	}

	/**
	 * Copies a {@link Reference}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public Reference copy(Reference object) {
		return dataObjectFactory.getReference(copy(object.getSnakGroups()));
	}

	/**
	 * Copies a {@link Statement}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public Statement copy(Statement object) {
		return dataObjectFactory.getStatement(
				(EntityIdValue) visit(object.getSubject()),
				copy(object.getMainSnak()), copy(object.getQualifiers()),
				copyReferences(object.getReferences()), object.getRank(),
				object.getStatementId());
	}

	/**
	 * Copies a {@link StatementGroup}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public StatementGroup copy(StatementGroup object) {
		List<Statement> statements = new ArrayList<>(object.getStatements().size());
		for (Statement statement : object.getStatements()) {
			statements.add(copy(statement));
		}
		return dataObjectFactory.getStatementGroup(statements);
	}

	/**
	 * Copies a {@link SiteLink}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public SiteLink copy(SiteLink object) {
		return dataObjectFactory.getSiteLink(object.getPageTitle(),
				object.getSiteKey(), object.getBadges());
	}

	/**
	 * Copies a {@link PropertyDocument}.
	 *
	 * @param object
	 *            object to copy
	 * @return copied object
	 */
	public PropertyDocument copy(PropertyDocument object) {
		return dataObjectFactory.getPropertyDocument(
				copy(object.getEntityId()),
				copyMonoLingualTextValues(object.getLabels().values()),
				copyMonoLingualTextValues(object.getDescriptions().values()),
				copyAliasMap(object.getAliases()),
				copyStatementGroups(object.getStatementGroups()),
				copy(object.getDatatype()),
				object.getRevisionId());
	}

	/**
	 * Copies an {@link ItemDocument}.
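	 * <p>
	 * For illustration, a sketch that copies a document into the default
	 * implementation (assuming some existing {@code itemDocument}):
	 *
	 * <pre>
	 * DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl());
	 * ItemDocument copy = converter.copy(itemDocument);
	 * </pre>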
* * @param object * object to copy * @return copied object */ public ItemDocument copy(ItemDocument object) { return dataObjectFactory.getItemDocument( copy(object.getEntityId()), copyMonoLingualTextValues(object.getLabels().values()), copyMonoLingualTextValues(object.getDescriptions().values()), copyAliasMap(object.getAliases()), copyStatementGroups(object.getStatementGroups()), copySiteLinks(object.getSiteLinks()), object.getRevisionId()); } /** * Copies a {@link MediaInfoDocument}. * * @param object * object to copy * @return copied object */ public MediaInfoDocument copy(MediaInfoDocument object) { return dataObjectFactory.getMediaInfoDocument( copy(object.getEntityId()), copyMonoLingualTextValues(object.getLabels().values()), copyStatementGroups(object.getStatementGroups()), object.getRevisionId()); } /** * Copies a {@link LexemeDocument}. * * @param object * object to copy * @return copied object */ public LexemeDocument copy(LexemeDocument object) { return dataObjectFactory.getLexemeDocument( copy(object.getEntityId()), copy(object.getLexicalCategory()), copy(object.getLanguage()), copyMonoLingualTextValues(object.getLemmas().values()), copyStatementGroups(object.getStatementGroups()), copyFormDocuments(object.getForms()), copySenseDocuments(object.getSenses()), object.getRevisionId()); } /** * Copies a {@link FormDocument}. * * @param object * object to copy * @return copied object */ public FormDocument copy(FormDocument object) { return dataObjectFactory.getFormDocument( copy(object.getEntityId()), copyMonoLingualTextValues(object.getRepresentations().values()), copyItemIds(object.getGrammaticalFeatures()), copyStatementGroups(object.getStatementGroups()), object.getRevisionId()); } /** * Copies a {@link SenseDocument}. * * @param object * object to copy * @return copied object */ public SenseDocument copy(SenseDocument object) { return dataObjectFactory.getSenseDocument( copy(object.getEntityId()), copyMonoLingualTextValues(object.getGlosses().values()), copyStatementGroups(object.getStatementGroups()), object.getRevisionId()); } /** * Copies a {@link Snak}. * * @param snak * object to copy * @return copied object */ public Snak copySnak(Snak snak) { return snak.accept(this); } @Override public Snak visit(ValueSnak snak) { return copy(snak); } @Override public Snak visit(SomeValueSnak snak) { return copy(snak); } @Override public Snak visit(NoValueSnak snak) { return copy(snak); } /** * Copies a {@link Value}. 
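	 * <p>
	 * For illustration (a converter constructed with some
	 * {@link DataObjectFactory}):
	 *
	 * <pre>
	 * Value copied = converter.copyValue(Datamodel.makeStringValue("example"));
	 * </pre>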
	 *
	 * @param value
	 *            object to copy
	 * @return copied object
	 */
	public Value copyValue(Value value) {
		return value.accept(this);
	}

	@Override
	public Value visit(EntityIdValue value) {
		if (value instanceof ItemIdValue) {
			return copy((ItemIdValue) value);
		} else if (value instanceof PropertyIdValue) {
			return copy((PropertyIdValue) value);
		} else if (value instanceof LexemeIdValue) {
			return copy((LexemeIdValue) value);
		} else if (value instanceof FormIdValue) {
			return copy((FormIdValue) value);
		} else if (value instanceof SenseIdValue) {
			return copy((SenseIdValue) value);
		} else if (value instanceof MediaInfoIdValue) {
			return copy((MediaInfoIdValue) value);
		} else {
			throw new UnsupportedOperationException(
					"Cannot convert entity id value: " + value.getClass());
		}
	}

	@Override
	public Value visit(GlobeCoordinatesValue value) {
		return copy(value);
	}

	@Override
	public Value visit(MonolingualTextValue value) {
		return copy(value);
	}

	@Override
	public Value visit(QuantityValue value) {
		return copy(value);
	}

	@Override
	public Value visit(StringValue value) {
		return copy(value);
	}

	@Override
	public Value visit(TimeValue value) {
		return copy(value);
	}

	@Override
	public Value visit(UnsupportedValue value) {
		return copy(value);
	}

	/**
	 * Converts a map of language keys to lists of {@link MonolingualTextValue}
	 * objects to a flat list of such objects, as required for the factory
	 * methods, where the values in the flat lists are new copies of the
	 * original values.
	 *
	 * @param aliasMap
	 *            the map to convert
	 * @return the flattened list with copied values
	 */
	private List<MonolingualTextValue> copyAliasMap(
			Map<String, List<MonolingualTextValue>> aliasMap) {
		List<MonolingualTextValue> aliases = new ArrayList<>();
		for (Entry<String, List<MonolingualTextValue>> langAliases : aliasMap.entrySet()) {
			for (MonolingualTextValue mtv : langAliases.getValue()) {
				aliases.add(copy(mtv));
			}
		}
		return aliases;
	}

	/**
	 * Copies a list of {@link SnakGroup} objects.
	 *
	 * @param snakGroups
	 *            object to copy
	 * @return the copied object
	 */
	private List<SnakGroup> copy(List<SnakGroup> snakGroups) {
		List<SnakGroup> result = new ArrayList<>(snakGroups.size());
		for (SnakGroup snakGroup : snakGroups) {
			result.add(copy(snakGroup));
		}
		return result;
	}

	/**
	 * Copies a list of {@link Reference} objects.
	 *
	 * @param references
	 *            object to copy
	 * @return the copied object
	 */
	private List<Reference> copyReferences(List<Reference> references) {
		List<Reference> result = new ArrayList<>(references.size());
		for (Reference reference : references) {
			result.add(dataObjectFactory
					.getReference(copy(reference.getSnakGroups())));
		}
		return result;
	}

	/**
	 * Copies a list of {@link StatementGroup} objects.
	 *
	 * @param statementGroups
	 *            object to copy
	 * @return the copied object
	 */
	private List<StatementGroup> copyStatementGroups(List<StatementGroup> statementGroups) {
		List<StatementGroup> result = new ArrayList<>(statementGroups.size());
		for (StatementGroup statementGroup : statementGroups) {
			result.add(copy(statementGroup));
		}
		return result;
	}

	/**
	 * Copies a collection of {@link MonolingualTextValue} objects.
	 *
	 * @param monoLingualTextValues
	 *            object to copy
	 * @return the copied object
	 */
	private List<MonolingualTextValue> copyMonoLingualTextValues(
			Collection<MonolingualTextValue> monoLingualTextValues) {
		List<MonolingualTextValue> result = new ArrayList<>(monoLingualTextValues.size());
		for (MonolingualTextValue mtv : monoLingualTextValues) {
			result.add(copy(mtv));
		}
		return result;
	}

	/**
	 * Copies a map of {@link SiteLink} objects.
	 *
	 * @param siteLinks
	 *            object to copy
	 * @return the copied object
	 */
	private Map<String, SiteLink> copySiteLinks(Map<String, SiteLink> siteLinks) {
		Map<String, SiteLink> result = new HashMap<>(siteLinks.size());
		for (Entry<String, SiteLink> entry : siteLinks.entrySet()) {
			result.put(entry.getKey(), copy(entry.getValue()));
		}
		return result;
	}
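	/*
	 * Usage sketch (illustrative addition, not part of the original source):
	 * the public copy methods above are reached through an instance of this
	 * converter class, which deep-copies objects into the data model produced
	 * by the wrapped factory. The concrete factory class
	 * DataObjectFactoryImpl from org.wikidata.wdtk.datamodel.implementation
	 * is an assumption here, and "statement" is a placeholder variable:
	 *
	 *   DatamodelConverter converter =
	 *       new DatamodelConverter(new DataObjectFactoryImpl());
	 *   Statement copied = converter.copy(statement);
	 *
	 * Because every nested snak, value, and reference is passed through the
	 * factory, the result contains no objects shared with the input.
	 */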
	/**
	 * Copies a list of item ids.
	 *
	 * @param ids
	 *            the list of item ids to copy
	 * @return the copied list
	 */
	private List<ItemIdValue> copyItemIds(List<ItemIdValue> ids) {
		return ids.stream()
				.map(id -> copy(id))
				.collect(Collectors.toList());
	}

	/**
	 * Copies a list of sense documents.
	 *
	 * @param senses
	 *            the list of sense documents to copy
	 * @return the copied list
	 */
	private List<SenseDocument> copySenseDocuments(List<SenseDocument> senses) {
		return senses.stream()
				.map(sense -> copy(sense))
				.collect(Collectors.toList());
	}

	/**
	 * Copies a list of form documents.
	 *
	 * @param forms
	 *            the list of form documents to copy
	 * @return the copied list
	 */
	private List<FormDocument> copyFormDocuments(List<FormDocument> forms) {
		return forms.stream()
				.map(form -> copy(form))
				.collect(Collectors.toList());
	}
}

Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/DatamodelFilter.java

package org.wikidata.wdtk.datamodel.helpers;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.wikidata.wdtk.datamodel.interfaces.*;

import java.util.*;

/**
 * This is a utility class that allows filtering of {@link EntityDocument}
 * objects using the data declared in a {@link DocumentDataFilter}.
 */
public class DatamodelFilter {

	private final DataObjectFactory dataObjectFactory;

	private final DocumentDataFilter filter;

	public DatamodelFilter(DataObjectFactory dataObjectFactory, DocumentDataFilter filter) {
		this.dataObjectFactory = dataObjectFactory;
		this.filter = filter;
	}

	public ItemDocument filter(ItemDocument item) {
		return dataObjectFactory.getItemDocument(
				item.getEntityId(),
				filterMonoLingualTextValues(item.getLabels().values()),
				filterMonoLingualTextValues(item.getDescriptions().values()),
				filterMonoLingualTextValues(flatten(item.getAliases().values())),
				filterStatementGroups(item.getStatementGroups()),
				filterSiteLinks(item.getSiteLinks()),
				item.getRevisionId()
		);
	}

	public PropertyDocument filter(PropertyDocument property) {
		return dataObjectFactory.getPropertyDocument(
				property.getEntityId(),
				filterMonoLingualTextValues(property.getLabels().values()),
				filterMonoLingualTextValues(property.getDescriptions().values()),
				filterMonoLingualTextValues(flatten(property.getAliases().values())),
				filterStatementGroups(property.getStatementGroups()),
				property.getDatatype(),
				property.getRevisionId()
		);
	}

	public MediaInfoDocument filter(MediaInfoDocument mediaInfo) {
		return dataObjectFactory.getMediaInfoDocument(
				mediaInfo.getEntityId(),
				filterMonoLingualTextValues(mediaInfo.getLabels().values()),
				filterStatementGroups(mediaInfo.getStatementGroups()),
				mediaInfo.getRevisionId()
		);
	}

	public LexemeDocument filter(LexemeDocument lexeme) {
		return dataObjectFactory.getLexemeDocument(
				lexeme.getEntityId(),
				lexeme.getLexicalCategory(),
				lexeme.getLanguage(),
				filterMonoLingualTextValues(lexeme.getLemmas().values()),
				filterStatementGroups(lexeme.getStatementGroups()),
				filterForms(lexeme.getForms()),
				filterSenses(lexeme.getSenses()),
				lexeme.getRevisionId()
		);
	}
	public FormDocument filter(FormDocument form) {
		return dataObjectFactory.getFormDocument(
				form.getEntityId(),
				filterMonoLingualTextValues(form.getRepresentations().values()),
				form.getGrammaticalFeatures(),
				filterStatementGroups(form.getStatementGroups()),
				form.getRevisionId()
		);
	}

	public SenseDocument filter(SenseDocument sense) {
		return dataObjectFactory.getSenseDocument(
				sense.getEntityId(),
				filterMonoLingualTextValues(sense.getGlosses().values()),
				filterStatementGroups(sense.getStatementGroups()),
				sense.getRevisionId()
		);
	}

	private List<FormDocument> filterForms(List<FormDocument> forms) {
		List<FormDocument> filtered = new ArrayList<>(forms.size());
		for (FormDocument form : forms) {
			filtered.add(filter(form));
		}
		return filtered;
	}

	private List<SenseDocument> filterSenses(List<SenseDocument> senses) {
		List<SenseDocument> filtered = new ArrayList<>(senses.size());
		for (SenseDocument sense : senses) {
			filtered.add(filter(sense));
		}
		return filtered;
	}

	private List<MonolingualTextValue> flatten(Collection<List<MonolingualTextValue>> values) {
		List<MonolingualTextValue> flattened = new ArrayList<>();
		for (Collection<MonolingualTextValue> part : values) {
			flattened.addAll(part);
		}
		return flattened;
	}

	private List<MonolingualTextValue> filterMonoLingualTextValues(Collection<MonolingualTextValue> values) {
		if (filter.getLanguageFilter() == null) {
			return new ArrayList<>(values);
		}
		if (filter.getLanguageFilter().isEmpty()) {
			return Collections.emptyList();
		}

		List<MonolingualTextValue> output = new ArrayList<>();
		for (MonolingualTextValue value : values) {
			if (filter.getLanguageFilter().contains(value.getLanguageCode())) {
				output.add(value);
			}
		}
		return output;
	}

	private List<StatementGroup> filterStatementGroups(List<StatementGroup> statementGroups) {
		if (filter.getPropertyFilter() == null) {
			return statementGroups;
		}
		if (filter.getPropertyFilter().isEmpty()) {
			return Collections.emptyList();
		}

		List<StatementGroup> output = new ArrayList<>(statementGroups.size());
		for (StatementGroup statementGroup : statementGroups) {
			if (filter.getPropertyFilter().contains(statementGroup.getProperty())) {
				output.add(statementGroup);
			}
		}
		return output;
	}

	private Map<String, SiteLink> filterSiteLinks(Map<String, SiteLink> siteLinks) {
		if (filter.getSiteLinkFilter() == null) {
			return siteLinks;
		}
		if (filter.getSiteLinkFilter().isEmpty()) {
			return Collections.emptyMap();
		}

		Map<String, SiteLink> result = new HashMap<>(siteLinks.size());
		for (Map.Entry<String, SiteLink> entry : siteLinks.entrySet()) {
			if (filter.getSiteLinkFilter().contains(entry.getKey())) {
				result.put(entry.getKey(), entry.getValue());
			}
		}
		return result;
	}
}
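/*
 * Usage sketch (illustrative addition, not part of the original source):
 * restricting an item document to its English terms. Note the convention in
 * the private helpers above: a null filter set keeps everything, an empty set
 * removes everything, and a non-empty set keeps only matching entries. The
 * setter names on DocumentDataFilter (such as setLanguageFilter) and the
 * factory class DataObjectFactoryImpl are assumptions here; "item" is a
 * placeholder variable:
 *
 *   DocumentDataFilter dataFilter = new DocumentDataFilter();
 *   dataFilter.setLanguageFilter(Collections.singleton("en"));
 *   DatamodelFilter datamodelFilter =
 *       new DatamodelFilter(new DataObjectFactoryImpl(), dataFilter);
 *   ItemDocument englishOnly = datamodelFilter.filter(item);
 */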
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/DatamodelMapper.java

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.wikidata.wdtk.datamodel.helpers;

import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;

/**
 * Same as Jackson's celebrated ObjectMapper, except that we add injections
 * necessary to fill fields not represented in JSON.
 *
 * @author antonin
 */
public class DatamodelMapper extends ObjectMapper {

	private static final long serialVersionUID = -236841297410109272L;

	/**
	 * Constructs a mapper with the given siteIri. This IRI will be used to
	 * fill all the siteIris of the entity ids contained in the payloads.
	 *
	 * @param siteIri
	 *            the ambient IRI of the Wikibase site
	 */
	public DatamodelMapper(String siteIri) {
		super();
		InjectableValues injection = new InjectableValues.Std()
				.addValue("siteIri", siteIri);
		this.setInjectableValues(injection);
		/*
		 * Support for Optional properties.
		 */
		registerModule(new Jdk8Module());
	}
}
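/*
 * Usage sketch (illustrative addition, not part of the original source):
 * deserializing entity JSON with the injected site IRI, using Jackson's
 * standard readValue() API. The concrete implementation class
 * ItemDocumentImpl from org.wikidata.wdtk.datamodel.implementation and the
 * "json" input string are assumptions here:
 *
 *   ObjectMapper mapper = new DatamodelMapper("http://www.wikidata.org/entity/");
 *   ItemDocument document = mapper.readValue(json, ItemDocumentImpl.class);
 *
 * Without the injected "siteIri" value, entity ids parsed from plain JSON
 * would lack their site IRI, which is not part of the serialized form.
 */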
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/EntityDocumentBuilder.java

package org.wikidata.wdtk.datamodel.helpers;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;
import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocument;

/**
 * Abstract base class for builders that construct {@link EntityDocument}
 * objects.
 *
 * @author Markus Kroetzsch
 *
 * @param <T>
 *            the type of the eventual concrete builder implementation
 * @param <O>
 *            the type of the object that is being built
 */
public abstract class EntityDocumentBuilder<T extends EntityDocumentBuilder<T, O>, O extends TermedStatementDocument>
		extends AbstractDataObjectBuilder {

	EntityIdValue entityIdValue;

	final ArrayList<MonolingualTextValue> labels = new ArrayList<>();
	final ArrayList<MonolingualTextValue> descriptions = new ArrayList<>();
	final ArrayList<MonolingualTextValue> aliases = new ArrayList<>();
	final HashMap<PropertyIdValue, ArrayList<Statement>> statements = new HashMap<>();

	long revisionId = 0;

	protected EntityDocumentBuilder(EntityIdValue entityIdValue) {
		this.entityIdValue = entityIdValue;
	}

	/**
	 * Starts constructing an EntityDocument from an initial version of this
	 * document.
	 *
	 * @param initialDocument
	 *            the initial version of the document to use
	 */
	protected EntityDocumentBuilder(O initialDocument) {
		this.entityIdValue = initialDocument.getEntityId();
		this.revisionId = initialDocument.getRevisionId();
		for (MonolingualTextValue label : initialDocument.getLabels().values()) {
			withLabel(label);
		}
		for (MonolingualTextValue description : initialDocument.getDescriptions().values()) {
			withDescription(description);
		}
		for (List<MonolingualTextValue> aliases : initialDocument.getAliases().values()) {
			for (MonolingualTextValue alias : aliases) {
				withAlias(alias);
			}
		}
		Iterator<Statement> iterator = initialDocument.getAllStatements();
		while (iterator.hasNext()) {
			withStatement(iterator.next());
		}
	}

	/**
	 * Sets the revision id for the constructed document. See
	 * {@link EntityDocument#getRevisionId()}.
	 *
	 * @param revisionId
	 *            the revision id
	 * @return builder object to continue construction
	 */
	public T withRevisionId(long revisionId) {
		this.revisionId = revisionId;
		return getThis();
	}

	/**
	 * Changes the entity value id for the constructed document. See
	 * {@link EntityDocument#getEntityId()}.
	 *
	 * @param entityId
	 *            the entity id
	 * @return builder object to continue construction
	 */
	public T withEntityId(EntityIdValue entityId) {
		this.entityIdValue = entityId;
		return getThis();
	}

	/**
	 * Adds an additional label to the constructed document.
	 *
	 * @param mtv
	 *            the additional label
	 * @return builder object to continue construction
	 */
	public T withLabel(MonolingualTextValue mtv) {
		this.labels.add(mtv);
		return getThis();
	}

	/**
	 * Adds an additional label to the constructed document.
	 *
	 * @param text
	 *            the text of the label
	 * @param languageCode
	 *            the language code of the label
	 * @return builder object to continue construction
	 */
	public T withLabel(String text, String languageCode) {
		withLabel(factory.getMonolingualTextValue(text, languageCode));
		return getThis();
	}

	/**
	 * Adds an additional description to the constructed document.
	 *
	 * @param mtv
	 *            the additional description
	 * @return builder object to continue construction
	 */
	public T withDescription(MonolingualTextValue mtv) {
		this.descriptions.add(mtv);
		return getThis();
	}

	/**
	 * Adds an additional description to the constructed document.
	 *
	 * @param text
	 *            the text of the description
	 * @param languageCode
	 *            the language code of the description
	 * @return builder object to continue construction
	 */
	public T withDescription(String text, String languageCode) {
		withDescription(factory.getMonolingualTextValue(text, languageCode));
		return getThis();
	}

	/**
	 * Adds an additional alias to the constructed document.
	 *
	 * @param mtv
	 *            the additional alias
	 * @return builder object to continue construction
	 */
	public T withAlias(MonolingualTextValue mtv) {
		this.aliases.add(mtv);
		return getThis();
	}

	/**
	 * Adds an additional alias to the constructed document.
	 *
	 * @param text
	 *            the text of the alias
	 * @param languageCode
	 *            the language code of the alias
	 * @return builder object to continue construction
	 */
	public T withAlias(String text, String languageCode) {
		withAlias(factory.getMonolingualTextValue(text, languageCode));
		return getThis();
	}

	/**
	 * Adds an additional statement to the constructed document.
	 *
	 * @param statement
	 *            the additional statement
	 * @return builder object to continue construction
	 */
	public T withStatement(Statement statement) {
		PropertyIdValue pid = statement.getMainSnak().getPropertyId();
		if (!statements.containsKey(pid)) {
			statements.put(pid, new ArrayList<>());
		}
		ArrayList<Statement> pidStatements = statements.get(pid);
		pidStatements.add(statement);
		return getThis();
	}

	/**
	 * Returns a list of {@link StatementGroup} objects for the currently
	 * stored statements.
	 *
	 * @return the list of statement groups
	 */
	protected List<StatementGroup> getStatementGroups() {
		ArrayList<StatementGroup> result = new ArrayList<>(this.statements.size());
		for (ArrayList<Statement> statementList : this.statements.values()) {
			result.add(factory.getStatementGroup(statementList));
		}
		return result;
	}
}

Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/EntityUpdateBuilder.java

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.Objects; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; /** * Builder for incremental construction of {@link EntityUpdate} objects. */ public abstract class EntityUpdateBuilder { private final EntityIdValue entityId; private final long baseRevisionId; private final EntityDocument baseRevision; /** * Initializes new builder object for constructing update of entity with given * ID. * * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is as placeholder ID */ protected EntityUpdateBuilder(EntityIdValue entityId, long revisionId) { Objects.requireNonNull(entityId, "Entity ID cannot be null."); Validate.isTrue(!entityId.isPlaceholder(), "Cannot create update for placeholder entity ID."); this.entityId = entityId; this.baseRevisionId = revisionId; baseRevision = null; } /** * Initializes new builder object for constructing update of given base entity * revision. * * @param revision * base entity revision to be updated * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has a placeholder ID */ protected EntityUpdateBuilder(EntityDocument revision) { Objects.requireNonNull(revision, "Base entity revision cannot be null."); Validate.isTrue(!revision.getEntityId().isPlaceholder(), "Cannot create update for placeholder entity ID."); entityId = revision.getEntityId(); baseRevision = revision; baseRevisionId = baseRevision.getRevisionId(); } /** * Creates new builder object for constructing update of entity with given * revision ID. *
<p>
* Supported entity IDs include {@link ItemIdValue}, {@link PropertyIdValue}, * {@link LexemeIdValue}, {@link FormIdValue}, {@link SenseIdValue}, and * {@link MediaInfoIdValue}. * * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it's a placeholder * ID */ public static EntityUpdateBuilder forBaseRevisionId(EntityIdValue entityId, long revisionId) { return StatementDocumentUpdateBuilder.forBaseRevisionId(entityId, revisionId); } /** * Creates new builder object for constructing update of entity with given ID. *
<p>
* Supported entity IDs include {@link ItemIdValue}, {@link PropertyIdValue}, * {@link LexemeIdValue}, {@link FormIdValue}, {@link SenseIdValue}, and * {@link MediaInfoIdValue}. * * @param entityId * ID of the entity that is to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it's a placeholder * ID */ public static EntityUpdateBuilder forEntityId(EntityIdValue entityId) { return forBaseRevisionId(entityId, 0); } /** * Creates new builder object for constructing update of given base entity * revision. Provided entity document might not represent the latest revision of * the entity as currently stored in Wikibase. It will be used for validation in * builder methods. If the document has revision ID, it will be used to detect * edit conflicts. *
<p>
* Supported entity types include {@link ItemDocument}, * {@link PropertyDocument}, {@link LexemeDocument}, {@link FormDocument}, * {@link SenseDocument}, and {@link MediaInfoDocument}. * * @param revision * base entity revision to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} is of unrecognized type or its ID is a * placeholder ID */ public static EntityUpdateBuilder forBaseRevision(EntityDocument revision) { Objects.requireNonNull(revision, "Base entity revision cannot be null."); if (revision instanceof StatementDocument) { return StatementDocumentUpdateBuilder.forBaseRevision((StatementDocument) revision); } throw new IllegalArgumentException("Unrecognized entity document type."); } /** * Returns ID of the entity that is being updated. * * @return ID of the updated entity */ EntityIdValue getEntityId() { return entityId; } /** * Returns base entity revision, upon which this update is built. If no base * revision was provided when this builder was constructed, this method returns * {@code null}. * * @return base entity revision that is being updated */ EntityDocument getBaseRevision() { return baseRevision; } /** * Returns base entity revision ID, upon which this update is built. If no base * revision nor base revision ID was provided when this builder was constructed, * this method returns zero. * * @return base entity revision ID */ long getBaseRevisionId() { return baseRevisionId; } /** * Creates new {@link EntityUpdate} object with contents of this builder object. * * @return constructed object */ public abstract EntityUpdate build(); } Equality.java000066400000000000000000000707531444772566300336670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.Objects; /** * Static class for checking the equality of arbitrary data objects using only * their interfaces. This can be used to implement the equals() method of * arbitrary interface implementations. More efficient solutions might exist if * the object that implements an interface is of a specific known type, but the * methods here could always be used as a fallback or default. *
<p>
* Note that it is desired that different implementations of the same interface * are treated as equal if they contain the same data. * * @author Markus Kroetzsch * */ public class Equality { /** * Returns {@code true} if the parameters are two {@link EntityIdValue} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsEntityIdValue(EntityIdValue o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof EntityIdValue)) { return false; } EntityIdValue other = (EntityIdValue) o2; return o1.getId().equals(other.getId()) && o1.getSiteIri().equals(other.getSiteIri()) && o1.getEntityType().equals(other.getEntityType()); } /** * Returns {@code true} if the parameters are two {@link DatatypeIdValue} objects * with exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsDatatypeIdValue(DatatypeIdValue o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof DatatypeIdValue && o1.getIri().equals(((DatatypeIdValue) o2).getIri()) && o1.getJsonString().equals(((DatatypeIdValue) o2).getJsonString()); } /** * Returns {@code true} if the parameters are two {@link TimeValue} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsTimeValue(TimeValue o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof TimeValue)) { return false; } TimeValue other = (TimeValue) o2; return o1.getYear() == other.getYear() && o1.getMonth() == other.getMonth() && o1.getDay() == other.getDay() && o1.getHour() == other.getHour() && o1.getMinute() == other.getMinute() && o1.getSecond() == other.getSecond() && o1.getPrecision() == other.getPrecision() && o1.getBeforeTolerance() == other.getBeforeTolerance() && o1.getAfterTolerance() == other.getAfterTolerance() && o1.getTimezoneOffset() == other.getTimezoneOffset() && o1.getPreferredCalendarModel().equals( other.getPreferredCalendarModel()); } /** * Returns {@code true} if the parameters are two {@link GlobeCoordinatesValue} * objects with exactly the same data. It does not matter if they are * different implementations of the interface as long as their content is * the same. 
* * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsGlobeCoordinatesValue(GlobeCoordinatesValue o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof GlobeCoordinatesValue)) { return false; } GlobeCoordinatesValue other = (GlobeCoordinatesValue) o2; return o1.getLatitude() == other.getLatitude() && o1.getLongitude() == other.getLongitude() && o1.getPrecision() == other.getPrecision() && o1.getGlobe().equals(other.getGlobe()); } /** * Returns {@code true} if the parameters are two {@link StringValue} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsStringValue(StringValue o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof StringValue && o1.getString().equals(((StringValue) o2).getString()); } /** * Returns {@code true} if the parameters are two {@link MonolingualTextValue} * objects with exactly the same data. It does not matter if they are * different implementations of the interface as long as their content is * the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsMonolingualTextValue(MonolingualTextValue o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof MonolingualTextValue)) { return false; } MonolingualTextValue other = (MonolingualTextValue) o2; return o1.getText().equals(other.getText()) && o1.getLanguageCode().equals(other.getLanguageCode()); } /** * Returns {@code true} if the parameters are two {@link QuantityValue} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsQuantityValue(QuantityValue o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof QuantityValue)) { return false; } QuantityValue other = (QuantityValue) o2; return o1.getNumericValue().equals(other.getNumericValue()) && Objects.equals(o1.getLowerBound(), other.getLowerBound()) && Objects.equals(o1.getUpperBound(), other.getUpperBound()) && o1.getUnit().equals(other.getUnit()); } /** * Returns {@code true} if the parameters are two {@link ValueSnak} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsValueSnak(ValueSnak o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof ValueSnak && o1.getPropertyId().equals(((ValueSnak) o2).getPropertyId()) && o1.getValue().equals(((ValueSnak) o2).getValue()); } /** * Returns {@code true} if the parameters are two {@link SomeValueSnak} objects with * exactly the same data. 
It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsSomeValueSnak(SomeValueSnak o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof SomeValueSnak && o1.getPropertyId().equals(((SomeValueSnak) o2).getPropertyId()); } /** * Returns {@code true} if the parameters are two {@link NoValueSnak} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsNoValueSnak(NoValueSnak o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof NoValueSnak && o1.getPropertyId().equals(((NoValueSnak) o2).getPropertyId()); } /** * Returns {@code true} if the parameters are two {@link SnakGroup} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsSnakGroup(SnakGroup o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof SnakGroup && o1.getSnaks().equals(((SnakGroup) o2).getSnaks()); } /** * Returns {@code true} if the parameters are two {@link Claim} objects with exactly * the same data. It does not matter if they are different implementations * of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsClaim(Claim o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof Claim)) { return false; } Claim other = (Claim) o2; return o1.getSubject().equals(other.getSubject()) && o1.getMainSnak().equals(other.getMainSnak()) && o1.getQualifiers().equals(other.getQualifiers()); } /** * Returns {@code true} if the parameters are two {@link Reference} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsReference(Reference o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof Reference && o1.getSnakGroups().equals(((Reference) o2).getSnakGroups()); } /** * Returns {@code true} if the parameters are two {@link Statement} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. 
* * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsStatement(Statement o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof Statement)) { return false; } Statement other = (Statement) o2; return o1.getSubject().equals(other.getSubject()) && o1.getMainSnak().equals(other.getMainSnak()) && o1.getQualifiers().equals(other.getQualifiers()) && o1.getReferences().equals(other.getReferences()) && o1.getRank() == other.getRank() && o1.getStatementId().equals(other.getStatementId()); } /** * Returns {@code true} if the parameters are two {@link StatementGroup} objects * with exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * Note that this includes the statement id, so that two statement objects * that "say the same thing" might still be unequal if they have different * ids. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsStatementGroup(StatementGroup o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } return o2 instanceof StatementGroup && o1.getStatements().equals(((StatementGroup) o2).getStatements()); } /** * Returns {@code true} if the parameters are two {@link SiteLink} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsSiteLink(SiteLink o1, Object o2) { if (o2 == null) { return false; } if (o2 == o1) { return true; } if (!(o2 instanceof SiteLink)) { return false; } SiteLink other = (SiteLink) o2; return o1.getPageTitle().equals(other.getPageTitle()) && o1.getSiteKey().equals(other.getSiteKey()) && o1.getBadges().equals(other.getBadges()); } /** * Returns {@code true} if the parameters are two {@link PropertyDocument} objects * with exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsPropertyDocument(PropertyDocument o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof PropertyDocument)) { return false; } PropertyDocument other = (PropertyDocument) o2; // Note: property id already compared by equalsTermedDocument() return equalsTermedDocument(o1, other) && o1.getDatatype().equals(other.getDatatype()) && o1.getStatementGroups().equals(other.getStatementGroups()); } /** * Returns {@code true} if the parameters are two {@link ItemDocument} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. 
* * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsItemDocument(ItemDocument o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof ItemDocument)) { return false; } ItemDocument other = (ItemDocument) o2; // Note: item id already compared by equalsTermedDocument() return equalsTermedDocument(o1, other) && o1.getSiteLinks().equals(other.getSiteLinks()) && o1.getStatementGroups().equals(other.getStatementGroups()); } /** * Returns {@code true} if the parameters are two {@link LexemeDocument} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsLexemeDocument(LexemeDocument o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof LexemeDocument)) { return false; } LexemeDocument other = (LexemeDocument) o2; return o1.getEntityId().equals(other.getEntityId()) && o1.getLanguage().equals(other.getLanguage()) && o1.getLexicalCategory().equals(other.getLexicalCategory()) && o1.getLemmas().equals(other.getLemmas()) && o1.getStatementGroups().equals(other.getStatementGroups()) && o1.getForms().equals(other.getForms()) && o1.getSenses().equals(other.getSenses()) && (o1.getRevisionId() == other.getRevisionId()); } /** * Returns {@code true} if the parameters are two {@link FormDocument} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsFormDocument(FormDocument o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof FormDocument)) { return false; } FormDocument other = (FormDocument) o2; return o1.getEntityId().equals(other.getEntityId()) && o1.getGrammaticalFeatures().equals(other.getGrammaticalFeatures()) && o1.getRepresentations().equals(other.getRepresentations()) && o1.getStatementGroups().equals(other.getStatementGroups()) && (o1.getRevisionId() == other.getRevisionId()); } /** * Returns {@code true} if the parameters are two {@link SenseDocument} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsSenseDocument(SenseDocument o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof SenseDocument)) { return false; } SenseDocument other = (SenseDocument) o2; return o1.getEntityId().equals(other.getEntityId()) && o1.getGlosses().equals(other.getGlosses()) && o1.getStatementGroups().equals(other.getStatementGroups()) && (o1.getRevisionId() == other.getRevisionId()); } /** * Returns {@code true} if the parameters are two {@link MediaInfoDocument} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. 
* * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsMediaInfoDocument(MediaInfoDocument o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof MediaInfoDocument)) { return false; } MediaInfoDocument other = (MediaInfoDocument) o2; return o1.getEntityId().equals(other.getEntityId()) && o1.getLabels().equals(other.getLabels()) && o1.getStatementGroups().equals(other.getStatementGroups()) && (o1.getRevisionId() == other.getRevisionId()); } private static boolean equalsTermedDocument(TermedDocument o1, TermedDocument other) { return o1.getEntityId().equals(other.getEntityId()) && o1.getAliases().equals(other.getAliases()) && o1.getDescriptions().equals(other.getDescriptions()) && o1.getLabels().equals(other.getLabels()) && (o1.getRevisionId() == other.getRevisionId()); } /** * Returns {@code true} if the parameters are two {@link EntityRedirectDocument} objects with * exactly the same data. It does not matter if they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsEntityRedirectDocument(EntityRedirectDocument o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof EntityRedirectDocument)) { return false; } EntityRedirectDocument other = (EntityRedirectDocument) o2; return o1.getEntityId().equals(other.getEntityId()) && o1.getTargetId().equals(other.getTargetId()) && o1.getRevisionId() == other.getRevisionId(); } /** * Returns {@code true} if the two {@link TermUpdate} objects contain exactly * the same data. It does not matter whether they are different implementations * of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsTermUpdate(TermUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof TermUpdate)) { return false; } TermUpdate other = (TermUpdate) o2; return Objects.equals(o1.getModified(), other.getModified()) && Objects.equals(o1.getRemoved(), other.getRemoved()); } /** * Returns {@code true} if the two {@link AliasUpdate} objects contain exactly * the same data. It does not matter whether they are different implementations * of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsAliasUpdate(AliasUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof AliasUpdate)) { return false; } AliasUpdate other = (AliasUpdate) o2; return Objects.equals(o1.getRecreated(), other.getRecreated()) && Objects.equals(o1.getAdded(), other.getAdded()) && Objects.equals(o1.getRemoved(), other.getRemoved()); } /** * Returns {@code true} if the two {@link StatementUpdate} objects contain * exactly the same data. It does not matter whether they are different * implementations of the interface as long as their content is the same. 
* * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsStatementUpdate(StatementUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof StatementUpdate)) { return false; } StatementUpdate other = (StatementUpdate) o2; return Objects.equals(o1.getAdded(), other.getAdded()) && Objects.equals(o1.getReplaced(), other.getReplaced()) && Objects.equals(o1.getRemoved(), other.getRemoved()); } private static boolean equalsEntityUpdate(EntityUpdate o1, EntityUpdate o2) { return Objects.equals(o1.getEntityId(), o2.getEntityId()) && o1.getBaseRevisionId() == o2.getBaseRevisionId(); } private static boolean equalsStatementDocumentUpdate(StatementDocumentUpdate o1, StatementDocumentUpdate o2) { return equalsEntityUpdate(o1, o2) && Objects.equals(o1.getStatements(), o2.getStatements()); } private static boolean equalsLabeledStatementDocumentUpdate( LabeledStatementDocumentUpdate o1, LabeledStatementDocumentUpdate o2) { return equalsStatementDocumentUpdate(o1, o2) && Objects.equals(o1.getLabels(), o2.getLabels()); } private static boolean equalsTermedStatementDocumentUpdate( TermedStatementDocumentUpdate o1, TermedStatementDocumentUpdate o2) { return equalsLabeledStatementDocumentUpdate(o1, o2) && Objects.equals(o1.getDescriptions(), o2.getDescriptions()) && Objects.equals(o1.getAliases(), o2.getAliases()); } /** * Returns {@code true} if the two {@link MediaInfoUpdate} objects contain * exactly the same data. It does not matter whether they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsMediaInfoUpdate(MediaInfoUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof MediaInfoUpdate)) { return false; } MediaInfoUpdate other = (MediaInfoUpdate) o2; return equalsLabeledStatementDocumentUpdate(o1, other); } /** * Returns {@code true} if the two {@link ItemUpdate} objects contain exactly * the same data. It does not matter whether they are different implementations * of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsItemUpdate(ItemUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof ItemUpdate)) { return false; } ItemUpdate other = (ItemUpdate) o2; return equalsTermedStatementDocumentUpdate(o1, other) && Objects.equals(o1.getModifiedSiteLinks(), other.getModifiedSiteLinks()) && Objects.equals(o1.getRemovedSiteLinks(), other.getRemovedSiteLinks()); } /** * Returns {@code true} if the two {@link PropertyUpdate} objects contain * exactly the same data. It does not matter whether they are different * implementations of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsPropertyUpdate(PropertyUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof PropertyUpdate)) { return false; } PropertyUpdate other = (PropertyUpdate) o2; return equalsTermedStatementDocumentUpdate(o1, other); } /** * Returns {@code true} if the two {@link SenseUpdate} objects contain exactly * the same data. 
It does not matter whether they are different implementations * of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsSenseUpdate(SenseUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof SenseUpdate)) { return false; } SenseUpdate other = (SenseUpdate) o2; return equalsStatementDocumentUpdate(o1, other) && Objects.equals(o1.getGlosses(), other.getGlosses()); } /** * Returns {@code true} if the two {@link FormUpdate} objects contain exactly * the same data. It does not matter whether they are different implementations * of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsFormUpdate(FormUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof FormUpdate)) { return false; } FormUpdate other = (FormUpdate) o2; return equalsStatementDocumentUpdate(o1, other) && Objects.equals(o1.getRepresentations(), other.getRepresentations()) && Objects.equals(o1.getGrammaticalFeatures(), other.getGrammaticalFeatures()); } /** * Returns {@code true} if the two {@link LexemeUpdate} objects contain exactly * the same data. It does not matter whether they are different implementations * of the interface as long as their content is the same. * * @param o1 * the first object to compare * @param o2 * the second object to compare * @return {@code true} if both objects are equal */ public static boolean equalsLexemeUpdate(LexemeUpdate o1, Object o2) { if (o2 == o1) { return true; } if (!(o2 instanceof LexemeUpdate)) { return false; } LexemeUpdate other = (LexemeUpdate) o2; return equalsStatementDocumentUpdate(o1, other) && Objects.equals(o1.getLanguage(), other.getLanguage()) && Objects.equals(o1.getLexicalCategory(), other.getLexicalCategory()) && Objects.equals(o1.getLemmas(), other.getLemmas()) && Objects.equals(o1.getAddedSenses(), other.getAddedSenses()) && Objects.equals(o1.getUpdatedSenses(), other.getUpdatedSenses()) && Objects.equals(o1.getRemovedSenses(), other.getRemovedSenses()) && Objects.equals(o1.getAddedForms(), other.getAddedForms()) && Objects.equals(o1.getUpdatedForms(), other.getUpdatedForms()) && Objects.equals(o1.getRemovedForms(), other.getRemovedForms()); } } FormUpdateBuilder.java000066400000000000000000000160301444772566300354330ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 * #L%
 */
package org.wikidata.wdtk.datamodel.helpers;

import java.util.Collection;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

import org.apache.commons.lang3.Validate;
import org.wikidata.wdtk.datamodel.interfaces.FormDocument;
import org.wikidata.wdtk.datamodel.interfaces.FormIdValue;
import org.wikidata.wdtk.datamodel.interfaces.FormUpdate;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate;
import org.wikidata.wdtk.datamodel.interfaces.TermUpdate;

/**
 * Builder for incremental construction of {@link FormUpdate} objects.
 */
public class FormUpdateBuilder extends StatementDocumentUpdateBuilder {

	private TermUpdate representations = TermUpdate.EMPTY;
	private Set<ItemIdValue> grammaticalFeatures;

	private FormUpdateBuilder(FormIdValue formId, long revisionId) {
		super(formId, revisionId);
	}

	private FormUpdateBuilder(FormDocument revision) {
		super(revision);
	}

	/**
	 * Creates new builder object for constructing update of form entity with
	 * given revision ID.
	 *
	 * @param formId
	 *            ID of the form that is to be updated
	 * @param revisionId
	 *            ID of the base form revision to be updated or zero if not
	 *            available
	 * @return update builder object
	 * @throws NullPointerException
	 *             if {@code formId} is {@code null}
	 * @throws IllegalArgumentException
	 *             if {@code formId} is a placeholder ID
	 */
	public static FormUpdateBuilder forBaseRevisionId(FormIdValue formId, long revisionId) {
		return new FormUpdateBuilder(formId, revisionId);
	}

	/**
	 * Creates new builder object for constructing update of form entity with
	 * given ID.
	 *
	 * @param formId
	 *            ID of the form that is to be updated
	 * @return update builder object
	 * @throws NullPointerException
	 *             if {@code formId} is {@code null}
	 * @throws IllegalArgumentException
	 *             if {@code formId} is a placeholder ID
	 */
	public static FormUpdateBuilder forEntityId(FormIdValue formId) {
		return new FormUpdateBuilder(formId, 0);
	}

	/**
	 * Creates new builder object for constructing update of given base form
	 * entity revision. Provided form document might not represent the latest
	 * revision of the form entity as currently stored in Wikibase. It will be
	 * used for validation in builder methods. If the document has revision ID,
	 * it will be used to detect edit conflicts.
	 *
	 * @param revision
	 *            base form entity revision to be updated
	 * @return update builder object
	 * @throws NullPointerException
	 *             if {@code revision} is {@code null}
	 * @throws IllegalArgumentException
	 *             if {@code revision} has placeholder ID
	 */
	public static FormUpdateBuilder forBaseRevision(FormDocument revision) {
		return new FormUpdateBuilder(revision);
	}

	@Override
	FormIdValue getEntityId() {
		return (FormIdValue) super.getEntityId();
	}

	@Override
	FormDocument getBaseRevision() {
		return (FormDocument) super.getBaseRevision();
	}

	@Override
	public FormUpdateBuilder updateStatements(StatementUpdate update) {
		super.updateStatements(update);
		return this;
	}
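	/*
	 * Usage sketch (illustrative addition, not part of the original source):
	 * replacing the en-gb representation of a form. TermUpdateBuilder.create()
	 * and Datamodel.makeFormUpdate() appear in this file; the put() method on
	 * TermUpdateBuilder and the Datamodel.makeMonolingualTextValue() helper
	 * are assumed toolkit APIs here, and "formId" is a placeholder variable:
	 *
	 *   FormUpdate update = FormUpdateBuilder.forEntityId(formId)
	 *       .updateRepresentations(TermUpdateBuilder.create()
	 *           .put(Datamodel.makeMonolingualTextValue("colour", "en-gb"))
	 *           .build())
	 *       .build();
	 *
	 * Starting from forBaseRevision(formDocument) instead would additionally
	 * let the builder drop redundant changes and detect edit conflicts.
	 */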
	/**
	 * Updates form representations. If this method is called multiple times,
	 * changes are accumulated. If base entity revision was provided, redundant
	 * changes are silently ignored, resulting in empty update.
	 *
	 * @param update
	 *            changes in form representations
	 * @return {@code this} (fluent method)
	 * @throws NullPointerException
	 *             if {@code update} is {@code null}
	 */
	public FormUpdateBuilder updateRepresentations(TermUpdate update) {
		Objects.requireNonNull(update, "Update cannot be null.");
		TermUpdateBuilder combined = getBaseRevision() != null
				? TermUpdateBuilder.forTerms(getBaseRevision().getRepresentations().values())
				: TermUpdateBuilder.create();
		combined.append(representations);
		combined.append(update);
		representations = combined.build();
		return this;
	}

	/**
	 * Sets grammatical features of the form. Any previously assigned
	 * grammatical features are removed. To remove all grammatical features
	 * without replacement, call this method with empty collection. If base
	 * entity revision was provided, attempt to replace grammatical features
	 * with identical set is silently ignored, resulting in empty update.
	 *
	 * @param features
	 *            new grammatical features of the form
	 * @return {@code this} (fluent method)
	 * @throws NullPointerException
	 *             if {@code features} or any of its items is {@code null}
	 * @throws IllegalArgumentException
	 *             if any item ID in {@code features} is a placeholder ID or if
	 *             there are duplicate features
	 */
	public FormUpdateBuilder setGrammaticalFeatures(Collection<ItemIdValue> features) {
		Objects.requireNonNull(features, "Collection of grammatical features cannot be null.");
		for (ItemIdValue id : features) {
			Objects.requireNonNull(id, "Grammatical feature IDs must not be null.");
			Validate.isTrue(!id.isPlaceholder(), "Grammatical feature ID cannot be a placeholder ID.");
		}
		Set<ItemIdValue> set = new HashSet<>(features);
		Validate.isTrue(set.size() == features.size(), "Every grammatical feature must be unique.");
		if (getBaseRevision() != null && set.equals(new HashSet<>(getBaseRevision().getGrammaticalFeatures()))) {
			grammaticalFeatures = null;
			return this;
		}
		grammaticalFeatures = new HashSet<>(features);
		return this;
	}

	/**
	 * Replays all changes in provided update into this builder object. Changes
	 * from the update are added on top of changes already present in this
	 * builder object.
	 *
	 * @param update
	 *            form update to replay
	 * @return {@code this} (fluent method)
	 * @throws NullPointerException
	 *             if {@code update} is {@code null}
	 * @throws IllegalArgumentException
	 *             if {@code update} cannot be applied to base entity revision
	 *             (if available)
	 */
	public FormUpdateBuilder append(FormUpdate update) {
		super.append(update);
		updateRepresentations(update.getRepresentations());
		if (update.getGrammaticalFeatures().isPresent()) {
			setGrammaticalFeatures(update.getGrammaticalFeatures().get());
		}
		return this;
	}

	@Override
	public FormUpdate build() {
		return Datamodel.makeFormUpdate(getEntityId(), getBaseRevisionId(),
				representations, grammaticalFeatures, statements);
	}
}

Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/Hash.java

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.Objects; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityRedirectDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyUpdate; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermedDocument; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; /** * Static class for computing a hashcode of arbitrary data objects using only * their interfaces. This can be used to implement the hashCode() method of * arbitrary interface implementations. More efficient solutions might exist if * the object that implements an interface is of a specific known type, but the * methods here could always be used as a fallback or default. * * @author Markus Kroetzsch * */ public class Hash { /** * Prime number used to build hashes. */ private static final int PRIME = 31; /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(EntityIdValue o) { int result; result = o.getId().hashCode(); result = PRIME * result + o.getSiteIri().hashCode(); result = PRIME * result + o.getEntityType().hashCode(); return result; } /** * Returns a hash code for the given object. 
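* For instance, an implementing class can delegate to these helpers (a sketch; the surrounding class is assumed to implement one of the datamodel interfaces): {@code public int hashCode() { return Hash.hashCode(this); }}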
* * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(DatatypeIdValue o) { return o.getIri().hashCode(); } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(TimeValue o) { int result; result = Long.hashCode(o.getYear()); result = PRIME * result + o.getMonth(); result = PRIME * result + o.getDay(); result = PRIME * result + o.getHour(); result = PRIME * result + o.getMinute(); result = PRIME * result + o.getSecond(); result = PRIME * result + o.getPrecision(); result = PRIME * result + o.getBeforeTolerance(); result = PRIME * result + o.getAfterTolerance(); result = PRIME * result + o.getTimezoneOffset(); result = PRIME * result + o.getPreferredCalendarModel().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(GlobeCoordinatesValue o) { int result; result = o.getGlobe().hashCode(); long value; value = Double.valueOf(o.getLatitude()).hashCode(); result = PRIME * result + (int) (value ^ (value >>> 32)); value = Double.valueOf(o.getLongitude()).hashCode(); result = PRIME * result + (int) (value ^ (value >>> 32)); value = Double.valueOf(o.getPrecision()).hashCode(); result = PRIME * result + (int) (value ^ (value >>> 32)); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(StringValue o) { return o.getString().hashCode(); } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(MonolingualTextValue o) { int result; result = o.getLanguageCode().hashCode(); result = PRIME * result + o.getText().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(QuantityValue o) { int result; result = o.getNumericValue().hashCode(); result = PRIME * result + o.getUnit().hashCode(); if(o.getLowerBound() != null) { result = PRIME * result + o.getLowerBound().hashCode(); } if(o.getUpperBound() != null) { result = PRIME * result + o.getUpperBound().hashCode(); } return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(ValueSnak o) { int result; result = o.getValue().hashCode(); result = PRIME * result + o.getPropertyId().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(SomeValueSnak o) { return o.getPropertyId().hashCode(); } /** * Returns a hash code for the given object. 
* * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(NoValueSnak o) { return o.getPropertyId().hashCode(); } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(SnakGroup o) { return o.getSnaks().hashCode(); } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(Claim o) { int result; result = o.getSubject().hashCode(); result = PRIME * result + o.getMainSnak().hashCode(); result = PRIME * result + o.getQualifiers().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(Reference o) { return o.getSnakGroups().hashCode(); } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(Statement o) { int result; result = o.getSubject().hashCode(); result = PRIME * result + o.getMainSnak().hashCode(); result = PRIME * result + o.getQualifiers().hashCode(); result = PRIME * result + o.getReferences().hashCode(); result = PRIME * result + o.getRank().hashCode(); result = PRIME * result + o.getStatementId().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(StatementGroup o) { return o.getStatements().hashCode(); } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(SiteLink o) { int result; result = o.getBadges().hashCode(); result = PRIME * result + o.getPageTitle().hashCode(); result = PRIME * result + o.getSiteKey().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(PropertyDocument o) { int result; result = hashCodeForTermedDocument(o); result = PRIME * result + o.getStatementGroups().hashCode(); result = PRIME * result + o.getDatatype().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(ItemDocument o) { int result; result = hashCodeForTermedDocument(o); result = PRIME * result + o.getStatementGroups().hashCode(); result = PRIME * result + o.getSiteLinks().hashCode(); return result; } /** * Returns a hash code for the given object. 
* * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(LexemeDocument o) { int result; result = o.getLexicalCategory().hashCode(); result = PRIME * result + o.getLanguage().hashCode(); result = PRIME * result + o.getLemmas().hashCode(); result = PRIME * result + Long.hashCode(o.getRevisionId()); result = PRIME * result + o.getStatementGroups().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(FormDocument o) { int result; result = o.getGrammaticalFeatures().hashCode(); result = PRIME * result + o.getRepresentations().hashCode(); result = PRIME * result + Long.hashCode(o.getRevisionId()); result = PRIME * result + o.getStatementGroups().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(SenseDocument o) { int result; result = o.getGlosses().hashCode(); result = PRIME * result + Long.hashCode(o.getRevisionId()); result = PRIME * result + o.getStatementGroups().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(MediaInfoDocument o) { int result; result = o.getLabels().hashCode(); result = PRIME * result + o.getStatementGroups().hashCode(); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ private static int hashCodeForTermedDocument(TermedDocument o) { int result; result = o.getAliases().hashCode(); result = PRIME * result + o.getDescriptions().hashCode(); result = PRIME * result + o.getLabels().hashCode(); result = PRIME * result + Long.hashCode(o.getRevisionId()); return result; } /** * Returns a hash code for the given object. * * @see java.lang.Object#hashCode() * @param o * the object to create a hash for * @return the hash code of the object */ public static int hashCode(EntityRedirectDocument o) { int result; result = o.getEntityId().hashCode(); result = PRIME * result + o.getTargetId().hashCode(); result = PRIME * result + Long.hashCode(o.getRevisionId()); return result; } /** * Calculates hash code for given {@link TermUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(TermUpdate o) { return Objects.hash(o.getModified(), o.getRemoved()); } /** * Calculates hash code for given {@link AliasUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(AliasUpdate o) { return Objects.hash(o.getRecreated(), o.getAdded(), o.getRemoved()); } /** * Calculates hash code for given {@link StatementUpdate} object. 
* * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(StatementUpdate o) { return Objects.hash(o.getAdded(), o.getReplaced(), o.getRemoved()); } private static int hashCodeForEntityUpdate(EntityUpdate o) { return Objects.hash(o.getEntityId(), o.getBaseRevisionId()); } private static int hashCodeForStatementDocumentUpdate(StatementDocumentUpdate o) { return hashCodeForEntityUpdate(o) * PRIME + Objects.hash(o.getStatements()); } private static int hashCodeForLabeledStatementDocumentUpdate(LabeledStatementDocumentUpdate o) { return hashCodeForStatementDocumentUpdate(o) * PRIME + Objects.hash(o.getLabels()); } private static int hashCodeForTermedStatementDocumentUpdate(TermedStatementDocumentUpdate o) { return hashCodeForLabeledStatementDocumentUpdate(o) * PRIME + Objects.hash(o.getDescriptions(), o.getAliases()); } /** * Calculates hash code for given {@link MediaInfoUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(MediaInfoUpdate o) { return hashCodeForLabeledStatementDocumentUpdate(o); } /** * Calculates hash code for given {@link ItemUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(ItemUpdate o) { return hashCodeForTermedStatementDocumentUpdate(o) * PRIME + Objects.hash(o.getModifiedSiteLinks(), o.getRemovedSiteLinks()); } /** * Calculates hash code for given {@link PropertyUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(PropertyUpdate o) { return hashCodeForTermedStatementDocumentUpdate(o); } /** * Calculates hash code for given {@link SenseUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(SenseUpdate o) { return hashCodeForStatementDocumentUpdate(o) * PRIME + Objects.hash(o.getGlosses()); } /** * Calculates hash code for given {@link FormUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(FormUpdate o) { return hashCodeForStatementDocumentUpdate(o) * PRIME + Objects.hash(o.getRepresentations(), o.getGrammaticalFeatures()); } /** * Calculates hash code for given {@link LexemeUpdate} object. * * @see Object#hashCode() * @param o * the object to create a hash for * @return object's hash code */ public static int hashCode(LexemeUpdate o) { return hashCodeForStatementDocumentUpdate(o) * PRIME + Objects.hash( o.getLanguage(), o.getLexicalCategory(), o.getLemmas(), o.getAddedSenses(), o.getUpdatedSenses(), o.getRemovedSenses(), o.getAddedForms(), o.getUpdatedForms(), o.getRemovedForms()); } } ItemDocumentBuilder.java000066400000000000000000000103501444772566300357610ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Arrays; import java.util.HashMap; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; /** * Builder class to construct {@link ItemDocument} objects.
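* <p> * A minimal construction sketch (the item ID, label and site link are illustrative; {@code withLabel} is assumed to be inherited from {@code EntityDocumentBuilder}): * <pre>{@code * ItemDocument doc = ItemDocumentBuilder * .forItemId(Datamodel.makeWikidataItemIdValue("Q42")) * .withLabel("Douglas Adams", "en") * .withSiteLink("Douglas Adams", "enwiki") * .build(); * }</pre>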
 * * @author Markus Kroetzsch * */ public class ItemDocumentBuilder extends EntityDocumentBuilder<ItemDocumentBuilder, ItemDocument> { private final HashMap<String, SiteLink> siteLinks = new HashMap<>(); /** * Constructor to start the build from a blank item. * * @param itemIdValue */ protected ItemDocumentBuilder(ItemIdValue itemIdValue) { super(itemIdValue); } /** * Constructor to start the build from an existing item. * * @param initialDocument * the item to start the build from */ protected ItemDocumentBuilder(ItemDocument initialDocument) { super(initialDocument); for(SiteLink siteLink : initialDocument.getSiteLinks().values()) { withSiteLink(siteLink); } } /** * Starts the construction of an {@link ItemDocument} with the given id. * * @param itemIdValue * id of the newly constructed item document * @return builder object to continue construction */ public static ItemDocumentBuilder forItemId(ItemIdValue itemIdValue) { return new ItemDocumentBuilder(itemIdValue); } /** * Starts the construction of an {@link ItemDocument} from an existing value. * * @param initialDocument * the item to start the construction from * @return builder object to continue construction */ public static ItemDocumentBuilder fromItemDocument(ItemDocument initialDocument) { return new ItemDocumentBuilder(initialDocument); } /** * Returns the {@link ItemDocument} that has been built. * * @return constructed item document * @throws IllegalStateException * if the object was built already */ @Override public ItemDocument build() { prepareBuild(); return factory.getItemDocument((ItemIdValue) this.entityIdValue, this.labels, this.descriptions, this.aliases, getStatementGroups(), this.siteLinks, this.revisionId); } /** * Adds an additional site link to the constructed document. * * @param siteLink * the additional site link */ public ItemDocumentBuilder withSiteLink(SiteLink siteLink) { this.siteLinks.put(siteLink.getSiteKey(), siteLink); return this; } /** * Adds an additional site link to the constructed document. * * @param title * the title of the linked page * @param siteKey * identifier of the site, e.g., "enwiki" * @param badges * one or more badges */ public ItemDocumentBuilder withSiteLink(String title, String siteKey, ItemIdValue... badges) { withSiteLink(factory.getSiteLink(title, siteKey, Arrays.asList(badges))); return this; } /** * Changes the entity value id for the constructed document. * See {@link EntityDocument#getEntityId()}. * * @param entityId * the entity id, which must be an ItemIdValue * @return builder object to continue construction */ @Override public ItemDocumentBuilder withEntityId(EntityIdValue entityId) { if (!(entityId instanceof ItemIdValue)) { throw new IllegalArgumentException("The entity id of an ItemDocument must be an ItemIdValue."); } return super.withEntityId(entityId); } @Override protected ItemDocumentBuilder getThis() { return this; } } ItemUpdateBuilder.java000066400000000000000000000166611444772566300354340ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; /** * Builder for incremental construction of {@link ItemUpdate} objects. */ public class ItemUpdateBuilder extends TermedDocumentUpdateBuilder { private final Map<String, SiteLink> modifiedSiteLinks = new HashMap<>(); private final Set<String> removedSiteLinks = new HashSet<>(); private ItemUpdateBuilder(ItemIdValue itemId, long revisionId) { super(itemId, revisionId); } private ItemUpdateBuilder(ItemDocument revision) { super(revision); } /** * Creates new builder object for constructing update of item entity with given * revision ID. * * @param itemId * ID of the item entity that is to be updated * @param revisionId * ID of the base item revision to be updated or zero if not * available * @return update builder object * @throws NullPointerException * if {@code itemId} is {@code null} * @throws IllegalArgumentException * if {@code itemId} is a placeholder ID */ public static ItemUpdateBuilder forBaseRevisionId(ItemIdValue itemId, long revisionId) { return new ItemUpdateBuilder(itemId, revisionId); } /** * Creates new builder object for constructing update of item entity with given * ID. * * @param itemId * ID of the item entity that is to be updated * @return update builder object * @throws NullPointerException * if {@code itemId} is {@code null} * @throws IllegalArgumentException * if {@code itemId} is a placeholder ID */ public static ItemUpdateBuilder forEntityId(ItemIdValue itemId) { return new ItemUpdateBuilder(itemId, 0); } /** * Creates new builder object for constructing update of given base item entity * revision. Provided item document might not represent the latest revision of * the item entity as currently stored in Wikibase.
It will be used for * validation in builder methods. If the document has revision ID, it will be * used to detect edit conflicts. * * @param revision * base item entity revision to be updated * @return update builder object * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ public static ItemUpdateBuilder forBaseRevision(ItemDocument revision) { return new ItemUpdateBuilder(revision); } @Override ItemIdValue getEntityId() { return (ItemIdValue) super.getEntityId(); } @Override ItemDocument getBaseRevision() { return (ItemDocument) super.getBaseRevision(); } @Override public ItemUpdateBuilder updateStatements(StatementUpdate update) { super.updateStatements(update); return this; } @Override public ItemUpdateBuilder updateLabels(TermUpdate update) { super.updateLabels(update); return this; } @Override public ItemUpdateBuilder updateDescriptions(TermUpdate update) { super.updateDescriptions(update); return this; } @Override public ItemUpdateBuilder updateAliases(String language, AliasUpdate update) { super.updateAliases(language, update); return this; } /** * Adds or replaces site link. If there is no site link for the site key, new * site link is added. If a site link with this site key already exists, it is * replaced. Site links with other site keys are not touched. Calling this * method overrides any previous changes made with the same site key by this * method or {@link #removeSiteLink(String)}. *
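<p> * For example (the title and site key are illustrative, and the two-argument {@code Datamodel.makeSiteLink} overload is assumed): {@code builder.putSiteLink(Datamodel.makeSiteLink("Douglas Adams", "enwiki"))} adds or replaces the English Wikipedia site link. *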
<p>
* If base entity revision was provided, attempt to overwrite some site link * with identical site link will be silently ignored, resulting in empty update. * * @param link * new or replacement site link * @return {@code this} (fluent method) * @throws NullPointerException * if {@code link} is {@code null} */ public ItemUpdateBuilder putSiteLink(SiteLink link) { Objects.requireNonNull(link, "Site link cannot be null."); if (getBaseRevision() != null) { SiteLink original = getBaseRevision().getSiteLinks().get(link.getSiteKey()); if (link.equals(original)) { modifiedSiteLinks.remove(link.getSiteKey()); removedSiteLinks.remove(link.getSiteKey()); return this; } } modifiedSiteLinks.put(link.getSiteKey(), link); removedSiteLinks.remove(link.getSiteKey()); return this; } /** * Removes site link. Site links with other site keys are not touched. Calling * this method overrides any previous changes made with the same site key by * this method or {@link #putSiteLink(SiteLink)}. *
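<p> * For example, {@code builder.removeSiteLink("dewiki")} schedules removal of the German Wikipedia site link (the site key is illustrative). *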
<p>
* If base entity revision was provided, attempts to remove missing site links * will be silently ignored, resulting in empty update. * * @param site * site key of the removed site link * @return {@code this} (fluent method) * @throws NullPointerException * if {@code site} is {@code null} * @throws IllegalArgumentException * if {@code site} is blank */ public ItemUpdateBuilder removeSiteLink(String site) { Validate.notBlank(site, "Site key cannot be null."); if (getBaseRevision() != null && !getBaseRevision().getSiteLinks().containsKey(site)) { modifiedSiteLinks.remove(site); return this; } removedSiteLinks.add(site); modifiedSiteLinks.remove(site); return this; } /** * Replays all changes in provided update into this builder object. Changes from * the update are added on top of changes already present in this builder * object. * * @param update * item update to replay * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if {@code update} cannot be applied to base entity revision (if * available) */ public ItemUpdateBuilder append(ItemUpdate update) { super.append(update); for (SiteLink link : update.getModifiedSiteLinks().values()) { putSiteLink(link); } for (String site : update.getRemovedSiteLinks()) { removeSiteLink(site); } return this; } @Override public ItemUpdate build() { return Datamodel.makeItemUpdate(getEntityId(), getBaseRevisionId(), labels, descriptions, aliases, statements, modifiedSiteLinks.values(), removedSiteLinks); } } JsonDeserializer.java000066400000000000000000000117011444772566300353320ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2020 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import org.wikidata.wdtk.datamodel.implementation.EntityDocumentImpl; import org.wikidata.wdtk.datamodel.implementation.EntityRedirectDocumentImpl; import org.wikidata.wdtk.datamodel.implementation.ItemDocumentImpl; import org.wikidata.wdtk.datamodel.implementation.LexemeDocumentImpl; import org.wikidata.wdtk.datamodel.implementation.MediaInfoDocumentImpl; import org.wikidata.wdtk.datamodel.implementation.PropertyDocumentImpl; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityRedirectDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectReader; /** * Helper to deserialize datamodel objects from their * JSON representation. 
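* <p> * A small usage sketch (the JSON string is a placeholder; {@code Datamodel.SITE_WIKIDATA} is the usual site IRI constant for Wikidata): * <pre>{@code * JsonDeserializer deserializer = new JsonDeserializer(Datamodel.SITE_WIKIDATA); * ItemDocument item = deserializer.deserializeItemDocument(json); * }</pre>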
 * * We accept empty arrays as empty maps since there has * been confusion in the past between the two: * https://phabricator.wikimedia.org/T138104 * * @author Antonin Delpeuch */ public class JsonDeserializer { private ObjectReader entityDocumentReader; private ObjectReader itemReader; private ObjectReader propertyReader; private ObjectReader lexemeReader; private ObjectReader mediaInfoReader; private ObjectReader entityRedirectReader; /** * Constructs a new JSON deserializer for the * designated site. * * @param siteIri * Root IRI of the site to deserialize for */ public JsonDeserializer(String siteIri) { DatamodelMapper mapper = new DatamodelMapper(siteIri); entityDocumentReader = mapper.readerFor(EntityDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); itemReader = mapper.readerFor(ItemDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); propertyReader = mapper.readerFor(PropertyDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); lexemeReader = mapper.readerFor(LexemeDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); mediaInfoReader = mapper.readerFor(MediaInfoDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); entityRedirectReader = mapper.readerFor(EntityRedirectDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); } /** * Deserializes a JSON string into an {@link ItemDocument}. * @throws JsonProcessingException if the JSON payload is invalid */ public ItemDocument deserializeItemDocument(String json) throws JsonProcessingException { return itemReader.readValue(json); } /** * Deserializes a JSON string into a {@link PropertyDocument}. * @throws JsonProcessingException if the JSON payload is invalid */ public PropertyDocument deserializePropertyDocument(String json) throws JsonProcessingException { return propertyReader.readValue(json); } /** * Deserializes a JSON string into a {@link LexemeDocument}. * @throws JsonProcessingException if the JSON payload is invalid */ public LexemeDocument deserializeLexemeDocument(String json) throws JsonProcessingException { return lexemeReader.readValue(json); } /** * Deserializes a JSON string into a {@link MediaInfoDocument}. * @throws JsonProcessingException if the JSON payload is invalid */ public MediaInfoDocument deserializeMediaInfoDocument(String json) throws JsonProcessingException { return mediaInfoReader.readValue(json); } /** * Deserializes a JSON string into an {@link EntityDocument}. * @throws JsonProcessingException if the JSON payload is invalid */ public EntityDocument deserializeEntityDocument(String json) throws JsonProcessingException { return entityDocumentReader.readValue(json); } /** * Deserializes a JSON string into an {@link EntityRedirectDocument}. * @throws JsonProcessingException if the JSON payload is invalid */ public EntityRedirectDocument deserializeEntityRedirectDocument(String json) throws JsonProcessingException { return entityRedirectReader.readValue(json); } } JsonSerializer.java000066400000000000000000000174111444772566300350250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentDumpProcessor; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.Statement; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; /** * This class implements {@link EntityDocumentDumpProcessor} to provide a * serializer for {@link EntityDocument} objects in JSON. *
<p>
* The implementation does not check if {@link #open()} has been called before * the first document is serialized. It is the responsibility of the caller to * do this. *
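<p> * A typical lifecycle, as a sketch (the output stream is illustrative): * <pre>{@code * JsonSerializer serializer = new JsonSerializer(Files.newOutputStream(Paths.get("dump.json"))); * serializer.open(); * serializer.processItemDocument(itemDocument); * serializer.close(); * }</pre> *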
<p>
 * Implementations of the data model are expected to be appropriately serializable * to JSON with Jackson. * * @author Markus Kroetzsch * */ public class JsonSerializer implements EntityDocumentDumpProcessor { private static final Logger logger = LoggerFactory.getLogger(JsonSerializer.class); private static final byte[] JSON_START_LIST = "[\n".getBytes(StandardCharsets.UTF_8); private static final byte[] JSON_SEP = ",\n".getBytes(StandardCharsets.UTF_8); private static final byte[] JSON_END_LIST = "\n]".getBytes(StandardCharsets.UTF_8); /** * The stream that the resulting JSON is written to. */ private final OutputStream outputStream; /** * Object mapper that is used to serialize JSON. */ protected static final ObjectMapper mapper = new ObjectMapper(); static { mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); /* * Support for Optional properties. */ mapper.registerModule(new Jdk8Module()); } /** * Counter for the number of documents serialized so far. */ private int entityDocumentCount; /** * Creates a new JSON serializer that writes its output to the given stream. * The output stream will be managed by the object, i.e., it will be closed * when {@link #close()} is called. * * @param outputStream * the output stream to write to */ public JsonSerializer(OutputStream outputStream) { this.outputStream = outputStream; } @Override public void open() { this.entityDocumentCount = 0; try { this.outputStream.write(JSON_START_LIST); } catch (IOException e) { reportException(e); } } @Override public void processItemDocument(ItemDocument itemDocument) { serializeEntityDocument(itemDocument); } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { serializeEntityDocument(propertyDocument); } @Override public void processLexemeDocument(LexemeDocument lexemeDocument) { serializeEntityDocument(lexemeDocument); } @Override public void processMediaInfoDocument(MediaInfoDocument mediaInfoDocument) { serializeEntityDocument(mediaInfoDocument); } @Override public void close() { try { this.outputStream.write(JSON_END_LIST); this.outputStream.close(); } catch (IOException e) { reportException(e); } } /** * Returns the number of entity documents serialized so far. * * @return number of serialized entity documents */ public int getEntityDocumentCount() { return this.entityDocumentCount; } /** * Reports a given exception as a RuntimeException, since the interface does * not allow us to throw checked exceptions directly. * * @param e * the exception to report * @throws RuntimeException * in all cases */ private void reportException(Exception e) { logger.error("Failed to write JSON export: " + e.toString()); throw new RuntimeException(e.toString(), e); } /** * Writes the JSON serialization of the given {@link EntityDocument}. * * @param entityDocument * the document to serialize */ private void serializeEntityDocument(EntityDocument entityDocument) { try { if (this.entityDocumentCount > 0) { this.outputStream.write(JSON_SEP); } mapper.writeValue(this.outputStream, entityDocument); } catch (IOException e) { reportException(e); } this.entityDocumentCount++; } /** * Serializes the given object in JSON and returns the resulting string. * Throws if the serialization fails.
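* For example, {@code JsonSerializer.getJsonString(itemDocument)} returns the JSON text of the given document (the variable is assumed to hold an {@link ItemDocument}).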
* * @param entityDocument * object to serialize * @return JSON serialization * @throws JsonProcessingException if the object cannot be serialized */ public static String getJsonString(EntityDocument entityDocument) throws JsonProcessingException { return mapper.writeValueAsString(entityDocument); } /** * Serializes the given object in JSON and returns the resulting string. * Throws if the serialization fails. * * @param itemDocument * object to serialize * @return JSON serialization * @throws JsonProcessingException if the object cannot be serialized */ public static String getJsonString(ItemDocument itemDocument) throws JsonProcessingException { return mapper.writeValueAsString(itemDocument); } /** * Serializes the given object in JSON and returns the resulting string. * Throws if the serialization fails. * * @param propertyDocument * object to serialize * @return JSON serialization * @throws JsonProcessingException if the object cannot be serialized */ public static String getJsonString(PropertyDocument propertyDocument) throws JsonProcessingException { return mapper.writeValueAsString(propertyDocument); } /** * Serializes the given object in JSON and returns the resulting string. * Throws if the serialization fails. * * @param mediaInfoDocument * object to serialize * @return JSON serialization * @throws JsonProcessingException if the object cannot be serialized */ public static String getJsonString(MediaInfoDocument mediaInfoDocument) throws JsonProcessingException { return mapper.writeValueAsString(mediaInfoDocument); } /** * Serializes the given object in JSON and returns the resulting string. * Throws if the serialization fails. * * @param statement * object to serialize * @return JSON serialization * @throws JsonProcessingException if the object cannot be serialized */ public static String getJsonString(Statement statement) throws JsonProcessingException { return mapper.writeValueAsString(statement); } /** * Serializes the given object in JSON and returns the resulting string. * Throws if the serialization fails. * * @param update * object to serialize * @return JSON serialization * @throws JsonProcessingException if the object cannot be serialized */ public static String getJsonString(EntityUpdate update) throws JsonProcessingException { return mapper.writeValueAsString(update); } } LabeledDocumentUpdateBuilder.java000066400000000000000000000162731444772566300375700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.Objects; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocument; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocument; /** * Builder for incremental construction of * {@link LabeledStatementDocumentUpdate} objects. */ public abstract class LabeledDocumentUpdateBuilder extends StatementDocumentUpdateBuilder { TermUpdate labels = TermUpdate.EMPTY; /** * Initializes new builder object for constructing update of entity with given * ID. * * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is a placeholder ID */ protected LabeledDocumentUpdateBuilder(EntityIdValue entityId, long revisionId) { super(entityId, revisionId); } /** * Initializes new builder object for constructing update of given base entity * revision. * * @param revision * base entity revision to be updated * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ protected LabeledDocumentUpdateBuilder(LabeledStatementDocument revision) { super(revision); } /** * Creates new builder object for constructing update of entity with given * revision ID. *
<p>
* Supported entity IDs include {@link ItemIdValue}, {@link PropertyIdValue}, * and {@link MediaInfoIdValue}. * * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it is a * placeholder ID */ public static LabeledDocumentUpdateBuilder forBaseRevisionId(EntityIdValue entityId, long revisionId) { Objects.requireNonNull(entityId, "Entity ID cannot be null."); if (entityId instanceof MediaInfoIdValue) { return MediaInfoUpdateBuilder.forBaseRevisionId((MediaInfoIdValue) entityId, revisionId); } return TermedDocumentUpdateBuilder.forBaseRevisionId(entityId, revisionId); } /** * Creates new builder object for constructing update of entity with given ID. *
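<p> * For example (the property ID and label are illustrative; {@code TermUpdateBuilder.put} is assumed from this package): * <pre>{@code * LabeledDocumentUpdateBuilder * .forEntityId(Datamodel.makeWikidataPropertyIdValue("P31")) * .updateLabels(TermUpdateBuilder.create() * .put(Datamodel.makeMonolingualTextValue("instance of", "en")) * .build()) * .build(); * }</pre> *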
<p>
* Supported entity IDs include {@link ItemIdValue}, {@link PropertyIdValue}, * and {@link MediaInfoIdValue}. * * @param entityId * ID of the entity that is to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it is a * placeholder ID */ public static LabeledDocumentUpdateBuilder forEntityId(EntityIdValue entityId) { return forBaseRevisionId(entityId, 0); } /** * Creates new builder object for constructing update of given base entity * revision. Provided entity document might not represent the latest revision of * the entity as currently stored in Wikibase. It will be used for validation in * builder methods. If the document has revision ID, it will be used to detect * edit conflicts. *
<p>
* Supported entity types include {@link ItemDocument}, * {@link PropertyDocument}, and {@link MediaInfoDocument}. * * @param revision * base entity revision to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} is of unrecognized type or its ID is a * placeholder ID */ public static LabeledDocumentUpdateBuilder forBaseRevision(LabeledStatementDocument revision) { Objects.requireNonNull(revision, "Base entity revision cannot be null."); if (revision instanceof MediaInfoDocument) { return MediaInfoUpdateBuilder.forBaseRevision((MediaInfoDocument) revision); } if (revision instanceof TermedStatementDocument) { return TermedDocumentUpdateBuilder.forBaseRevision((TermedStatementDocument) revision); } throw new IllegalArgumentException("Unrecognized entity document type."); } @Override LabeledStatementDocument getBaseRevision() { return (LabeledStatementDocument) super.getBaseRevision(); } @Override public LabeledDocumentUpdateBuilder updateStatements(StatementUpdate update) { super.updateStatements(update); return this; } /** * Updates entity labels. If this method is called multiple times, changes are * accumulated. If base entity revision was provided, redundant changes are * silently ignored, resulting in empty update. * * @param update * changes in entity labels * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} */ public LabeledDocumentUpdateBuilder updateLabels(TermUpdate update) { Objects.requireNonNull(update, "Update cannot be null."); TermUpdateBuilder combined = getBaseRevision() != null ? TermUpdateBuilder.forTerms(getBaseRevision().getLabels().values()) : TermUpdateBuilder.create(); combined.append(labels); combined.append(update); labels = combined.build(); return this; } void append(LabeledStatementDocumentUpdate update) { super.append(update); updateLabels(update.getLabels()); } /** * Creates new {@link LabeledStatementDocumentUpdate} object with contents of * this builder object. * * @return constructed object */ @Override public abstract LabeledStatementDocumentUpdate build(); } LexemeDeserializer.java000066400000000000000000000121461444772566300356440ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2022 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import com.fasterxml.jackson.core.JacksonException; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import com.fasterxml.jackson.databind.exc.InvalidFormatException; import com.fasterxml.jackson.databind.node.NullNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.type.TypeFactory; import org.wikidata.wdtk.datamodel.implementation.*; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; /** * Custom deserializer, very close to the default deserializer generated by Jackson, * only to also accept empty json objects ({}) in place of empty lists for the list of senses. * https://github.com/Wikidata/Wikidata-Toolkit/issues/568 */ public class LexemeDeserializer extends StdDeserializer<LexemeDocumentImpl> { public LexemeDeserializer() { super(LexemeDocumentImpl.class); } @Override public LexemeDocumentImpl deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JacksonException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); if (! (node instanceof ObjectNode)) { throw new IOException("Deserializing a lexeme can only be done from a JSON object"); } ObjectNode object = (ObjectNode) node; String jsonId = object.get("id").asText(); String lexicalCategory = object.get("lexicalCategory").asText(); String language = object.get("language").asText(); TypeFactory typeFactory = deserializationContext.getTypeFactory(); JsonNode lemmas1 = object.get("lemmas"); Map<String, TermImpl> preLemmas = deserializationContext.readTreeAsValue( lemmas1, typeFactory.constructMapType(Map.class, String.class, TermImpl.class)); Map<String, MonolingualTextValue> lemmas = null; if (preLemmas != null) { lemmas = preLemmas.entrySet() .stream() .collect(Collectors.toMap(Map.Entry::getKey, v -> v.getValue())); } Map<String, List<StatementImpl.PreStatement>> claims = deserializationContext.readTreeAsValue( object.get("claims"), typeFactory.constructMapType(Map.class, typeFactory.constructType(String.class), typeFactory.constructCollectionType(List.class, StatementImpl.PreStatement.class))); List<FormDocument> forms; if (object.get("forms") instanceof ObjectNode && ((ObjectNode)object.get("forms")).isEmpty()) { forms = Collections.emptyList(); } else { forms = deserializationContext.readTreeAsValue( object.get("forms"), typeFactory.constructCollectionType(List.class, FormDocumentImpl.class)); } List<SenseDocument> senses; if (object.get("senses") instanceof ObjectNode && ((ObjectNode)object.get("senses")).isEmpty()) { // case for https://github.com/Wikidata/Wikidata-Toolkit/issues/568 senses = Collections.emptyList(); } else { senses = deserializationContext.readTreeAsValue( object.get("senses"), typeFactory.constructCollectionType(List.class, SenseDocumentImpl.class)); } long lastrevid = 0; if (object.has("lastrevid")) { lastrevid = object.get("lastrevid").asLong(); } String siteIri = (String) deserializationContext.findInjectableValue("siteIri", null, null); return new
LexemeDocumentImpl( jsonId, lexicalCategory, language, lemmas, claims, forms, senses, lastrevid, siteIri ); } } LexemeUpdateBuilder.java000066400000000000000000000375021444772566300357560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; /** * Builder for incremental construction of {@link LexemeUpdate} objects. */ public class LexemeUpdateBuilder extends StatementDocumentUpdateBuilder { private ItemIdValue language; private ItemIdValue lexicalCategory; private TermUpdate lemmas = TermUpdate.EMPTY; private final List<SenseDocument> addedSenses = new ArrayList<>(); private final Map<SenseIdValue, SenseUpdate> updatedSenses = new HashMap<>(); private final Set<SenseIdValue> removedSenses = new HashSet<>(); private final List<FormDocument> addedForms = new ArrayList<>(); private final Map<FormIdValue, FormUpdate> updatedForms = new HashMap<>(); private final Set<FormIdValue> removedForms = new HashSet<>(); private LexemeUpdateBuilder(LexemeIdValue lexemeId, long revisionId) { super(lexemeId, revisionId); } private LexemeUpdateBuilder(LexemeDocument revision) { super(revision); } /** * Creates new builder object for constructing update of lexeme entity with * given revision ID. * * @param lexemeId * ID of the lexeme that is to be updated * @param revisionId * ID of the base lexeme revision to be updated or zero if not * available * @return update builder object * @throws NullPointerException * if {@code lexemeId} is {@code null} * @throws IllegalArgumentException * if {@code lexemeId} is a placeholder ID */ public static LexemeUpdateBuilder forBaseRevisionId(LexemeIdValue lexemeId, long revisionId) { return new LexemeUpdateBuilder(lexemeId, revisionId); } /** * Creates new builder object for constructing update of lexeme entity with * given ID.
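* <p> * A small usage sketch (the lexeme ID and lemma are illustrative; {@code Datamodel.makeWikidataLexemeIdValue} and {@code TermUpdateBuilder.put} are assumed from this package): * <pre>{@code * LexemeUpdate update = LexemeUpdateBuilder * .forEntityId(Datamodel.makeWikidataLexemeIdValue("L1")) * .updateLemmas(TermUpdateBuilder.create() * .put(Datamodel.makeMonolingualTextValue("pain", "fr")) * .build()) * .build(); * }</pre>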
* * @param lexemeId * ID of the lexeme that is to be updated * @return update builder object * @throws NullPointerException * if {@code lexemeId} is {@code null} * @throws IllegalArgumentException * if {@code lexemeId} is a placeholder ID */ public static LexemeUpdateBuilder forEntityId(LexemeIdValue lexemeId) { return new LexemeUpdateBuilder(lexemeId, 0); } /** * Creates new builder object for constructing update of given base lexeme * entity revision. Provided lexeme document might not represent the latest * revision of the lexeme entity as currently stored in Wikibase. It will be * used for validation in builder methods. If the document has revision ID, it * will be used to detect edit conflicts. * * @param revision * base lexeme entity revision to be updated * @return update builder object * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ public static LexemeUpdateBuilder forBaseRevision(LexemeDocument revision) { return new LexemeUpdateBuilder(revision); } @Override LexemeIdValue getEntityId() { return (LexemeIdValue) super.getEntityId(); } @Override LexemeDocument getBaseRevision() { return (LexemeDocument) super.getBaseRevision(); } @Override public LexemeUpdateBuilder updateStatements(StatementUpdate update) { super.updateStatements(update); return this; } /** * Sets lexeme language. If base entity revision was provided, attempt to * replace lexeme language with the same value is silently ignored, resulting in * empty update. * * @param language * new lexeme language * @return {@code this} (fluent method) * @throws NullPointerException * if {@code language} is {@code null} * @throws IllegalArgumentException * if {@code language} is an invalid ID */ public LexemeUpdateBuilder setLanguage(ItemIdValue language) { Objects.requireNonNull(language, "Language cannot be null."); Validate.isTrue(!language.isPlaceholder(), "Language ID cannot be a placeholder ID."); if (getBaseRevision() != null && getBaseRevision().getLanguage().equals(language)) { this.language = null; return this; } this.language = language; return this; } /** * Sets lexical category of the lexeme. If base entity revision was provided, * attempt to replace lexical category with the same value is silently ignored, * resulting in empty update. * * @param category * new lexical category * @return {@code this} (fluent method) * @throws NullPointerException * if {@code category} is {@code null} * @throws IllegalArgumentException * if {@code category} is an invalid ID */ public LexemeUpdateBuilder setLexicalCategory(ItemIdValue category) { Objects.requireNonNull(category, "Lexical category cannot be null."); Validate.isTrue(!category.isPlaceholder(), "Lexical category ID cannot be a placeholder ID."); if (getBaseRevision() != null && getBaseRevision().getLexicalCategory().equals(category)) { lexicalCategory = null; return this; } lexicalCategory = category; return this; } /** * Updates lemmas. If this method is called multiple times, changes are * accumulated. If base entity revision was provided, redundant changes are * silently ignored, resulting in empty update. * * @param update * changes in lemmas * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} */ public LexemeUpdateBuilder updateLemmas(TermUpdate update) { Objects.requireNonNull(update, "Update cannot be null."); TermUpdateBuilder combined = getBaseRevision() != null ? 
TermUpdateBuilder.forTerms(getBaseRevision().getLemmas().values()) : TermUpdateBuilder.create(); combined.append(lemmas); combined.append(update); lemmas = combined.build(); return this; } /** * Adds sense to the lexeme. If {@code sense} has an ID (perhaps because it is a * modified copy of another sense), its ID is stripped to ensure the sense is * added and no other sense is modified. * * @param sense * new sense to add * @return {@code this} (fluent method) * @throws NullPointerException * if {@code sense} is {@code null} */ public LexemeUpdateBuilder addSense(SenseDocument sense) { Objects.requireNonNull(sense, "Sense cannot be null."); if (!sense.getEntityId().isPlaceholder()) { sense = sense.withEntityId(SenseIdValue.NULL); } if (sense.getRevisionId() != 0) { sense = sense.withRevisionId(0); } addedSenses.add(sense); return this; } /** * Updates existing sense in the lexeme. If this method is called multiple * times, changes are accumulated. If base entity revision was provided, the * update is checked against it and redundant changes are silently ignored, * resulting in empty update. * * @param update * update of existing sense * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if the sense does not exist in base revision (if available) or * the update cannot be applied to it * @throws IllegalStateException * if the sense was removed by calling * {@link #removeSense(SenseIdValue)} */ public LexemeUpdateBuilder updateSense(SenseUpdate update) { Objects.requireNonNull(update, "Sense update cannot be null."); SenseIdValue id = update.getEntityId(); Validate.validState(!removedSenses.contains(id), "Cannot update removed sense."); SenseUpdateBuilder builder; if (getBaseRevision() != null) { SenseDocument original = getBaseRevision().getSenses().stream() .filter(s -> s.getEntityId().equals(id)) .findFirst().orElse(null); Validate.isTrue(original != null, "Cannot update sense that is not in the base revision."); builder = SenseUpdateBuilder.forBaseRevision(original.withRevisionId(getBaseRevisionId())); } else { builder = SenseUpdateBuilder.forBaseRevisionId(id, getBaseRevisionId()); } SenseUpdate prior = updatedSenses.get(id); if (prior != null) { builder.append(prior); } builder.append(update); SenseUpdate combined = builder.build(); if (!combined.isEmpty()) { updatedSenses.put(id, combined); } else { updatedSenses.remove(id); } return this; } /** * Removes existing sense from the lexeme. Removing the same sense ID twice is * silently tolerated. Any prior changes made by calling * {@link #updateSense(SenseUpdate)} are discarded. * * @param senseId * ID of the removed sense * @return {@code this} (fluent method) * @throws NullPointerException * if {@code senseId} is {@code null} * @throws IllegalArgumentException * if {@code senseId} is not valid or if such ID does not exist in * base revision (if available) */ public LexemeUpdateBuilder removeSense(SenseIdValue senseId) { Objects.requireNonNull(senseId, "Sense ID cannot be null."); Validate.isTrue(!senseId.isPlaceholder(), "ID of removed sense cannot be a placeholder ID."); if (getBaseRevision() != null) { Validate.isTrue(getBaseRevision().getSenses().stream().anyMatch(s -> s.getEntityId().equals(senseId)), "Cannot remove sense that is not in the base revision."); } removedSenses.add(senseId); updatedSenses.remove(senseId); return this; } /** * Adds form to the lexeme. 
If {@code form} has an ID (perhaps because it is a * modified copy of another form), its ID is stripped to ensure the form is * added and no other form is modified. * * @param form * new form to add * @return {@code this} (fluent method) * @throws NullPointerException * if {@code form} is {@code null} */ public LexemeUpdateBuilder addForm(FormDocument form) { Objects.requireNonNull(form, "Form cannot be null."); if (!form.getEntityId().isPlaceholder()) { form = form.withEntityId(FormIdValue.NULL); } if (form.getRevisionId() != 0) { form = form.withRevisionId(0); } addedForms.add(form); return this; } /** * Updates existing form in the lexeme. If this method is called multiple times, * changes are accumulated. If base entity revision was provided, the update is * checked against it and redundant changes are silently ignored, resulting in * empty update. * * @param update * update of existing form * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if the form does not exist in base revision (if available) or the * update cannot be applied to it * @throws IllegalStateException * if the form was removed by calling * {@link #removeForm(FormIdValue)} */ public LexemeUpdateBuilder updateForm(FormUpdate update) { Objects.requireNonNull(update, "Form update cannot be null."); FormIdValue id = update.getEntityId(); Validate.validState(!removedForms.contains(id), "Cannot update removed form."); FormUpdateBuilder builder; if (getBaseRevision() != null) { FormDocument original = getBaseRevision().getForms().stream() .filter(s -> s.getEntityId().equals(id)) .findFirst().orElse(null); Validate.isTrue(original != null, "Cannot update form that is not in the base revision."); builder = FormUpdateBuilder.forBaseRevision(original.withRevisionId(getBaseRevisionId())); } else { builder = FormUpdateBuilder.forBaseRevisionId(id, getBaseRevisionId()); } FormUpdate prior = updatedForms.get(id); if (prior != null) { builder.append(prior); } builder.append(update); FormUpdate combined = builder.build(); if (!combined.isEmpty()) { updatedForms.put(id, combined); } else { updatedForms.remove(id); } return this; } /** * Removes existing form from the lexeme. Calling this method overrides any * previous changes made to the same form ID by {@link #updateForm(FormUpdate)}. * Removing the same form ID twice is silently tolerated. * * @param formId * ID of the removed form * @return {@code this} (fluent method) * @throws NullPointerException * if {@code formId} is {@code null} * @throws IllegalArgumentException * if {@code formId} is not valid or if such ID does not exist in * current version of the lexeme document (if available) */ public LexemeUpdateBuilder removeForm(FormIdValue formId) { Objects.requireNonNull(formId, "Form ID cannot be null."); Validate.isTrue(!formId.isPlaceholder(), "ID of removed form cannot be a placeholder ID."); if (getBaseRevision() != null) { Validate.isTrue(getBaseRevision().getForms().stream().anyMatch(s -> s.getEntityId().equals(formId)), "Cannot remove form that is not in the base revision."); } removedForms.add(formId); updatedForms.remove(formId); return this; } /** * Replays all changes in provided update into this builder object. Changes from * the update are added on top of changes already present in this builder * object. 
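* <p>
* For example, a sketch that merges two prepared updates (both update objects
* are assumed to target the same lexeme as this builder):
* <pre>{@code
* LexemeUpdate merged = LexemeUpdateBuilder
*     .forEntityId(lexemeId)
*     .append(firstUpdate)
*     .append(secondUpdate)
*     .build();
* }</pre>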
* * @param update * lexeme update to replay * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if {@code update} cannot be applied to base entity revision (if * available) */ public LexemeUpdateBuilder append(LexemeUpdate update) { super.append(update); if (update.getLanguage().isPresent()) { setLanguage(update.getLanguage().get()); } if (update.getLexicalCategory().isPresent()) { setLexicalCategory(update.getLexicalCategory().get()); } updateLemmas(update.getLemmas()); for (SenseDocument sense : update.getAddedSenses()) { addSense(sense); } for (SenseUpdate sense : update.getUpdatedSenses().values()) { updateSense(sense); } for (SenseIdValue senseId : update.getRemovedSenses()) { removeSense(senseId); } for (FormDocument form : update.getAddedForms()) { addForm(form); } for (FormUpdate form : update.getUpdatedForms().values()) { updateForm(form); } for (FormIdValue formId : update.getRemovedForms()) { removeForm(formId); } return this; } @Override public LexemeUpdate build() { return Datamodel.makeLexemeUpdate(getEntityId(), getBaseRevisionId(), language, lexicalCategory, lemmas, statements, addedSenses, updatedSenses.values(), removedSenses, addedForms, updatedForms.values(), removedForms); } } MediaInfoUpdateBuilder.java000066400000000000000000000111011444772566300363550ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; /** * Builder for incremental construction of {@link MediaInfoUpdate} objects. */ public class MediaInfoUpdateBuilder extends LabeledDocumentUpdateBuilder { private MediaInfoUpdateBuilder(MediaInfoIdValue mediaInfoId, long revisionId) { super(mediaInfoId, revisionId); } private MediaInfoUpdateBuilder(MediaInfoDocument revision) { super(revision); } /** * Creates new builder object for constructing update of media entity with given * revision ID. 
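* <p>
* A minimal sketch (the MediaInfo ID, revision number, and caption below are
* hypothetical):
* <pre>{@code
* MediaInfoUpdate update = MediaInfoUpdateBuilder
*     .forBaseRevisionId(Datamodel.makeWikimediaCommonsMediaInfoIdValue("M1"), 1234567890L)
*     .updateLabels(TermUpdateBuilder.create()
*         .put(Datamodel.makeMonolingualTextValue("A sample caption", "en"))
*         .build())
*     .build();
* }</pre>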
* * @param mediaInfoId * ID of the media entity that is to be updated * @param revisionId * ID of the base media entity revision to be updated or zero if not * available * @return update builder object * @throws NullPointerException * if {@code mediaInfoId} is {@code null} * @throws IllegalArgumentException * if {@code mediaInfoId} is a placeholder ID */ public static MediaInfoUpdateBuilder forBaseRevisionId(MediaInfoIdValue mediaInfoId, long revisionId) { return new MediaInfoUpdateBuilder(mediaInfoId, revisionId); } /** * Creates new builder object for constructing update of media entity with given * ID. * * @param mediaInfoId * ID of the media entity that is to be updated * @return update builder object * @throws NullPointerException * if {@code mediaInfoId} is {@code null} * @throws IllegalArgumentException * if {@code mediaInfoId} is a placeholder ID */ public static MediaInfoUpdateBuilder forEntityId(MediaInfoIdValue mediaInfoId) { return new MediaInfoUpdateBuilder(mediaInfoId, 0); } /** * Creates new builder object for constructing update of given base media entity * revision. Provided media document might not represent the latest revision of * the media entity as currently stored in Wikibase. It will be used for * validation in builder methods. If the document has revision ID, it will be * used to detect edit conflicts. * * @param revision * base media entity revision to be updated * @return update builder object * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ public static MediaInfoUpdateBuilder forBaseRevision(MediaInfoDocument revision) { return new MediaInfoUpdateBuilder(revision); } @Override MediaInfoIdValue getEntityId() { return (MediaInfoIdValue) super.getEntityId(); } @Override MediaInfoDocument getBaseRevision() { return (MediaInfoDocument) super.getBaseRevision(); } @Override public MediaInfoUpdateBuilder updateStatements(StatementUpdate update) { super.updateStatements(update); return this; } @Override public MediaInfoUpdateBuilder updateLabels(TermUpdate update) { super.updateLabels(update); return this; } /** * Replays all changes in provided update into this builder object. Changes from * the update are added on top of changes already present in this builder * object. * * @param update * media update to replay * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if {@code update} cannot be applied to base entity revision (if * available) */ public MediaInfoUpdateBuilder append(MediaInfoUpdate update) { super.append(update); return this; } @Override public MediaInfoUpdate build() { return Datamodel.makeMediaInfoUpdate(getEntityId(), getBaseRevisionId(), labels, statements); } } PropertyDocumentBuilder.java000066400000000000000000000124331444772566300367130ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; /** * Builder class to construct {@link PropertyDocument} objects. * * @author Markus Kroetzsch * */ public class PropertyDocumentBuilder extends EntityDocumentBuilder { private final DatatypeIdValue datatype; /** * Constructor when building the property document from scratch. * * @param propertyIdValue * id of the newly constructed property document * @param datatype * the datatype of the newly constructed property document */ protected PropertyDocumentBuilder(PropertyIdValue propertyIdValue, DatatypeIdValue datatype) { super(propertyIdValue); this.datatype = datatype; } /** * Constructor when building the property document from an existing one. * * @param initialDocument * the initial property document to start the build from */ protected PropertyDocumentBuilder(PropertyDocument initialDocument) { super(initialDocument); this.datatype = initialDocument.getDatatype(); } /** * Starts the construction of an {@link PropertyDocument} with the given id. * * @param propertyIdValue * id of the newly constructed property document * @param datatype * the datatype of the newly constructed property document * @return builder object to continue construction */ public static PropertyDocumentBuilder forPropertyIdAndDatatype( PropertyIdValue propertyIdValue, DatatypeIdValue datatype) { return new PropertyDocumentBuilder(propertyIdValue, datatype); } /** * Starts the construction of an {@link PropertyDocument} from the existing * document. * * @param initialDocument * the existing document to start the build from * @return builder object to continue construction */ public static PropertyDocumentBuilder fromPropertyDocument( PropertyDocument initialDocument) { return new PropertyDocumentBuilder(initialDocument); } /** * Starts the construction of an {@link PropertyDocument} with the given id. * * @param propertyIdValue * id of the newly constructed property document * @param datatypeId * the datatype id of the newly constructed property document, * e.g., {@link DatatypeIdValue#DT_ITEM}. * @return builder object to continue construction * @deprecated use {@link #forPropertyIdAndJsonDatatype(PropertyIdValue, String)} */ public static PropertyDocumentBuilder forPropertyIdAndDatatype( PropertyIdValue propertyIdValue, String datatypeId) { return forPropertyIdAndDatatype(propertyIdValue, factory.getDatatypeIdValue(datatypeId)); } /** * Starts the construction of an {@link PropertyDocument} with the given id. * * @param propertyIdValue * id of the newly constructed property document * @param datatypeId * the datatype id of the newly constructed property document, * e.g., {@link DatatypeIdValue#DT_ITEM}. 
* @return builder object to continue construction */ public static PropertyDocumentBuilder forPropertyIdAndJsonDatatype( PropertyIdValue propertyIdValue, String datatypeId) { return forPropertyIdAndDatatype(propertyIdValue, factory.getDatatypeIdValueFromJsonId(datatypeId)); } /** * Changes the entity value id for the constructed document. * See {@link EntityDocument#getEntityId()}. * * @param entityId * the entity id, which must be a PropertyIdValue * @return builder object to continue construction */ @Override public PropertyDocumentBuilder withEntityId(EntityIdValue entityId) { if (!(entityId instanceof PropertyIdValue)) { throw new IllegalArgumentException("The entity id of a PropertyDocument must be a PropertyIdValue."); } return super.withEntityId(entityId); } /** * Returns the {@link PropertyDocument} that has been built. * * @return constructed property document * @throws IllegalStateException * if the object was built already */ @Override public PropertyDocument build() { prepareBuild(); return factory.getPropertyDocument( (PropertyIdValue) this.entityIdValue, this.labels, this.descriptions, this.aliases, getStatementGroups(), this.datatype, this.revisionId); } @Override protected PropertyDocumentBuilder getThis() { return this; } } PropertyUpdateBuilder.java000066400000000000000000000116501444772566300363570ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; /** * Builder for incremental construction of {@link PropertyUpdate} objects. */ public class PropertyUpdateBuilder extends TermedDocumentUpdateBuilder { private PropertyUpdateBuilder(PropertyIdValue propertyId, long revisionId) { super(propertyId, revisionId); } private PropertyUpdateBuilder(PropertyDocument revision) { super(revision); } /** * Creates new builder object for constructing update of property entity with * given revision ID.
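* <p>
* A minimal sketch (the property ID, revision number, and description below
* are hypothetical):
* <pre>{@code
* PropertyUpdate update = PropertyUpdateBuilder
*     .forBaseRevisionId(Datamodel.makeWikidataPropertyIdValue("P1"), 1234567890L)
*     .updateDescriptions(TermUpdateBuilder.create()
*         .put(Datamodel.makeMonolingualTextValue("sample description", "en"))
*         .build())
*     .build();
* }</pre>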
* * @param propertyId * ID of the property entity that is to be updated * @param revisionId * ID of the base property revision to be updated or zero if not * available * @return update builder object * @throws NullPointerException * if {@code propertyId} is {@code null} * @throws IllegalArgumentException * if {@code propertyId} is a placeholder ID */ public static PropertyUpdateBuilder forBaseRevisionId(PropertyIdValue propertyId, long revisionId) { return new PropertyUpdateBuilder(propertyId, revisionId); } /** * Creates new builder object for constructing update of property entity with * given ID. * * @param propertyId * ID of the property entity that is to be updated * @return update builder object * @throws NullPointerException * if {@code propertyId} is {@code null} * @throws IllegalArgumentException * if {@code propertyId} is a placeholder ID */ public static PropertyUpdateBuilder forEntityId(PropertyIdValue propertyId) { return new PropertyUpdateBuilder(propertyId, 0); } /** * Creates new builder object for constructing update of given base property * entity revision. Provided property document might not represent the latest * revision of the property entity as currently stored in Wikibase. It will be * used for validation in builder methods. If the document has revision ID, it * will be used to detect edit conflicts. * * @param revision * base property entity revision to be updated * @return update builder object * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ public static PropertyUpdateBuilder forBaseRevision(PropertyDocument revision) { return new PropertyUpdateBuilder(revision); } @Override PropertyIdValue getEntityId() { return (PropertyIdValue) super.getEntityId(); } @Override PropertyDocument getBaseRevision() { return (PropertyDocument) super.getBaseRevision(); } @Override public PropertyUpdateBuilder updateStatements(StatementUpdate update) { super.updateStatements(update); return this; } @Override public PropertyUpdateBuilder updateLabels(TermUpdate update) { super.updateLabels(update); return this; } @Override public PropertyUpdateBuilder updateDescriptions(TermUpdate update) { super.updateDescriptions(update); return this; } @Override public PropertyUpdateBuilder updateAliases(String language, AliasUpdate update) { super.updateAliases(language, update); return this; } /** * Replays all changes in provided update into this builder object. Changes from * the update are added on top of changes already present in this builder * object. 
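* <p>
* For example (assuming {@code propertyDocument} and {@code pendingUpdate}
* refer to the same property):
* <pre>{@code
* PropertyUpdateBuilder builder = PropertyUpdateBuilder.forBaseRevision(propertyDocument);
* builder.append(pendingUpdate);
* PropertyUpdate merged = builder.build();
* }</pre>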
* * @param update * property update to replay * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if {@code update} cannot be applied to base entity revision (if * available) */ public PropertyUpdateBuilder append(PropertyUpdate update) { super.append(update); return this; } @Override public PropertyUpdate build() { return Datamodel.makePropertyUpdate(getEntityId(), getBaseRevisionId(), labels, descriptions, aliases, statements); } } ReferenceBuilder.java000066400000000000000000000102001444772566300352540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Value; public class ReferenceBuilder extends AbstractDataObjectBuilder { final HashMap> snaks = new HashMap<>(); /** * Constructor. */ protected ReferenceBuilder() { } /** * Starts the construction of a {@link Reference}. * * @return builder object to continue construction */ public static ReferenceBuilder newInstance() { return new ReferenceBuilder(); } @Override public Reference build() { prepareBuild(); return factory.getReference(getSnakGroups()); } /** * Adds the given property and value to the constructed reference. * * @param propertyIdValue * the property to add * @param value * the value to add * @return builder object to continue construction */ public ReferenceBuilder withPropertyValue(PropertyIdValue propertyIdValue, Value value) { getSnakList(propertyIdValue).add( factory.getValueSnak(propertyIdValue, value)); return getThis(); } /** * Adds a {@link SomeValueSnak} with the given property to the constructed * reference. *
<p>
* Note that it might not be meaningful to use {@link SomeValueSnak} in a * reference, depending on the policies of the wiki. * * @param propertyIdValue * the property of the snak * @return builder object to continue construction */ public ReferenceBuilder withSomeValue(PropertyIdValue propertyIdValue) { getSnakList(propertyIdValue).add( factory.getSomeValueSnak(propertyIdValue)); return getThis(); } /** * Adds a {@link NoValueSnak} with the given property to the constructed * reference. *
<p>
* Note that it might not be meaningful to use {@link NoValueSnak} in a * reference. It is usually implicitly assumed that all snaks that are not * given have no value for a particular reference. Otherwise one would need * large numbers of {@link NoValueSnak} entries for every reference! * * @param propertyIdValue * the property of the snak * @return builder object to continue construction */ public ReferenceBuilder withNoValue(PropertyIdValue propertyIdValue) { getSnakList(propertyIdValue).add( factory.getNoValueSnak(propertyIdValue)); return getThis(); } @Override protected ReferenceBuilder getThis() { return this; } /** * Returns a list of {@link SnakGroup} objects for the currently stored * snaks. * * @return */ protected List getSnakGroups() { ArrayList result = new ArrayList<>(this.snaks.size()); for (ArrayList statementList : this.snaks.values()) { result.add(factory.getSnakGroup(statementList)); } return result; } /** * Returns the list of {@link Snak} objects for a given property. * * @param propertyIdValue * @return */ protected ArrayList getSnakList(PropertyIdValue propertyIdValue) { if(!snaks.containsKey(propertyIdValue)) { snaks.put(propertyIdValue, new ArrayList<>()); } return this.snaks.get(propertyIdValue); } } SenseUpdateBuilder.java000066400000000000000000000122431444772566300356070ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.Objects; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; /** * Builder for incremental construction of {@link SenseUpdate} objects. */ public class SenseUpdateBuilder extends StatementDocumentUpdateBuilder { private TermUpdate glosses = TermUpdate.EMPTY; private SenseUpdateBuilder(SenseIdValue senseId, long revisionId) { super(senseId, revisionId); } private SenseUpdateBuilder(SenseDocument revision) { super(revision); } /** * Creates new builder object for constructing update of sense entity with given * revision ID. * * @param senseId * ID of the sense that is to be updated * @param revisionId * ID of the base sense revision to be updated or zero if not * available * @return update builder object * @throws NullPointerException * if {@code senseId} is {@code null} * @throws IllegalArgumentException * if {@code senseId} is a placeholder ID */ public static SenseUpdateBuilder forBaseRevisionId(SenseIdValue senseId, long revisionId) { return new SenseUpdateBuilder(senseId, revisionId); } /** * Creates new builder object for constructing update of sense entity with given * ID. 
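* <p>
* A minimal sketch (the sense ID and gloss below are hypothetical):
* <pre>{@code
* SenseUpdate update = SenseUpdateBuilder
*     .forEntityId(Datamodel.makeWikidataSenseIdValue("L1-S1"))
*     .updateGlosses(TermUpdateBuilder.create()
*         .put(Datamodel.makeMonolingualTextValue("sample gloss", "en"))
*         .build())
*     .build();
* }</pre>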
* * @param senseId * ID of the sense that is to be updated * @return update builder object * @throws NullPointerException * if {@code senseId} is {@code null} * @throws IllegalArgumentException * if {@code senseId} is a placeholder ID */ public static SenseUpdateBuilder forEntityId(SenseIdValue senseId) { return new SenseUpdateBuilder(senseId, 0); } /** * Creates new builder object for constructing update of given base sense entity * revision. Provided sense document might not represent the latest revision of * the sense entity as currently stored in Wikibase. It will be used for * validation in builder methods. If the document has revision ID, it will be * used to detect edit conflicts. * * @param revision * base sense entity revision to be updated * @return update builder object * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ public static SenseUpdateBuilder forBaseRevision(SenseDocument revision) { return new SenseUpdateBuilder(revision); } @Override SenseIdValue getEntityId() { return (SenseIdValue) super.getEntityId(); } @Override SenseDocument getBaseRevision() { return (SenseDocument) super.getBaseRevision(); } @Override public SenseUpdateBuilder updateStatements(StatementUpdate update) { super.updateStatements(update); return this; } /** * Updates sense glosses. If this method is called multiple times, changes are * accumulated. If base entity revision was provided, redundant changes are * silently ignored, resulting in empty update. * * @param update * changes in sense glosses * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} */ public SenseUpdateBuilder updateGlosses(TermUpdate update) { Objects.requireNonNull(update, "Update cannot be null."); TermUpdateBuilder combined = getBaseRevision() != null ? TermUpdateBuilder.forTerms(getBaseRevision().getGlosses().values()) : TermUpdateBuilder.create(); combined.append(glosses); combined.append(update); glosses = combined.build(); return this; } /** * Replays all changes in provided update into this builder object. Changes from * the update are added on top of changes already present in this builder * object. * * @param update * sense update to replay * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if {@code update} cannot be applied to base entity revision (if * available) */ public SenseUpdateBuilder append(SenseUpdate update) { super.append(update); updateGlosses(update.getGlosses()); return this; } @Override public SenseUpdate build() { return Datamodel.makeSenseUpdate(getEntityId(), getBaseRevisionId(), glosses, statements); } } StatementBuilder.java000066400000000000000000000212231444772566300353310ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.Value; public class StatementBuilder extends AbstractDataObjectBuilder<StatementBuilder, Statement> { private final EntityIdValue subject; private final PropertyIdValue mainProperty; private Value mainValue = null; private boolean noMainValue = false; private final HashMap<PropertyIdValue, ArrayList<Snak>> qualifiers = new HashMap<>(); private String statementId = ""; private StatementRank rank = StatementRank.NORMAL; private final ArrayList<Reference> references = new ArrayList<>(); /** * Constructor. * * @param subject * id of the entity that the constructed statement refers to * @param property * the id of the main property of the constructed statement */ protected StatementBuilder(EntityIdValue subject, PropertyIdValue property) { this.subject = subject; this.mainProperty = property; } /** * Starts the construction of a {@link Statement} with the given subject. * * @param subject * id of the entity that the constructed statement refers to * @param property * the id of the main property of the constructed statement * @return builder object to continue construction */ public static StatementBuilder forSubjectAndProperty(EntityIdValue subject, PropertyIdValue property) { return new StatementBuilder(subject, property); } @Override public Statement build() { prepareBuild(); return factory.getStatement(subject, getMainSnak(), getQualifierGroups(), references, rank, statementId); } /** * Sets the rank for the constructed statement. * * @param rank * the rank of the statement * @return builder object to continue construction */ public StatementBuilder withRank(StatementRank rank) { this.rank = rank; return getThis(); } /** * Sets the id for the constructed statement. * * @param statementId * the id of the statement * @return builder object to continue construction */ public StatementBuilder withId(String statementId) { this.statementId = statementId; return getThis(); } /** * Sets the main value for the constructed statement. * * @param value * the main value of the statement * @return builder object to continue construction */ public StatementBuilder withValue(Value value) { this.mainValue = value; return getThis(); } /** * Sets the main snak of the statement to be a {@link SomeValueSnak}. * * @return builder object to continue construction */ public StatementBuilder withSomeValue() { this.mainValue = null; this.noMainValue = false; return getThis(); } /** * Sets the main snak of the statement to be a {@link NoValueSnak}. 
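* <p>
* For example, a sketch of a statement asserting the absence of a value
* (the subject and property IDs below are hypothetical):
* <pre>{@code
* Statement statement = StatementBuilder
*     .forSubjectAndProperty(Datamodel.makeWikidataItemIdValue("Q1"),
*         Datamodel.makeWikidataPropertyIdValue("P1"))
*     .withNoValue()
*     .build();
* }</pre>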
* * @return builder object to continue construction */ public StatementBuilder withNoValue() { this.mainValue = null; this.noMainValue = true; return getThis(); } /** * Adds a qualifier with the given property and value to the constructed * statement. * * @param propertyIdValue * the property of the qualifier * @param value * the value of the qualifier * @return builder object to continue construction */ public StatementBuilder withQualifierValue(PropertyIdValue propertyIdValue, Value value) { withQualifier(factory.getValueSnak(propertyIdValue, value)); return getThis(); } /** * Adds a {@link SomeValueSnak} qualifier with the given property to the * constructed statement. * * @param propertyIdValue * the property of the qualifier * @return builder object to continue construction */ public StatementBuilder withQualifierSomeValue( PropertyIdValue propertyIdValue) { withQualifier(factory.getSomeValueSnak(propertyIdValue)); return getThis(); } /** * Adds a {@link NoValueSnak} qualifier with the given property to the * constructed statement. *
<p>
* Note that it might not be meaningful to use {@link NoValueSnak} in a * qualifier. It is usually implicitly assumed that all qualifiers that are * not given have no value for a particular statement. Otherwise one would * need large numbers of {@link NoValueSnak} qualifiers for every statement! * * @param propertyIdValue * the property of the qualifier * @return builder object to continue construction */ public StatementBuilder withQualifierNoValue(PropertyIdValue propertyIdValue) { withQualifier(factory.getNoValueSnak(propertyIdValue)); return getThis(); } /** * Adds a qualifier {@link Snak} to the constructed statement. * * @param qualifier * the qualifier to add * @return builder object to continue construction */ public StatementBuilder withQualifier(Snak qualifier) { getQualifierList(qualifier.getPropertyId()).add(qualifier); return getThis(); } /** * Adds all qualifiers from the given {@link SnakGroup} to the constructed * statement. * * @param qualifiers * the group of qualifiers to add * @return builder object to continue construction */ public StatementBuilder withQualifiers(SnakGroup qualifiers) { getQualifierList(qualifiers.getProperty()).addAll(qualifiers); return getThis(); } /** * Adds all qualifiers from the given list of {@link SnakGroup} to the * constructed statement. This is handy to copy all qualifiers from a given * statement. * * @param qualifiers * the list of groups of qualifiers to add * @return builder object to continue construction */ public StatementBuilder withQualifiers(List qualifiers) { for (SnakGroup sg : qualifiers) { withQualifiers(sg); } return getThis(); } /** * Adds a reference to the constructed statement. * * @param reference * the reference to be added * @return builder object to continue construction */ public StatementBuilder withReference(Reference reference) { this.references.add(reference); return getThis(); } /** * Adds a list of references to the constructed statement. * * @param references * the references to be added * @return builder object to continue construction */ public StatementBuilder withReferences(List references) { this.references.addAll(references); return getThis(); } @Override protected StatementBuilder getThis() { return this; } /** * Returns a list of {@link SnakGroup} objects for the currently stored * qualifiers. * * @return */ protected List getQualifierGroups() { ArrayList result = new ArrayList<>(this.qualifiers.size()); for (ArrayList statementList : this.qualifiers.values()) { result.add(factory.getSnakGroup(statementList)); } return result; } /** * Returns the list of {@link Snak} objects for a given qualifier property. * * @param propertyIdValue * @return */ protected ArrayList getQualifierList(PropertyIdValue propertyIdValue) { if(!qualifiers.containsKey(propertyIdValue)) { qualifiers.put(propertyIdValue, new ArrayList<>()); } return qualifiers.get(propertyIdValue); } /** * Returns the main {@link Snak} object for the constructed statement. 
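* <p>
* Illustrative mapping of builder state to the snak returned here (the
* {@code builder} and {@code value} variables are assumed to be in scope):
* <pre>{@code
* builder.withValue(value);  // getMainSnak() yields a ValueSnak
* builder.withSomeValue();   // getMainSnak() yields a SomeValueSnak
* builder.withNoValue();     // getMainSnak() yields a NoValueSnak
* }</pre>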
* * @return */ protected Snak getMainSnak() { if (this.mainValue != null) { return factory.getValueSnak(this.mainProperty, this.mainValue); } else if (this.noMainValue) { return factory.getNoValueSnak(this.mainProperty); } else { return factory.getSomeValueSnak(this.mainProperty); } } } StatementDocumentUpdateBuilder.java000066400000000000000000000205141444772566300401750ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import java.util.Objects; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; /** * Builder for incremental construction of {@link StatementDocumentUpdate} * objects. * * @see StatementUpdateBuilder */ public abstract class StatementDocumentUpdateBuilder extends EntityUpdateBuilder { StatementUpdate statements = StatementUpdate.EMPTY; /** * Initializes new builder object for constructing update of entity with given * ID. * * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is a placeholder ID */ protected StatementDocumentUpdateBuilder(EntityIdValue entityId, long revisionId) { super(entityId, revisionId); } /** * Initializes new builder object for constructing update of given base entity * revision. * * @param revision * base entity revision to be updated * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ protected StatementDocumentUpdateBuilder(StatementDocument revision) { super(revision); } /** * Creates new builder object for constructing update of entity with given * revision ID. *
<p>
* Supported entity IDs include {@link ItemIdValue}, {@link PropertyIdValue}, * {@link LexemeIdValue}, {@link FormIdValue}, {@link SenseIdValue}, and * {@link MediaInfoIdValue}. * * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it is a * placeholder ID */ public static StatementDocumentUpdateBuilder forBaseRevisionId(EntityIdValue entityId, long revisionId) { Objects.requireNonNull(entityId, "Entity ID cannot be null."); if (entityId instanceof SenseIdValue) { return SenseUpdateBuilder.forBaseRevisionId((SenseIdValue) entityId, revisionId); } if (entityId instanceof FormIdValue) { return FormUpdateBuilder.forBaseRevisionId((FormIdValue) entityId, revisionId); } if (entityId instanceof LexemeIdValue) { return LexemeUpdateBuilder.forBaseRevisionId((LexemeIdValue) entityId, revisionId); } return LabeledDocumentUpdateBuilder.forBaseRevisionId(entityId, revisionId); } /** * Creates new builder object for constructing update of entity with given ID. *
<p>
* Supported entity IDs include {@link ItemIdValue}, {@link PropertyIdValue}, * {@link LexemeIdValue}, {@link FormIdValue}, {@link SenseIdValue}, and * {@link MediaInfoIdValue}. * * @param entityId * ID of the entity that is to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it is a * placeholder ID */ public static StatementDocumentUpdateBuilder forEntityId(EntityIdValue entityId) { return forBaseRevisionId(entityId, 0); } /** * Creates new builder object for constructing update of given base entity * revision. Provided entity document might not represent the latest revision of * the entity as currently stored in Wikibase. It will be used for validation in * builder methods. If the document has revision ID, it will be used to detect * edit conflicts. *
<p>
* Supported entity types include {@link ItemDocument}, * {@link PropertyDocument}, {@link LexemeDocument}, {@link FormDocument}, * {@link SenseDocument}, and {@link MediaInfoDocument}. * * @param revision * base entity revision to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} is of unrecognized type or its ID is a * placeholder ID */ public static StatementDocumentUpdateBuilder forBaseRevision(StatementDocument revision) { Objects.requireNonNull(revision, "Base entity revision cannot be null."); if (revision instanceof SenseDocument) { return SenseUpdateBuilder.forBaseRevision((SenseDocument) revision); } if (revision instanceof FormDocument) { return FormUpdateBuilder.forBaseRevision((FormDocument) revision); } if (revision instanceof LexemeDocument) { return LexemeUpdateBuilder.forBaseRevision((LexemeDocument) revision); } if (revision instanceof LabeledStatementDocument) { return LabeledDocumentUpdateBuilder .forBaseRevision((LabeledStatementDocument) revision); } throw new IllegalArgumentException("Unrecognized entity document type."); } @Override StatementDocument getBaseRevision() { return (StatementDocument) super.getBaseRevision(); } /** * Updates entity statements. If this method is called multiple times, changes * are accumulated. If base entity revision was provided, the update is checked * against it and redundant changes are silently ignored, resulting in empty * update. * * @param update * statement update, possibly empty * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if replaced or removed statement is not present in current entity * revision (if available) */ public StatementDocumentUpdateBuilder updateStatements(StatementUpdate update) { Objects.requireNonNull(update, "Update cannot be null."); StatementUpdateBuilder combined = getBaseRevision() != null ? StatementUpdateBuilder.forStatementGroups(getEntityId(), getBaseRevision().getStatementGroups()) : StatementUpdateBuilder.create(getEntityId()); combined.append(statements); combined.append(update); statements = combined.build(); return this; } void append(StatementDocumentUpdate update) { Objects.requireNonNull(update, "Update cannot be null."); updateStatements(update.getStatements()); } /** * Creates new {@link StatementDocumentUpdate} object with contents of this * builder object. * * @return constructed object */ @Override public abstract StatementDocumentUpdate build(); } StatementUpdateBuilder.java000066400000000000000000000334501444772566300365010ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toMap; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; /** * Builder for incremental construction of {@link StatementUpdate} objects. * * @see StatementDocumentUpdateBuilder */ public class StatementUpdateBuilder { private final Map base; private EntityIdValue subject; private final List added = new ArrayList<>(); private final Map replaced = new HashMap<>(); private final Set removed = new HashSet<>(); private StatementUpdateBuilder(EntityIdValue subject, Collection base) { if (subject != null) { Validate.isTrue(!subject.isPlaceholder(), "Subject cannot be a placeholder ID."); this.subject = subject; } if (base != null) { for (Statement statement : base) { Objects.requireNonNull(statement, "Base document statements cannot be null."); Validate.isTrue( !statement.getSubject().isPlaceholder(), "Statement subject cannot be a placeholder ID."); if (this.subject != null) { Validate.isTrue(this.subject.equals(statement.getSubject()), "Inconsistent statement subject."); } else { this.subject = statement.getSubject(); } Validate.notBlank(statement.getStatementId(), "Base document statement must have valid ID."); } Validate.isTrue( base.stream().map(s -> s.getSubject()).distinct().count() <= 1, "Base document statements must all refer to the same subject."); Validate.isTrue( base.stream().map(s -> s.getStatementId()).distinct().count() == base.size(), "Base document statements must have unique IDs."); this.base = base.stream().collect(toMap(s -> s.getStatementId(), s -> s)); } else { this.base = null; } } /** * Creates new builder object for constructing statement update. * * @return update builder object */ public static StatementUpdateBuilder create() { return new StatementUpdateBuilder(null, null); } /** * Creates new builder object for constructing statement update of given * subject. All added or replaced statements must have the same subject ID. * * @param subject * statement subject or {@code null} for unspecified ID * @return update builder object * @throws IllegalArgumentException * if subject is a placeholder ID */ public static StatementUpdateBuilder create(EntityIdValue subject) { return new StatementUpdateBuilder(subject, null); } /** * Creates new builder object for constructing update of given base revision * statements. Provided statements will be used to check correctness of changes. *
<p>
* Since all changes will be checked after the {@link StatementUpdate} is passed * to {@link EntityDocumentBuilder} anyway, it is usually unnecessary to use * this method. It is simpler to initialize the builder with {@link #create()}. * * @param statements * statements from base revision of the document * @return update builder object * @throws NullPointerException * if {@code statements} or any of its items is {@code null} * @throws IllegalArgumentException * if any statement is missing statement ID or statement subjects * are inconsistent or placeholders */ public static StatementUpdateBuilder forStatements(Collection statements) { Objects.requireNonNull(statements, "Base document statement collection cannot be null."); return new StatementUpdateBuilder(null, statements); } /** * Creates new builder object for constructing update of given base revision * statements with given subject. Provided statements will be used to check * correctness of changes. All provided statements as well as added or replaced * statements must have the provided subject ID. *
<p>
* Since all changes will be checked after the {@link StatementUpdate} is passed * to {@link EntityDocumentBuilder} anyway, it is usually unnecessary to use * this method. It is simpler to initialize the builder with {@link #create()}. * * @param subject * statement subject or {@code null} for unspecified ID * @param statements * statements from base revision of the document * @return update builder object * @throws NullPointerException * if {@code statements} or any of its items is {@code null} * @throws IllegalArgumentException * if any statement is missing statement ID or statement subjects * are inconsistent or placeholders */ public static StatementUpdateBuilder forStatements(EntityIdValue subject, Collection statements) { Objects.requireNonNull(statements, "Base document statement collection cannot be null."); return new StatementUpdateBuilder(subject, statements); } /** * Creates new builder object for constructing update of given base revision * statement groups. Provided statements will be used to check correctness of * changes. *
<p>
* Since all changes will be checked after the {@link StatementUpdate} is passed * to {@link EntityDocumentBuilder} anyway, it is usually unnecessary to use * this method. It is simpler to initialize the builder with {@link #create()}. * * @param groups * statement groups from base revision of the document * @return update builder object * @throws NullPointerException * if {@code groups} is {@code null} * @throws IllegalArgumentException * if any group is {@code null} or any statement is missing * statement ID or statement subjects are inconsistent or * placeholders */ public static StatementUpdateBuilder forStatementGroups(Collection groups) { Objects.requireNonNull(groups, "Base document statement group collection cannot be null."); Validate.noNullElements(groups, "Base document statement groups cannot be null."); return new StatementUpdateBuilder(null, groups.stream().flatMap(g -> g.getStatements().stream()).collect(toList())); } /** * Creates new builder object for constructing update of given base revision * statement groups with given subject. Provided statements will be used to * check correctness of changes. All provided statements as well as added or * replaced statements must have the provided subject ID. *
<p>
* Since all changes will be checked after the {@link StatementUpdate} is passed * to {@link EntityDocumentBuilder} anyway, it is usually unnecessary to use * this method. It is simpler to initialize the builder with {@link #create()}. * * @param subject * statement subject or {@code null} for unspecified ID * @param groups * statement groups from base revision of the document * @return update builder object * @throws NullPointerException * if {@code groups} is {@code null} * @throws IllegalArgumentException * if any group is {@code null} or any statement is missing * statement ID or statement subjects are inconsistent or * placeholders */ public static StatementUpdateBuilder forStatementGroups(EntityIdValue subject, Collection groups) { Objects.requireNonNull(groups, "Base document statement group collection cannot be null."); Validate.noNullElements(groups, "Base document statement groups cannot be null."); return new StatementUpdateBuilder(subject, groups.stream().flatMap(g -> g.getStatements().stream()).collect(toList())); } /** * Adds statement to the entity. If {@code statement} has an ID (perhaps because * it is a modified copy of another statement), its ID is stripped to ensure the * statement is added and no other statement is modified. * * @param statement * new statement to add * @return {@code this} (fluent method) * @throws NullPointerException * if {@code statement} is {@code null} * @throws IllegalArgumentException * if statement's subject is inconsistent with other statements or * it is a placeholder ID */ public StatementUpdateBuilder add(Statement statement) { Objects.requireNonNull(statement, "Statement cannot be null."); Validate.isTrue( !statement.getSubject().isPlaceholder(), "Statement subject cannot be a placeholder ID."); if (subject != null) { Validate.isTrue(subject.equals(statement.getSubject()), "Inconsistent statement subject."); } if (!statement.getStatementId().isEmpty()) { statement = statement.withStatementId(""); } added.add(statement); if (subject == null) { subject = statement.getSubject(); } return this; } /** * Replaces existing statement in the entity. Provided {@code statement} must * have statement ID identifying statement to replace. Calling this method * overrides any previous changes made to the same statement ID by this method * or {@link #remove(String)}. *
 * <p>
 * If base revision statements were provided, existence of the statement is
 * checked. Any attempt to replace a statement with an identical statement is
 * silently ignored, resulting in empty update.
 *
 * @param statement
 *            replacement for existing statement
 * @return {@code this} (fluent method)
 * @throws NullPointerException
 *             if {@code statement} is {@code null}
 * @throws IllegalArgumentException
 *             if {@code statement} does not have statement ID or it is not
 *             among base revision statements (if available) or its subject is
 *             inconsistent with other statements or a placeholder ID
 */
public StatementUpdateBuilder replace(Statement statement) {
    Objects.requireNonNull(statement, "Statement cannot be null.");
    Validate.isTrue(
            !statement.getSubject().isPlaceholder(), "Statement subject cannot be a placeholder ID.");
    Validate.notEmpty(statement.getStatementId(), "Statement must have an ID.");
    if (subject != null) {
        Validate.isTrue(subject.equals(statement.getSubject()), "Inconsistent statement subject.");
    }
    if (base != null) {
        Statement original = base.get(statement.getStatementId());
        Validate.isTrue(original != null, "Replaced statement is not in base revision.");
        if (statement.equals(original)) {
            replaced.remove(statement.getStatementId());
            removed.remove(statement.getStatementId());
            return this;
        }
    }
    replaced.put(statement.getStatementId(), statement);
    removed.remove(statement.getStatementId());
    if (subject == null) {
        subject = statement.getSubject();
    }
    return this;
}

/**
 * Removes existing statement from the entity. Calling this method overrides any
 * previous changes made to the same statement ID by
 * {@link #replace(Statement)}. Removing the same statement ID twice is silently
 * tolerated.
 * <p>
* If base revision statements were provided, this method checks that statement * with this ID exists in the base revision. * * @param statementId * ID of the removed statement * @return {@code this} (fluent method) * @throws NullPointerException * if {@code statementId} is {@code null} * @throws IllegalArgumentException * if {@code statementId} is empty or it is not among base revision * statements (if available) */ public StatementUpdateBuilder remove(String statementId) { Validate.notBlank(statementId, "Statement ID must not be empty."); if (base != null) { Statement original = base.get(statementId); Validate.isTrue(original != null, "Removed statement is not in base revision."); } removed.add(statementId); replaced.remove(statementId); return this; } /** * Replays all changes in provided update into this builder object. Changes are * performed as if by calling {@link #add(Statement)}, * {@link #replace(Statement)}, and {@link #remove(String)} methods. * * @param update * statement update to replay * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} * @throws IllegalArgumentException * if updated or removed statement is not among base revision * statements (if available) */ public StatementUpdateBuilder append(StatementUpdate update) { Objects.requireNonNull(update, "Statement update cannot be null."); for (Statement statement : update.getAdded()) { add(statement); } for (Statement statement : update.getReplaced().values()) { replace(statement); } for (String statementId : update.getRemoved()) { remove(statementId); } return this; } /** * Creates new {@link StatementUpdate} object with contents of this builder * object. * * @return constructed object */ public StatementUpdate build() { return Datamodel.makeStatementUpdate(added, replaced.values(), removed); } } TermUpdateBuilder.java000066400000000000000000000134431444772566300354440ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static java.util.stream.Collectors.toMap; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; /** * Builder for incremental construction of {@link TermUpdate} objects. 
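 * <p>
 * For illustration, a minimal usage sketch (the terms shown are arbitrary
 * example values):
 * <pre>{@code
 * TermUpdate update = TermUpdateBuilder.create()
 *     .put(Datamodel.makeMonolingualTextValue("Mount Everest", "en"))
 *     .remove("de")
 *     .build();
 * }</pre>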
 */
public class TermUpdateBuilder {

private final Map<String, MonolingualTextValue> base;
private final Map<String, MonolingualTextValue> modified = new HashMap<>();
private final Set<String> removed = new HashSet<>();

private TermUpdateBuilder(Collection<MonolingualTextValue> base) {
    if (base != null) {
        for (MonolingualTextValue value : base) {
            Objects.requireNonNull(value, "Base document terms cannot be null.");
        }
        Validate.isTrue(
                base.stream().map(v -> v.getLanguageCode()).distinct().count() == base.size(),
                "Base document terms must have unique language codes.");
        this.base = base.stream().collect(toMap(v -> v.getLanguageCode(), v -> v));
    } else {
        this.base = null;
    }
}

/**
 * Creates new builder object for constructing term update.
 *
 * @return update builder object
 */
public static TermUpdateBuilder create() {
    return new TermUpdateBuilder(null);
}

/**
 * Creates new builder object for constructing update of given base revision
 * terms. Provided terms will be used to check correctness of changes.
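 * <p>
 * For example, a sketch (assuming {@code doc} is a labeled document fetched
 * elsewhere):
 * <pre>{@code
 * TermUpdate update = TermUpdateBuilder.forTerms(doc.getLabels().values())
 *     .put(Datamodel.makeMonolingualTextValue("Mount Everest", "en"))
 *     .build();
 * }</pre>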
 * <p>
 * Since all changes will be checked after the {@link TermUpdate} is passed to
 * {@link EntityUpdateBuilder} anyway, it is usually unnecessary to use this
 * method. It is simpler to initialize the builder with {@link #create()}.
 *
 * @param terms
 *            terms from base revision of the document
 * @return update builder object
 * @throws NullPointerException
 *             if {@code terms} or any of its items is {@code null}
 * @throws IllegalArgumentException
 *             if there are duplicate items in {@code terms}
 */
public static TermUpdateBuilder forTerms(Collection<MonolingualTextValue> terms) {
    Objects.requireNonNull(terms, "Base document term collection cannot be null.");
    return new TermUpdateBuilder(terms);
}

/**
 * Adds or changes term. If a term with this language code already exists, it is
 * replaced. Terms with other language codes are not touched. Calling this
 * method overrides any previous changes made with the same language code by
 * this method or {@link #remove(String)}.
 * <p>
 * If base revision terms were provided, attempts to overwrite a term with the
 * same value will be silently ignored, resulting in empty update.
 *
 * @param term
 *            term to add or change
 * @return {@code this} (fluent method)
 * @throws NullPointerException
 *             if {@code term} is {@code null}
 */
public TermUpdateBuilder put(MonolingualTextValue term) {
    Objects.requireNonNull(term, "Term cannot be null.");
    if (base != null) {
        if (term.equals(base.get(term.getLanguageCode()))) {
            modified.remove(term.getLanguageCode());
            removed.remove(term.getLanguageCode());
            return this;
        }
    }
    modified.put(term.getLanguageCode(), term);
    removed.remove(term.getLanguageCode());
    return this;
}

/**
 * Removes term. Terms with other language codes are not touched. Calling this
 * method overrides any previous changes made with the same language code by
 * this method or {@link #put(MonolingualTextValue)}.
 * <p>
 * If base revision terms were provided, attempts to remove missing terms will
 * be silently ignored, resulting in empty update.
 *
 * @param languageCode
 *            language code of the removed term
 * @return {@code this} (fluent method)
 * @throws NullPointerException
 *             if {@code languageCode} is {@code null}
 * @throws IllegalArgumentException
 *             if {@code languageCode} is blank
 */
public TermUpdateBuilder remove(String languageCode) {
    Validate.notBlank(languageCode, "Language code must be provided.");
    if (base != null && !base.containsKey(languageCode)) {
        modified.remove(languageCode);
        return this;
    }
    removed.add(languageCode);
    modified.remove(languageCode);
    return this;
}

/**
 * Replays all changes in provided update into this builder object. Changes are
 * performed as if by calling {@link #put(MonolingualTextValue)} and
 * {@link #remove(String)} methods.
 *
 * @param update
 *            term update to replay
 * @return {@code this} (fluent method)
 * @throws NullPointerException
 *             if {@code update} is {@code null}
 */
public TermUpdateBuilder append(TermUpdate update) {
    Objects.requireNonNull(update, "Term update cannot be null.");
    for (MonolingualTextValue term : update.getModified().values()) {
        put(term);
    }
    for (String language : update.getRemoved()) {
        remove(language);
    }
    return this;
}

/**
 * Creates new {@link TermUpdate} object with contents of this builder object.
 *
 * @return constructed object
 */
public TermUpdate build() {
    return Datamodel.makeTermUpdate(modified.values(), removed);
}
}
TermedDocumentUpdateBuilder.java000066400000000000000000000220571444772566300374550ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/
/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.wikidata.wdtk.datamodel.helpers;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import org.apache.commons.lang3.Validate;
import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate;
import org.wikidata.wdtk.datamodel.interfaces.TermUpdate;
import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocument;
import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocumentUpdate;

/**
 * Builder for incremental construction of {@link TermedStatementDocumentUpdate}
 * objects.
 */
public abstract class TermedDocumentUpdateBuilder extends LabeledDocumentUpdateBuilder {

TermUpdate descriptions = TermUpdate.EMPTY;
final Map<String, AliasUpdate> aliases = new HashMap<>();

/**
 * Initializes new builder object for constructing update of entity with given
 * ID.
* * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is a placeholder ID */ protected TermedDocumentUpdateBuilder(EntityIdValue entityId, long revisionId) { super(entityId, revisionId); } /** * Initializes new builder object for constructing update of given base entity * revision. * * @param revision * base entity revision to be updated * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} has placeholder ID */ protected TermedDocumentUpdateBuilder(TermedStatementDocument revision) { super(revision); } /** * Creates new builder object for constructing update of entity with given * revision ID. *
<p>
* Supported entity IDs include {@link ItemIdValue} and {@link PropertyIdValue}. * * @param entityId * ID of the entity that is to be updated * @param revisionId * ID of the base entity revision to be updated or zero if not * available * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it is a * placeholder ID */ public static TermedDocumentUpdateBuilder forBaseRevisionId(EntityIdValue entityId, long revisionId) { Objects.requireNonNull(entityId, "Entity ID cannot be null."); if (entityId instanceof ItemIdValue) { return ItemUpdateBuilder.forBaseRevisionId((ItemIdValue) entityId, revisionId); } if (entityId instanceof PropertyIdValue) { return PropertyUpdateBuilder.forBaseRevisionId((PropertyIdValue) entityId, revisionId); } throw new IllegalArgumentException("Unrecognized entity ID type."); } /** * Creates new builder object for constructing update of entity with given ID. *
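<p>
 * For example, a sketch (using a hypothetical item ID):
 * <pre>{@code
 * TermedDocumentUpdateBuilder builder = TermedDocumentUpdateBuilder
 *     .forEntityId(Datamodel.makeWikidataItemIdValue("Q1"));
 * }</pre>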
 * <p>
* Supported entity IDs include {@link ItemIdValue} and {@link PropertyIdValue}. * * @param entityId * ID of the entity that is to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code entityId} is {@code null} * @throws IllegalArgumentException * if {@code entityId} is of unrecognized type or it is a * placeholder ID */ public static TermedDocumentUpdateBuilder forEntityId(EntityIdValue entityId) { return forBaseRevisionId(entityId, 0); } /** * Creates new builder object for constructing update of given base entity * revision. Provided entity document might not represent the latest revision of * the entity as currently stored in Wikibase. It will be used for validation in * builder methods. If the document has revision ID, it will be used to detect * edit conflicts. *
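<p>
 * For illustration, a sketch (assuming {@code doc} is an item document fetched
 * elsewhere, e.g. through the Wikibase API):
 * <pre>{@code
 * TermedStatementDocumentUpdate update = TermedDocumentUpdateBuilder
 *     .forBaseRevision(doc)
 *     .updateDescriptions(TermUpdateBuilder.create()
 *         .put(Datamodel.makeMonolingualTextValue("English writer", "en"))
 *         .build())
 *     .build();
 * }</pre>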
 * <p>
* Supported entity types include {@link ItemDocument} and * {@link PropertyDocument}. * * @param revision * base entity revision to be updated * @return builder object matching entity type * @throws NullPointerException * if {@code revision} is {@code null} * @throws IllegalArgumentException * if {@code revision} is of unrecognized type or its ID is a * placeholder ID */ public static TermedDocumentUpdateBuilder forBaseRevision(TermedStatementDocument revision) { Objects.requireNonNull(revision, "Base entity revision cannot be null."); if (revision instanceof ItemDocument) { return ItemUpdateBuilder.forBaseRevision((ItemDocument) revision); } if (revision instanceof PropertyDocument) { return PropertyUpdateBuilder.forBaseRevision((PropertyDocument) revision); } throw new IllegalArgumentException("Unrecognized entity document type."); } @Override TermedStatementDocument getBaseRevision() { return (TermedStatementDocument) super.getBaseRevision(); } @Override public TermedDocumentUpdateBuilder updateStatements(StatementUpdate update) { super.updateStatements(update); return this; } @Override public TermedDocumentUpdateBuilder updateLabels(TermUpdate update) { super.updateLabels(update); return this; } /** * Updates entity descriptions. If this method is called multiple times, changes * are accumulated. If base entity revision was provided, redundant changes are * silently ignored, resulting in empty update. * * @param update * changes in entity descriptions * @return {@code this} (fluent method) * @throws NullPointerException * if {@code update} is {@code null} */ public TermedDocumentUpdateBuilder updateDescriptions(TermUpdate update) { Objects.requireNonNull(update, "Update cannot be null."); TermUpdateBuilder combined = getBaseRevision() != null ? TermUpdateBuilder.forTerms(getBaseRevision().getDescriptions().values()) : TermUpdateBuilder.create(); combined.append(descriptions); combined.append(update); descriptions = combined.build(); return this; } /** * Updates entity aliases. If this method is called multiple times, changes are * accumulated. If base entity revision was provided, the update is checked * against it and redundant changes are silently ignored, resulting in empty * update. 
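 * <p>
 * For example, a sketch (assuming {@code builder} is an instance of this class
 * and that the alias value is an arbitrary example):
 * <pre>{@code
 * builder.updateAliases("en", AliasUpdateBuilder.create()
 *     .add(Datamodel.makeMonolingualTextValue("Everest", "en"))
 *     .build());
 * }</pre>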
 *
 * @param language
 *            language code of the altered aliases
 * @param update
 *            alias changes
 * @return {@code this} (fluent method)
 * @throws NullPointerException
 *             if {@code language} or {@code aliases} is {@code null}
 * @throws IllegalArgumentException
 *             if {@code language} is blank or {@code aliases} has inconsistent
 *             language code
 */
public TermedDocumentUpdateBuilder updateAliases(String language, AliasUpdate update) {
    Validate.notBlank(language, "Specify language code.");
    Objects.requireNonNull(update, "Alias update cannot be null.");
    if (update.getLanguageCode().isPresent()) {
        Validate.isTrue(language.equals(update.getLanguageCode().get()),
                "Alias update must have matching language code.");
    }
    AliasUpdateBuilder builder;
    if (getBaseRevision() != null) {
        builder = AliasUpdateBuilder
                .forAliases(getBaseRevision().getAliases().getOrDefault(language, Collections.emptyList()));
    } else {
        builder = AliasUpdateBuilder.create();
    }
    builder.append(aliases.getOrDefault(language, AliasUpdate.EMPTY));
    builder.append(update);
    AliasUpdate combined = builder.build();
    if (!combined.isEmpty()) {
        aliases.put(language, combined);
    } else {
        aliases.remove(language);
    }
    return this;
}

void append(TermedStatementDocumentUpdate update) {
    super.append(update);
    updateDescriptions(update.getDescriptions());
    for (Map.Entry<String, AliasUpdate> entry : update.getAliases().entrySet()) {
        updateAliases(entry.getKey(), entry.getValue());
    }
}

/**
 * Creates new {@link TermedStatementDocumentUpdate} object with contents of
 * this builder object.
 *
 * @return constructed object
 */
@Override
public abstract TermedStatementDocumentUpdate build();
}
ToString.java000066400000000000000000000516101444772566300336320ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers
package org.wikidata.wdtk.datamodel.helpers;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.wikidata.wdtk.datamodel.interfaces.*;

import java.text.DecimalFormat;
import java.util.SortedSet;
import java.util.TreeSet;

/**
 * Static class for computing a toString of arbitrary data objects using only
 * their interfaces. This can be used to implement the toString() method of
 * arbitrary interface implementations. More efficient solutions might exist
 * if the object that implements an interface is of a specific known type,
 * but the methods here could always be used as a fallback or default.
 * <p>
* The methods here are only meant for human consumption. Generic methods for * serializing certain data values in well-defined string formats are found in * {@link DataFormatter} instead. * * @author Markus Kroetzsch * */ public class ToString { /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(ItemIdValue o) { return o.getIri() + " (item)"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(PropertyIdValue o) { return o.getIri() + " (property)"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(LexemeIdValue o) { return o.getIri() + " (lexeme)"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(FormIdValue o) { return o.getIri() + " (form)"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(SenseIdValue o) { return o.getIri() + " (sense)"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(MediaInfoIdValue o) { return o.getIri() + " (media-info)"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(DatatypeIdValue o) { return o.getIri() + " (" + o.getJsonString() + ")"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(TimeValue o) { DecimalFormat timeForm = new DecimalFormat("00"); return o.getYear() + "-" + timeForm.format(o.getMonth()) + "-" + timeForm.format(o.getDay()) + (o.getPrecision() > TimeValue.PREC_DAY || o.getHour() + o.getMinute() + o.getSecond() > 0 ? ("T" + timeForm.format(o.getHour()) + ":" + timeForm.format(o.getMinute()) + ":" + timeForm .format(o.getSecond())) : "") + " (Prec.: " + getTimePrecisionString(o.getPrecision()) + " [-" + o.getBeforeTolerance() + " .. +" + o.getAfterTolerance() + "]" + ", PCal: " + getCalendarString(o.getPreferredCalendarModel()) + (o.getTimezoneOffset() != 0 ? (", Timezone: " + o.getTimezoneOffset() + "min ") : "") + ")"; } /** * Returns a human-readable string representation of the given object. 
* * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(GlobeCoordinatesValue o) { return (o.getLatitude() / GlobeCoordinatesValue.PREC_DEGREE) + ":" + (o.getLongitude() / GlobeCoordinatesValue.PREC_DEGREE) + " (" + getGlobeString(o.getGlobe()) + ")"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(StringValue o) { return "\"" + o.getString().replace("\"", "\\\"") + "\""; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(MonolingualTextValue o) { return "\"" + o.getText().replace("\"", "\\\"") + "\" (" + o.getLanguageCode() + ")"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(QuantityValue o) { String str = o.getNumericValue().toString(); if(o.getLowerBound() != null && o.getUpperBound() != null) { str += " [" + o.getLowerBound().toString() + " .. " + o.getUpperBound().toString() + "]"; } if(!"1".equals(o.getUnit())) { str += " " + o.getUnit(); } return str; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(UnsupportedValue o) { return "unsupported value of type "+o.getTypeJsonString(); } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(UnsupportedEntityIdValue o) { return o.getIri() + " (unsupported)"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(ValueSnak o) { return o.getPropertyId().getIri() + " :: " + o.getValue().toString(); } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(SomeValueSnak o) { return o.getPropertyId().getIri() + " has some value"; } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(NoValueSnak o) { return o.getPropertyId().getIri() + " has no value"; } /** * Returns a human-readable string representation of the given object. 
* * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(SnakGroup o) { final String indent = " "; StringBuilder result = new StringBuilder(); for (Snak s : o.getSnaks()) { result.append(indent) .append(s.toString()) .append("\n"); } return result.toString(); } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(Claim o) { StringBuilder sb = new StringBuilder(); sb.append(o.getSubject()).append(": "); sb.append(o.getMainSnak().toString()).append("\n"); for (SnakGroup s : o.getQualifiers()) { sb.append(toString(s)); } return sb.toString(); } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(Reference o) { StringBuilder sb = new StringBuilder(); sb.append(" Reference:\n"); for (SnakGroup s : o.getSnakGroups()) { sb.append(toString(s)); } return sb.toString(); } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(Statement o) { StringBuilder sb = new StringBuilder(); sb.append("[ID ").append(o.getStatementId()).append("] "); sb.append(o.getSubject()).append(": "); sb.append(o.getMainSnak().toString()).append("\n"); for (SnakGroup s : o.getQualifiers()) { sb.append(toString(s)); } if (o.getRank() != StatementRank.NORMAL) { sb.append(" Rank: ").append(o.getRank()).append("\n"); } for (Reference r : o.getReferences()) { sb.append(toString(r)); } return sb.toString(); } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(StatementGroup o) { StringBuilder sb = new StringBuilder(); for (Statement s : o) { sb.append(toString(s)); } return sb.toString(); } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(SiteLink o) { if (o.getBadges().isEmpty()) { return o.getSiteKey() + "/" + o.getPageTitle(); } else { return o.getSiteKey() + "/" + o.getPageTitle() + " " + o.getBadges(); } } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(PropertyDocument o) { return "==PropertyDocument " + o.getEntityId().getIri() + " (r" + o.getRevisionId() + ") ==\n" + "* Datatype: " + o.getDatatype() + toStringForTermedDocument(o) + toStringForStatementDocument(o); } /** * Returns a human-readable string representation of the given object. 
 *
 * @see java.lang.Object#toString()
 * @param o
 *            the object to represent as string
 * @return a string representation of the object
 */
public static String toString(ItemDocument o) {
    StringBuilder sb = new StringBuilder();
    sb.append("==ItemDocument ").append(o.getEntityId().getIri());
    sb.append(" (r").append(o.getRevisionId()).append(") ");
    sb.append("==").append(toStringForTermedDocument(o));
    sb.append(toStringForStatementDocument(o));
    sb.append("* Site links: ");
    boolean first = true;
    SortedSet<String> siteKeys = new TreeSet<>(o.getSiteLinks().keySet());
    for (String key : siteKeys) {
        if (first) {
            first = false;
        } else {
            sb.append("; ");
        }
        sb.append(toString(o.getSiteLinks().get(key)));
    }
    return sb.toString();
}

/**
 * Returns a human-readable string representation of the given object.
 *
 * @see java.lang.Object#toString()
 * @param o
 *            the object to represent as string
 * @return a string representation of the object
 */
public static String toString(LexemeDocument o) {
    StringBuilder sb = new StringBuilder();
    sb.append("==LexemeDocument ").append(o.getEntityId().getIri());
    sb.append(" (r").append(o.getRevisionId()).append(") ");
    sb.append("==");
    sb.append("\n* Lexical category: ").append(o.getLexicalCategory().getIri());
    sb.append("\n* Language: ").append(o.getLanguage().getIri());
    boolean first;
    sb.append("\n* Lemmas: ");
    first = true;
    SortedSet<String> labelKeys = new TreeSet<>(o.getLemmas().keySet());
    for (String key : labelKeys) {
        if (first) {
            first = false;
        } else {
            sb.append("; ");
        }
        sb.append(toString(o.getLemmas().get(key)));
    }
    sb.append(toStringForStatementDocument(o));
    sb.append("\n* Forms: \n");
    for (FormDocument form : o.getForms()) {
        sb.append(form.toString()).append('\n');
    }
    sb.append("\n* Senses: \n");
    for (SenseDocument sense : o.getSenses()) {
        sb.append(sense.toString()).append('\n');
    }
    return sb.toString();
}

/**
 * Returns a human-readable string representation of the given object.
 *
 * @see java.lang.Object#toString()
 * @param o
 *            the object to represent as string
 * @return a string representation of the object
 */
public static String toString(FormDocument o) {
    StringBuilder sb = new StringBuilder();
    sb.append("==FormDocument ").append(o.getEntityId().getIri());
    sb.append(" (r").append(o.getRevisionId()).append(") ");
    sb.append("==");
    boolean first;
    sb.append("\n* Lemmas: ");
    first = true;
    SortedSet<String> labelKeys = new TreeSet<>(o.getRepresentations().keySet());
    for (String key : labelKeys) {
        if (first) {
            first = false;
        } else {
            sb.append("; ");
        }
        sb.append(toString(o.getRepresentations().get(key)));
    }
    sb.append("\n* Grammatical features: ");
    for (ItemIdValue feature : o.getGrammaticalFeatures()) {
        sb.append(feature.toString()).append(' ');
    }
    sb.append(toStringForStatementDocument(o));
    return sb.toString();
}

/**
 * Returns a human-readable string representation of the given object.
 *
 * @see java.lang.Object#toString()
 * @param o
 *            the object to represent as string
 * @return a string representation of the object
 */
public static String toString(SenseDocument o) {
    StringBuilder sb = new StringBuilder();
    sb.append("==SenseDocument ").append(o.getEntityId().getIri());
    sb.append(" (r").append(o.getRevisionId()).append(") ");
    sb.append("==");
    boolean first;
    sb.append("\n* Lemmas: ");
    first = true;
    SortedSet<String> labelKeys = new TreeSet<>(o.getGlosses().keySet());
    for (String key : labelKeys) {
        if (first) {
            first = false;
        } else {
            sb.append("; ");
        }
        sb.append(toString(o.getGlosses().get(key)));
    }
    sb.append(toStringForStatementDocument(o));
    return sb.toString();
}

/**
 * Returns a human-readable string representation of the given object.
 *
 * @see java.lang.Object#toString()
 * @param o
 *            the object to represent as string
 * @return a string representation of the object
 */
public static String toString(MediaInfoDocument o) {
    StringBuilder sb = new StringBuilder();
    sb.append("==MediaInfoDocument ").append(o.getEntityId().getIri());
    sb.append(" (r").append(o.getRevisionId()).append(") ");
    sb.append("==");
    sb.append(toStringForLabeledDocument(o));
    sb.append(toStringForStatementDocument(o));
    return sb.toString();
}

protected static String toStringForStatementDocument(StatementDocument o) {
    StringBuilder sb = new StringBuilder();
    sb.append("\n===Statements===\n");
    for (StatementGroup sg : o.getStatementGroups()) {
        sb.append(toString(sg));
    }
    sb.append("\n===End of statements===\n");
    return sb.toString();
}

/**
 * Returns a human-readable string representation of the given
 * {@link TermedDocument}.
 *
 * @see java.lang.Object#toString()
 * @param o
 *            the object to represent as string
 * @return a string representation of the object
 */
protected static String toStringForTermedDocument(TermedDocument o) {
    StringBuilder sb = new StringBuilder();
    boolean first;
    sb.append(toStringForLabeledDocument(o));
    sb.append("\n* Descriptions: ");
    first = true;
    SortedSet<String> descriptionKeys = new TreeSet<>(o.getDescriptions().keySet());
    for (String key : descriptionKeys) {
        if (first) {
            first = false;
        } else {
            sb.append("; ");
        }
        sb.append(toString(o.getDescriptions().get(key)));
    }
    sb.append("\n* Aliases: ");
    first = true;
    SortedSet<String> aliasKeys = new TreeSet<>(o.getAliases().keySet());
    for (String key : aliasKeys) {
        for (MonolingualTextValue mtv : o.getAliases().get(key)) {
            if (first) {
                first = false;
            } else {
                sb.append("; ");
            }
            sb.append(toString(mtv));
        }
    }
    return sb.toString();
}

/**
 * Returns a human-readable string representation of the given
 * {@link LabeledDocument}.
 *
 * @see java.lang.Object#toString()
 * @param o
 *            the object to represent as string
 * @return a string representation of the object
 */
private static String toStringForLabeledDocument(LabeledDocument o) {
    StringBuilder sb = new StringBuilder();
    boolean first;
    sb.append("\n* Labels: ");
    first = true;
    SortedSet<String> labelKeys = new TreeSet<>(o.getLabels().keySet());
    for (String key : labelKeys) {
        if (first) {
            first = false;
        } else {
            sb.append("; ");
        }
        sb.append(toString(o.getLabels().get(key)));
    }
    return sb.toString();
}

/**
 * Returns a human-readable string representation of a reference to a globe
 * on which coordinates may be present. Known globes (such as Earth) are
 * replaced by their common English names.
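 * <p>
 * For example, {@code getGlobeString(GlobeCoordinatesValue.GLOBE_EARTH)} yields
 * {@code "Earth"}, while IRIs of globes unknown to this class are returned
 * unchanged.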
* * @param globeIri * the globe item's IRI * @return a string representation of the globe */ protected static String getGlobeString(String globeIri) { switch (globeIri) { case GlobeCoordinatesValue.GLOBE_EARTH: return "Earth"; default: return globeIri; } } /** * Returns a human-readable string representation of a reference to a * calendar model that is used for a time value. Known calendars (such as * proleptic Gregorian calendar) are replaced by short English names. The * term "proleptic" is omitted. * * @param calendarModel * the calendar model item's IRI * @return a string representation of the calendar model */ protected static String getCalendarString(String calendarModel) { switch (calendarModel) { case TimeValue.CM_GREGORIAN_PRO: return "Gregorian"; case TimeValue.CM_JULIAN_PRO: return "Julian"; default: return calendarModel; } } /** * Returns a human-readable string representation of a reference to a * precision that is used for a time value. * * @param precision * the numeric precision * @return a string representation of the precision */ protected static String getTimePrecisionString(byte precision) { switch (precision) { case TimeValue.PREC_SECOND: return "sec"; case TimeValue.PREC_MINUTE: return "min"; case TimeValue.PREC_HOUR: return "hour"; case TimeValue.PREC_DAY: return "day"; case TimeValue.PREC_MONTH: return "month"; case TimeValue.PREC_YEAR: return "year"; case TimeValue.PREC_DECADE: return "decade"; case TimeValue.PREC_100Y: return "100 years"; case TimeValue.PREC_1KY: return "1000 years"; case TimeValue.PREC_10KY: return "10K years"; case TimeValue.PREC_100KY: return "100K years"; case TimeValue.PREC_1MY: return "1 million years"; case TimeValue.PREC_10MY: return "10 million years"; case TimeValue.PREC_100MY: return "100 million years"; case TimeValue.PREC_1GY: return "1000 million years"; default: return "Unsupported precision " + precision; } } /** * Returns a human-readable string representation of the given object. * * @see java.lang.Object#toString() * @param o * the object to represent as string * @return a string representation of the object */ public static String toString(EntityRedirectDocument o) { return "==EntityRedirect " + o.getEntityId().getIri() + " (r" + o.getRevisionId() + ") " + "==\n" + "Target: " + o.getTargetId().getIri(); } } package-info.java000066400000000000000000000014741444772566300344100ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/helpers/** * Package for mostly static helper code to work with data objects. * * @author Markus Kroetzsch * */ package org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 * #L%
 */
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/000077500000000000000000000000001444772566300326555ustar00rootroot00000000000000AliasUpdateImpl.java000066400000000000000000000133231444772566300364610ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/
/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.wikidata.wdtk.datamodel.implementation;

import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;

import org.apache.commons.lang3.Validate;
import org.wikidata.wdtk.datamodel.helpers.Equality;
import org.wikidata.wdtk.datamodel.helpers.Hash;
import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;

/**
 * Jackson implementation of {@link AliasUpdate}.
 */
public class AliasUpdateImpl implements AliasUpdate {

@JsonIgnore
private final String languageCode;
@JsonIgnore
private final List<MonolingualTextValue> recreated;
private final List<MonolingualTextValue> added;
private final Set<MonolingualTextValue> removed;

/**
 * Initializes new alias update. This update applies to aliases in one language
 * only. Callers should specify either {@code recreated} parameter or
 * {@code added} and {@code removed} parameters, because combination of the two
 * update approaches is not possible. To remove all aliases, pass empty list in
 * {@code recreated} parameter.
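 * <p>
 * For illustration, a sketch that only adds one alias (the values are
 * arbitrary examples):
 * <pre>{@code
 * AliasUpdate update = new AliasUpdateImpl(
 *     null,
 *     Collections.singletonList(Datamodel.makeMonolingualTextValue("Everest", "en")),
 *     Collections.emptyList());
 * }</pre>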
 *
 * @param recreated
 *            new list of aliases that completely replaces the old ones or
 *            {@code null} to not recreate aliases
 * @param added
 *            aliases added in this update or empty collection for no additions
 * @param removed
 *            aliases removed in this update or empty collection for no removals
 * @throws NullPointerException
 *             if {@code added}, {@code removed}, or any alias is {@code null}
 * @throws IllegalArgumentException
 *             if given invalid combination of parameters
 */
public AliasUpdateImpl(List<MonolingualTextValue> recreated,
        List<MonolingualTextValue> added,
        Collection<MonolingualTextValue> removed) {
    Objects.requireNonNull(added, "List of added aliases cannot be null.");
    Objects.requireNonNull(removed, "List of removed aliases cannot be null.");
    Validate.isTrue(recreated == null || added.isEmpty() && removed.isEmpty(),
            "Cannot combine additions/removals with recreating the alias list.");
    List<MonolingualTextValue> all = new ArrayList<>();
    if (recreated != null) {
        all.addAll(recreated);
    }
    all.addAll(added);
    all.addAll(removed);
    for (MonolingualTextValue alias : all) {
        Validate.notNull(alias, "Alias object cannot be null.");
    }
    Validate.isTrue(all.stream().map(v -> v.getLanguageCode()).distinct().count() <= 1,
            "Inconsistent language codes.");
    if (recreated != null) {
        Validate.isTrue(recreated.stream().distinct().count() == recreated.size(),
                "Every alias in the new list of aliases must be unique.");
    }
    Validate.isTrue(added.stream().distinct().count() == added.size(),
            "Every new alias must be unique.");
    Validate.isTrue(removed.stream().distinct().count() == removed.size(),
            "Every removed alias must be unique.");
    Validate.isTrue(all.stream().distinct().count() == all.size(),
            "Cannot add and remove the same alias.");
    languageCode = all.stream().map(v -> v.getLanguageCode()).findFirst().orElse(null);
    this.recreated = recreated != null
            ? Collections.unmodifiableList(recreated.stream().map(TermImpl::new).collect(toList()))
            : null;
    this.added = Collections.unmodifiableList(added.stream().map(AddedAlias::new).collect(toList()));
    this.removed = Collections.unmodifiableSet(removed.stream().map(RemovedAlias::new).collect(toSet()));
}

@JsonIgnore
@Override
public boolean isEmpty() {
    return recreated == null && added.isEmpty() && removed.isEmpty();
}

@JsonIgnore
@Override
public Optional<String> getLanguageCode() {
    return Optional.ofNullable(languageCode);
}

@JsonIgnore
@Override
public Optional<List<MonolingualTextValue>> getRecreated() {
    return Optional.ofNullable(recreated);
}

@JsonIgnore
@Override
public List<MonolingualTextValue> getAdded() {
    return added;
}

@JsonIgnore
@Override
public Set<MonolingualTextValue> getRemoved() {
    return removed;
}

static class AddedAlias extends TermImpl {

    AddedAlias(MonolingualTextValue alias) {
        super(alias);
    }

    @JsonProperty("add")
    String getAddCommand() {
        return "";
    }
}

static class RemovedAlias extends TermImpl {

    RemovedAlias(MonolingualTextValue alias) {
        super(alias);
    }

    @JsonProperty("remove")
    String getRemoveCommand() {
        return "";
    }
}

@JsonValue
List<MonolingualTextValue> toJson() {
    List<MonolingualTextValue> items = new ArrayList<>();
    if (recreated != null) {
        items.addAll(recreated);
    }
    items.addAll(removed);
    items.addAll(added);
    if (items.isEmpty() && recreated == null) {
        return null;
    }
    return items;
}

@Override
public boolean equals(Object obj) {
    return Equality.equalsAliasUpdate(this, obj);
}

@Override
public int hashCode() {
    return Hash.hashCode(this);
}
}
ClaimImpl.java000066400000000000000000000053071444772566300353150ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation
package org.wikidata.wdtk.datamodel.implementation;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.wikidata.wdtk.datamodel.helpers.Equality;
import org.wikidata.wdtk.datamodel.helpers.Hash;
import org.wikidata.wdtk.datamodel.helpers.ToString;
import org.wikidata.wdtk.datamodel.interfaces.*;

import java.util.Iterator;
import java.util.List;

/**
 * Helper class to represent a {@link Claim}.
 * This is a facade for a {@link Statement}.
 *
 * @author Fredo Erxleben
 * @author Antonin Delpeuch
 */
public class ClaimImpl implements Claim {

private final Statement statement;

/**
 * Constructor to create a claim. This internally creates
 * a new statement, so if you want to create a statement later
 * on just use {@link StatementImpl} directly.
 *
 * @param subject
 *            the subject the Claim refers to
 * @param mainSnak
 *            the main Snak of the Claim
 * @param qualifiers
 *            the qualifiers of the Claim, grouped in SnakGroups
 */
public ClaimImpl(
        EntityIdValue subject,
        Snak mainSnak,
        List<SnakGroup> qualifiers) {
    this.statement = new StatementImpl(null, StatementRank.NORMAL,
            mainSnak, qualifiers, null, subject);
}

/**
 * Constructor used to initialize a claim from a JacksonStatement,
 * should only be used internally.
 *
 * @param statement
 *            the statement which contains this claim
 */
public ClaimImpl(StatementImpl statement) {
    this.statement = statement;
}

@Override
public EntityIdValue getSubject() {
    return statement.getSubject();
}

@Override
public Snak getMainSnak() {
    return statement.getMainSnak();
}

@Override
public List<SnakGroup> getQualifiers() {
    return statement.getQualifiers();
}

@Override
public Iterator<Snak> getAllQualifiers() {
    return statement.getAllQualifiers();
}

@Override
public Value getValue() {
    return statement.getValue();
}

@Override
public int hashCode() {
    return Hash.hashCode(this);
}

@Override
public boolean equals(Object obj) {
    return Equality.equalsClaim(this, obj);
}

@Override
public String toString() {
    return ToString.toString(this);
}
}
DataObjectFactoryImpl.java000066400000000000000000000334121444772566300376160ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation
package org.wikidata.wdtk.datamodel.implementation;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
* #L% */ import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyUpdate; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; /** * Factory implementation to create Jackson versions of the datamodel objects, * where available. 
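 * <p>
 * For illustration, a minimal usage sketch (the entity and property IDs shown
 * are arbitrary examples):
 * <pre>{@code
 * DataObjectFactory factory = new DataObjectFactoryImpl();
 * ItemIdValue item = factory.getItemIdValue("Q42", "http://www.wikidata.org/entity/");
 * PropertyIdValue property = factory.getPropertyIdValue("P31", "http://www.wikidata.org/entity/");
 * ValueSnak snak = factory.getValueSnak(property, item);
 * }</pre>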
* * @author Markus Kroetzsch * */ public class DataObjectFactoryImpl implements DataObjectFactory { @Override public ItemIdValue getItemIdValue(String id, String siteIri) { return new ItemIdValueImpl(id, siteIri); } @Override public PropertyIdValue getPropertyIdValue(String id, String siteIri) { return new PropertyIdValueImpl(id, siteIri); } @Override public LexemeIdValue getLexemeIdValue(String id, String siteIri) { return new LexemeIdValueImpl(id, siteIri); } @Override public FormIdValue getFormIdValue(String id, String siteIri) { return new FormIdValueImpl(id, siteIri); } @Override public SenseIdValue getSenseIdValue(String id, String siteIri) { return new SenseIdValueImpl(id, siteIri); } @Override public MediaInfoIdValue getMediaInfoIdValue(String id, String siteIri) { return new MediaInfoIdValueImpl(id, siteIri); } @Override public DatatypeIdValue getDatatypeIdValue(String id) { return new DatatypeIdImpl(id); } @Override public DatatypeIdValue getDatatypeIdValueFromJsonId(String jsonId) { return new DatatypeIdImpl(null, jsonId); } @Override public TimeValue getTimeValue(long year, byte month, byte day, byte hour, byte minute, byte second, byte precision, int beforeTolerance, int afterTolerance, int timezoneOffset, String calendarModel) { return new TimeValueImpl(year, month, day, hour, minute, second, precision, beforeTolerance, afterTolerance, timezoneOffset, calendarModel); } @Override public GlobeCoordinatesValue getGlobeCoordinatesValue(double latitude, double longitude, double precision, String globeIri) { if (precision <= 0) { throw new IllegalArgumentException( "Coordinates precision must be non-zero positive. Given value: " + precision); } return new GlobeCoordinatesValueImpl(latitude, longitude, precision, globeIri); } @Override public StringValue getStringValue(String string) { return new StringValueImpl(string); } @Override public MonolingualTextValue getMonolingualTextValue(String text, String languageCode) { return new MonolingualTextValueImpl(text, languageCode); } @Override public QuantityValue getQuantityValue(BigDecimal numericValue) { return getQuantityValue(numericValue, null, null, (ItemIdValue)null); } @Override public QuantityValue getQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound) { return getQuantityValue(numericValue, lowerBound, upperBound, (ItemIdValue)null); } @Override public QuantityValue getQuantityValue(BigDecimal numericValue, String unit) { return getQuantityValue(numericValue, null, null, unit); } @Override public QuantityValue getQuantityValue(BigDecimal numericValue, ItemIdValue unit) { return getQuantityValue(numericValue, null, null, unit); } @Override @Deprecated public QuantityValue getQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound, String unit) { return new QuantityValueImpl(numericValue, lowerBound, upperBound, unit); } @Override public QuantityValue getQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound, ItemIdValue unit) { return new QuantityValueImpl(numericValue, lowerBound, upperBound, unit); } /** * Creates a {@link ValueSnakImpl}. Value snaks in JSON need to know the * datatype of their property, which is not given in the parameters of this * method. The snak that will be returned will use a default type based on * the kind of value that is used (usually the "simplest" type for that * value). This may not be desired. 
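 * <p>
 * For example (a sketch), a snak created from a string value will carry the
 * plain string datatype, even if the property is declared with a more specific
 * datatype such as external identifier:
 * <pre>{@code
 * ValueSnak snak = factory.getValueSnak(property, factory.getStringValue("some text"));
 * }</pre>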
 *
 * @see DataObjectFactory#getValueSnak(PropertyIdValue, Value)
 */
@Override
public ValueSnak getValueSnak(PropertyIdValue propertyId, Value value) {
    return new ValueSnakImpl(propertyId, value);
}

@Override
public SomeValueSnak getSomeValueSnak(PropertyIdValue propertyId) {
    return new SomeValueSnakImpl(
            propertyId.getId(),
            propertyId.getSiteIri());
}

@Override
public NoValueSnak getNoValueSnak(PropertyIdValue propertyId) {
    return new NoValueSnakImpl(
            propertyId.getId(),
            propertyId.getSiteIri());
}

@Override
public SnakGroup getSnakGroup(List<? extends Snak> snaks) {
    return new SnakGroupImpl(new ArrayList<>(snaks));
}

@Override
public Claim getClaim(EntityIdValue subject, Snak mainSnak,
        List<SnakGroup> qualifiers) {
    // Jackson claims cannot exist without a statement.
    return getStatement(subject, mainSnak, qualifiers,
            Collections.<Reference> emptyList(), StatementRank.NORMAL,
            "empty id 12345").getClaim();
}

@Override
public Reference getReference(List<SnakGroup> snakGroups) {
    return new ReferenceImpl(snakGroups);
}

@Override
public Statement getStatement(Claim claim, List<Reference> references,
        StatementRank rank, String statementId) {
    return getStatement(claim.getSubject(), claim.getMainSnak(),
            claim.getQualifiers(), references, rank, statementId);
}

@Override
public Statement getStatement(EntityIdValue subject, Snak mainSnak,
        List<SnakGroup> qualifiers, List<Reference> references,
        StatementRank rank, String statementId) {
    return new StatementImpl(statementId, rank, mainSnak, qualifiers,
            references, subject);
}

@Override
public StatementGroup getStatementGroup(List<Statement> statements) {
    return new StatementGroupImpl(statements);
}

@Override
public SiteLink getSiteLink(String title, String siteKey, List<String> badges) {
    return new SiteLinkImpl(title, siteKey, badges);
}

@Override
public PropertyDocument getPropertyDocument(PropertyIdValue propertyId,
        List<MonolingualTextValue> labels,
        List<MonolingualTextValue> descriptions,
        List<MonolingualTextValue> aliases,
        List<StatementGroup> statementGroups,
        DatatypeIdValue datatypeId, long revisionId) {
    return new PropertyDocumentImpl(
            propertyId,
            labels,
            descriptions,
            aliases,
            statementGroups,
            datatypeId,
            revisionId);
}

@Override
public ItemDocument getItemDocument(ItemIdValue itemIdValue,
        List<MonolingualTextValue> labels,
        List<MonolingualTextValue> descriptions,
        List<MonolingualTextValue> aliases,
        List<StatementGroup> statementGroups,
        Map<String, SiteLink> siteLinks, long revisionId) {
    return new ItemDocumentImpl(
            itemIdValue,
            labels,
            descriptions,
            aliases,
            statementGroups,
            new ArrayList<>(siteLinks.values()),
            revisionId);
}

@Override
public LexemeDocument getLexemeDocument(LexemeIdValue lexemeIdValue,
        ItemIdValue lexicalCategory,
        ItemIdValue language,
        List<MonolingualTextValue> lemmas,
        List<StatementGroup> statementGroups,
        List<FormDocument> forms,
        List<SenseDocument> senses,
        long revisionId) {
    return new LexemeDocumentImpl(lexemeIdValue, lexicalCategory, language,
            lemmas, statementGroups, forms, senses, revisionId);
}

@Override
public FormDocument getFormDocument(FormIdValue formIdValue,
        List<MonolingualTextValue> representations,
        List<ItemIdValue> grammaticalFeatures,
        List<StatementGroup> statementGroups,
        long revisionId) {
    return new FormDocumentImpl(formIdValue, representations,
            grammaticalFeatures, statementGroups, revisionId);
}

@Override
public SenseDocument getSenseDocument(SenseIdValue senseIdValue,
        List<MonolingualTextValue> glosses,
        List<StatementGroup> statementGroups,
        long revisionId) {
    return new SenseDocumentImpl(senseIdValue, glosses, statementGroups,
            revisionId);
}

@Override
public MediaInfoDocument getMediaInfoDocument(MediaInfoIdValue mediaInfoIdValue,
        List<MonolingualTextValue> labels,
        List<StatementGroup> statementGroups,
        long revisionId) {
    return new MediaInfoDocumentImpl(
            mediaInfoIdValue,
            labels,
            statementGroups,
            revisionId);
}

@Override
public TermUpdate getTermUpdate(
        Collection<MonolingualTextValue> modified,
        Collection<String> removed) {
    return new TermUpdateImpl(modified, removed);
}

@Override
AliasUpdate getAliasUpdate( List recreated, List added, Collection removed) { return new AliasUpdateImpl(recreated, added, removed); } @Override public StatementUpdate getStatementUpdate( Collection added, Collection replaced, Collection removed) { return new StatementUpdateImpl(added, replaced, removed); } @Override public SenseUpdate getSenseUpdate( SenseIdValue entityId, long revisionId, TermUpdate glosses, StatementUpdate statements) { return new SenseUpdateImpl(entityId, revisionId, glosses, statements); } @Override public FormUpdate getFormUpdate( FormIdValue entityId, long revisionId, TermUpdate representations, Collection grammaticalFeatures, StatementUpdate statements) { return new FormUpdateImpl(entityId, revisionId, representations, grammaticalFeatures, statements); } @Override public LexemeUpdate getLexemeUpdate( LexemeIdValue entityId, long revisionId, ItemIdValue language, ItemIdValue lexicalCategory, TermUpdate lemmas, StatementUpdate statements, Collection addedSenses, Collection updatedSenses, Collection removedSenses, Collection addedForms, Collection updatedForms, Collection removedForms) { return new LexemeUpdateImpl(entityId, revisionId, language, lexicalCategory, lemmas, statements, addedSenses, updatedSenses, removedSenses, addedForms, updatedForms, removedForms); } @Override public MediaInfoUpdate getMediaInfoUpdate( MediaInfoIdValue entityId, long revisionId, TermUpdate labels, StatementUpdate statements) { return new MediaInfoUpdateImpl(entityId, revisionId, labels, statements); } @Override public ItemUpdate getItemUpdate( ItemIdValue entityId, long revisionId, TermUpdate labels, TermUpdate descriptions, Map aliases, StatementUpdate statements, Collection modifiedSiteLinks, Collection removedSiteLinks) { return new ItemUpdateImpl(entityId, revisionId, labels, descriptions, aliases, statements, modifiedSiteLinks, removedSiteLinks); } @Override public PropertyUpdate getPropertyUpdate( PropertyIdValue entityId, long revisionId, TermUpdate labels, TermUpdate descriptions, Map aliases, StatementUpdate statements) { return new PropertyUpdateImpl(entityId, revisionId, labels, descriptions, aliases, statements); } } DatatypeIdImpl.java000066400000000000000000000231701444772566300363160ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Jackson implementation of {@link DatatypeIdValue}. 
This is not actually
 * present in JSON but needed to satisfy the interface.
 *
 * @author Fredo Erxleben
 * @author Antonin Delpeuch
 *
 */
public class DatatypeIdImpl implements DatatypeIdValue {

	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_ITEM} in JSON.
	 */
	public static final String JSON_DT_ITEM = "wikibase-item";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_PROPERTY} in JSON.
	 */
	public static final String JSON_DT_PROPERTY = "wikibase-property";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_GLOBE_COORDINATES} in JSON.
	 */
	public static final String JSON_DT_GLOBE_COORDINATES = "globe-coordinate";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_URL} in JSON.
	 */
	public static final String JSON_DT_URL = "url";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_COMMONS_MEDIA} in JSON.
	 */
	public static final String JSON_DT_COMMONS_MEDIA = "commonsMedia";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_TIME} in JSON.
	 */
	public static final String JSON_DT_TIME = "time";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_QUANTITY} in JSON.
	 */
	public static final String JSON_DT_QUANTITY = "quantity";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_STRING} in JSON.
	 */
	public static final String JSON_DT_STRING = "string";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_MONOLINGUAL_TEXT} in JSON.
	 */
	public static final String JSON_DT_MONOLINGUAL_TEXT = "monolingualtext";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_EXTERNAL_ID} in JSON.
	 */
	public static final String JSON_DT_EXTERNAL_ID = "external-id";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_MATH} in JSON.
	 */
	public static final String JSON_DT_MATH = "math";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_GEO_SHAPE} in JSON.
	 */
	public static final String JSON_DT_GEO_SHAPE = "geo-shape";
	/**
	 * String used to refer to the property datatype
	 * {@link DatatypeIdValue#DT_EDTF} in JSON.
	 */
	public static final String JSON_DT_EDTF = "edtf";

	private static final Pattern JSON_DATATYPE_PATTERN = Pattern.compile("^[a-zA-Z\\-]+$");
	private static final Pattern DATATYPE_ID_PATTERN = Pattern.compile("^http://wikiba\\.se/ontology#([a-zA-Z]+)$");

	/**
	 * Datatype IRI as used in Wikidata Toolkit.
	 */
	private final String iri;

	/**
	 * JSON representation of the datatype. We store this as well
	 * because the conversion mechanism between JSON datatypes and
	 * datatype URIs is sadly not perfect: see
	 * issue #716.
	 */
	private final String jsonString;

	/**
	 * Returns the WDTK datatype IRI for the property datatype as represented by
	 * the given JSON datatype string.
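	 * <p>
	 * For example (illustrative): {@code getDatatypeIriFromJsonDatatype("time")}
	 * returns {@link DatatypeIdValue#DT_TIME}, while a type without an explicit
	 * case, such as {@code "musical-notation"}, falls through to the
	 * capitalization heuristic and yields
	 * {@code "http://wikiba.se/ontology#MusicalNotation"}.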
* * @param jsonDatatype * the JSON datatype string; case-sensitive * @throws IllegalArgumentException * if the given datatype string is not known */ public static String getDatatypeIriFromJsonDatatype(String jsonDatatype) { switch (jsonDatatype) { case JSON_DT_ITEM: return DT_ITEM; case JSON_DT_PROPERTY: return DT_PROPERTY; case JSON_DT_GLOBE_COORDINATES: return DT_GLOBE_COORDINATES; case JSON_DT_URL: return DT_URL; case JSON_DT_COMMONS_MEDIA: return DT_COMMONS_MEDIA; case JSON_DT_TIME: return DT_TIME; case JSON_DT_QUANTITY: return DT_QUANTITY; case JSON_DT_STRING: return DT_STRING; case JSON_DT_MONOLINGUAL_TEXT: return DT_MONOLINGUAL_TEXT; case JSON_DT_EDTF: return DT_EDTF; default: String[] parts = jsonDatatype.split("-"); for(int i = 0; i < parts.length; i++) { parts[i] = StringUtils.capitalize(parts[i]); } return "http://wikiba.se/ontology#" + StringUtils.join(parts); } } /** * Returns the JSON datatype for the property datatype as represented by * the given WDTK datatype IRI string. * * @param datatypeIri * the WDTK datatype IRI string; case-sensitive * @throws IllegalArgumentException * if the given datatype string is not known * @deprecated this method is unreliable and will be removed in a future release. */ public static String getJsonDatatypeFromDatatypeIri(String datatypeIri) { switch (datatypeIri) { case DatatypeIdValue.DT_ITEM: return DatatypeIdImpl.JSON_DT_ITEM; case DatatypeIdValue.DT_GLOBE_COORDINATES: return DatatypeIdImpl.JSON_DT_GLOBE_COORDINATES; case DatatypeIdValue.DT_URL: return DatatypeIdImpl.JSON_DT_URL; case DatatypeIdValue.DT_COMMONS_MEDIA: return DatatypeIdImpl.JSON_DT_COMMONS_MEDIA; case DatatypeIdValue.DT_TIME: return DatatypeIdImpl.JSON_DT_TIME; case DatatypeIdValue.DT_QUANTITY: return DatatypeIdImpl.JSON_DT_QUANTITY; case DatatypeIdValue.DT_STRING: return DatatypeIdImpl.JSON_DT_STRING; case DatatypeIdValue.DT_MONOLINGUAL_TEXT: return DatatypeIdImpl.JSON_DT_MONOLINGUAL_TEXT; case DatatypeIdValue.DT_PROPERTY: return DatatypeIdImpl.JSON_DT_PROPERTY; case DatatypeIdValue.DT_EDTF: return DatatypeIdImpl.JSON_DT_EDTF; default: //We apply the reverse algorithm of JacksonDatatypeId::getDatatypeIriFromJsonDatatype Matcher matcher = DATATYPE_ID_PATTERN.matcher(datatypeIri); if(!matcher.matches()) { throw new IllegalArgumentException("Unknown datatype: " + datatypeIri); } StringBuilder jsonDatatypeBuilder = new StringBuilder(); for(char ch : StringUtils.uncapitalize(matcher.group(1)).toCharArray()) { if(Character.isUpperCase(ch)) { jsonDatatypeBuilder .append('-') .append(Character.toLowerCase(ch)); } else { jsonDatatypeBuilder.append(ch); } } return jsonDatatypeBuilder.toString(); } } /** * Copy constructor. */ public DatatypeIdImpl(DatatypeIdValue other) { this.iri = other.getIri(); this.jsonString = other.getJsonString(); } /** * Constructs an object representing the datatype id from a IRI denoting * the datatype. It also tries to determine the JSON datatype based on this * IRI, based on a buggy heuristic. If you also happen to have the JSON datatype * at hand, better use {@link DatatypeIdImpl(String, String)}. 
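	 * <p>
	 * A minimal sketch of the preferred two-argument constructor, which avoids
	 * the guessing altogether (using the constants defined in this class):
	 * <pre>{@code
	 * DatatypeIdValue datatype = new DatatypeIdImpl(DatatypeIdValue.DT_TIME, DatatypeIdImpl.JSON_DT_TIME);
	 * }</pre>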
* * @param iri * the WDTK IRI for the datatype * @throws IllegalArgumentException * if the given datatype string could not be matched to a known * datatype or was null * @deprecated use {@link #DatatypeIdImpl(String, String)} */ public DatatypeIdImpl(String iri) throws IllegalArgumentException { Validate.notNull(iri, "An IRI must be provided to create a DatatypeIdValue"); this.iri = iri; // the JSON datatype is not supplied, so we fall back on our buggy heuristic // to guess how it should be represented in JSON. this.jsonString = getJsonDatatypeFromDatatypeIri(this.iri); } /** * Constructs an object representing the datatype id from an IRI denoting the datatype, * as well as a string corresponding to its JSON serialization. This constructor * is meant to be used for JSON deserialization. * * @param iri * the WDTK IRI for the datatype. This can be null. * @param jsonString * the JSON representation of the datatype. This cannot be null. * @throws IllegalArgumentException * if the given datatype string could not be matched to a known * datatype or was null */ public DatatypeIdImpl(String iri, String jsonString) throws IllegalArgumentException { Validate.notNull(jsonString, "A JSON representation of the datatype must be provided to create a DatatypeIdValue"); if(!JSON_DATATYPE_PATTERN.matcher(jsonString).matches()) { throw new IllegalArgumentException("Invalid JSON datatype \"" + jsonString + "\""); } this.jsonString = jsonString; this.iri = iri != null ? iri : getDatatypeIriFromJsonDatatype(jsonString); } /** * Returns the string used to represent this datatype in JSON. */ @Override public String getJsonString() { return this.jsonString; } @Override public String getIri() { return this.iri; } @Override public String toString() { return ToString.toString(this); } @Override public boolean equals(Object o) { return Equality.equalsDatatypeIdValue(this, o); } @Override public int hashCode() { return Hash.hashCode(this); } } EntityDocumentImpl.java000066400000000000000000000120511444772566300372350ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonSubTypes.Type; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; /** * Abstract Jackson implementation of {@link EntityDocument}. Like all Jackson * objects, it is not technically immutable, but it is strongly recommended to * treat it as such in all contexts: the setters are for Jackson; never call * them in your code. 
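 * <p>
 * A deserialization sketch (illustrative; shown with a plain Jackson
 * {@code ObjectMapper}, real code may configure the mapper differently).
 * The {@code "siteIri"} value consumed by the {@code @JacksonInject}
 * constructors has to be supplied from the outside:
 * <pre>{@code
 * ObjectMapper mapper = new ObjectMapper();
 * InjectableValues injection = new InjectableValues.Std()
 *     .addValue("siteIri", "http://www.wikidata.org/entity/");
 * EntityDocument document = mapper.reader(injection)
 *     .forType(EntityDocumentImpl.class)
 *     .readValue(json);
 * }</pre>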
* * @author Thomas Pellissier Tanon * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @JsonSubTypes({ @Type(value = ItemDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_ITEM), @Type(value = LexemeDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_LEXEME), @Type(value = FormDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_FORM), @Type(value = SenseDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_SENSE), @Type(value = MediaInfoDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_MEDIA_INFO), @Type(value = PropertyDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_PROPERTY) }) public abstract class EntityDocumentImpl implements EntityDocument { /** * String used to refer to items in JSON. */ static final String JSON_TYPE_ITEM = "item"; /** * String used to refer to properties in JSON. */ static final String JSON_TYPE_PROPERTY = "property"; /** * String used to refer to lexemes in JSON. */ static final String JSON_TYPE_LEXEME = "lexeme"; /** * String used to refer to forms in JSON. */ static final String JSON_TYPE_FORM = "form"; /** * String used to refer to forms in JSON. */ static final String JSON_TYPE_SENSE = "sense"; /** * String used to refer to forms in JSON. */ static final String JSON_TYPE_MEDIA_INFO = "mediainfo"; /** * The id of the entity that the document refers to. This is not mapped to * JSON directly by Jackson but split into two fields, "type" and "id". The * type field is ignored during deserialization since the type is clear for * a concrete document. For serialization, the type is hard-coded. *

* The site IRI, which would also be required to create a complete * {@link EntityIdValue}, is not encoded in JSON. It needs to be injected * from the outside (if not, we default to Wikidata). */ @JsonIgnore protected final String entityId; /** * The site IRI that this document refers to, or null if not specified. In * the latter case, we assume Wikidata as the default. * * @see EntityIdValue#getSiteIri() */ @JsonIgnore protected final String siteIri; /** * The revision id of this document. * * @see EntityDocument#getRevisionId() */ @JsonIgnore protected final long revisionId; /** * Constructor. * * @param id * the identifier of the subject of this document * @param revisionId * the id of the last revision of this document */ EntityDocumentImpl(EntityIdValue id, long revisionId) { Validate.notNull(id); this.entityId = id.getId(); this.siteIri = id.getSiteIri(); this.revisionId = revisionId; } /** * Constructor used for JSON deserialization with Jackson. */ EntityDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { Validate.notNull(jsonId); this.entityId = jsonId; Validate.notNull(siteIri); this.siteIri = siteIri; this.revisionId = revisionId; } /** * Returns the string id of the entity that this document refers to. Only * for use by Jackson during serialization. * * @return string id */ @JsonInclude(Include.NON_EMPTY) @JsonProperty("id") public String getJsonId() { if (!EntityIdValue.SITE_LOCAL.equals(this.siteIri)) { return this.entityId; } else { return null; } } @JsonIgnore public String getSiteIri() { return this.siteIri; } private static class NonZeroFilter { @Override public boolean equals(Object other) { return (other instanceof Long) && (long)other == 0; } @Override public int hashCode() { return 0; } } @Override @JsonInclude(value=Include.CUSTOM, valueFilter=NonZeroFilter.class) @JsonProperty("lastrevid") public long getRevisionId() { return this.revisionId; } } EntityIdValueImpl.java000066400000000000000000000236141444772566300370170ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import com.fasterxml.jackson.annotation.*; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Abstract base implementation of {@link EntityIdValue} for Jackson. * * @author Markus Kroetzsch * @author Fredo Erxleben * @author Thomas Pellissier Tanon * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) public abstract class EntityIdValueImpl extends ValueImpl implements EntityIdValue { /** * The string used in JSON to denote the type of entity id values that are * items. 
*/
	public final static String JSON_ENTITY_TYPE_ITEM = "item";

	/**
	 * The string used in JSON to denote the type of entity id values that are
	 * properties.
	 */
	public final static String JSON_ENTITY_TYPE_PROPERTY = "property";

	/**
	 * The string used in JSON to denote the type of entity id values that are
	 * lexemes.
	 */
	public final static String JSON_ENTITY_TYPE_LEXEME = "lexeme";

	/**
	 * The string used in JSON to denote the type of entity id values that are
	 * lexeme forms.
	 */
	public final static String JSON_ENTITY_TYPE_FORM = "form";

	/**
	 * The string used in JSON to denote the type of entity id values that are
	 * lexeme senses.
	 */
	public final static String JSON_ENTITY_TYPE_SENSE = "sense";

	/**
	 * The string used in JSON to denote the type of entity id values that are
	 * media info.
	 */
	public final static String JSON_ENTITY_TYPE_MEDIA_INFO = "mediainfo";

	/**
	 * The site IRI that this value refers to. This data is not part of the JSON
	 * serialization of the value, but is needed in WDTK to build all current
	 * types of {@link EntityIdValue} objects.
	 */
	private final String siteIri;

	/**
	 * Inner helper object to store the actual data. Used to get the nested JSON
	 * structure that is required here.
	 */
	private final JacksonInnerEntityId value;

	/**
	 * Constructor.
	 * @param id
	 * 		the identifier of the entity, such as "Q42"
	 * @param siteIri
	 * 		the siteIRI that this value refers to
	 */
	protected EntityIdValueImpl(
			String id,
			String siteIri) {
		super(JSON_VALUE_TYPE_ENTITY_ID);
		this.value = new JacksonInnerEntityId(id);
		Validate.notNull(siteIri, "Entity site IRIs cannot be null");
		this.siteIri = siteIri;
	}

	/**
	 * Constructor used for deserialization with Jackson.
	 */
	@JsonCreator
	protected EntityIdValueImpl(
			@JsonProperty("value") JacksonInnerEntityId value,
			@JacksonInject String siteIri) {
		super(JSON_VALUE_TYPE_ENTITY_ID);
		this.value = value;
		this.siteIri = siteIri;
	}

	/**
	 * Parses an entity id and returns the matching {@link EntityIdValue}
	 * implementation.
	 *
	 * @param id
	 * 		the identifier of the entity, such as "Q42"
	 * @param siteIri
	 * 		the siteIRI that this value refers to
	 * @throws IllegalArgumentException
	 * 		if the id is invalid
	 */
	public static EntityIdValue fromId(String id, String siteIri) {
		switch (guessEntityTypeFromId(id, true)) {
			case EntityIdValueImpl.JSON_ENTITY_TYPE_ITEM:
				return new ItemIdValueImpl(id, siteIri);
			case EntityIdValueImpl.JSON_ENTITY_TYPE_PROPERTY:
				return new PropertyIdValueImpl(id, siteIri);
			case EntityIdValueImpl.JSON_ENTITY_TYPE_LEXEME:
				return new LexemeIdValueImpl(id, siteIri);
			case EntityIdValueImpl.JSON_ENTITY_TYPE_FORM:
				return new FormIdValueImpl(id, siteIri);
			case EntityIdValueImpl.JSON_ENTITY_TYPE_SENSE:
				return new SenseIdValueImpl(id, siteIri);
			case EntityIdValueImpl.JSON_ENTITY_TYPE_MEDIA_INFO:
				return new MediaInfoIdValueImpl(id, siteIri);
			default:
				throw new IllegalArgumentException("Entity id \"" + id + "\" is not supported.");
		}
	}

	/**
	 * Returns the entity type of the id, like "item" or "property".
	 *
	 * @param id
	 * 		the identifier of the entity, such as "Q42"
	 * @param returnJsonEntity
	 * 		returns JSON entity types when set to true
	 * @throws IllegalArgumentException
	 * 		if the id is invalid
	 */
	static String guessEntityTypeFromId(String id, boolean returnJsonEntity) {
		if(id.isEmpty()) {
			throw new IllegalArgumentException("Entity ids should not be empty.");
		}
		switch (id.charAt(0)) {
			case 'L':
				if(id.contains("-F")) {
					return returnJsonEntity ? JSON_ENTITY_TYPE_FORM : DatatypeIdValue.DT_FORM;
				} else if(id.contains("-S")) {
					return returnJsonEntity ? JSON_ENTITY_TYPE_SENSE : DatatypeIdValue.DT_SENSE;
				} else {
					return returnJsonEntity ?
JSON_ENTITY_TYPE_LEXEME : DatatypeIdValue.DT_LEXEME; } case 'M': return returnJsonEntity ? JSON_ENTITY_TYPE_MEDIA_INFO : DatatypeIdValue.DT_MEDIA_INFO; case 'P': return returnJsonEntity ? JSON_ENTITY_TYPE_PROPERTY : DatatypeIdValue.DT_PROPERTY; case 'Q': return returnJsonEntity ? JSON_ENTITY_TYPE_ITEM : DatatypeIdValue.DT_ITEM; default: throw new IllegalArgumentException("Entity id \"" + id + "\" is not supported."); } } /** * Returns the entity type of the id like "item" or "property" * * @param id * the identifier of the entity, such as "Q42" * @throws IllegalArgumentException * if the id is invalid */ public static String guessEntityTypeFromId(String id){ return guessEntityTypeFromId(id, false); } /** * Returns the inner value helper object. Only for use by Jackson during * serialization. * * @return the inner entity id value */ @JsonProperty("value") public JacksonInnerEntityId getValue() { return value; } @JsonIgnore @Override public String getIri() { return this.getSiteIri().concat(this.getId()); } @JsonIgnore @Override public String getId() { return this.value.getStringId(); } @JsonIgnore @Override public String getSiteIri() { if (this.siteIri != null) { return this.siteIri; } else { throw new RuntimeException( "Cannot access the site IRI id of an insufficiently initialised Jackson value."); } } protected void assertHasJsonEntityType(String expectedType) { if(!expectedType.equals(value.entityType)) { throw new IllegalArgumentException( "The value should have the entity-type \"" + expectedType + "\": " + this ); } } /** * Helper object that represents the JSON object structure of the value. */ @JsonIgnoreProperties(ignoreUnknown = true) static class JacksonInnerEntityId { private final String id; private final String entityType; private final int numericId; JacksonInnerEntityId(String id) { this.id = id; entityType = guessEntityTypeFromId(id, true); numericId = buildNumericId(id); } /** * Creates an object that can be populated during JSON deserialization. * Should only be used by Jackson for this very purpose. */ @JsonCreator JacksonInnerEntityId( @JsonProperty("id") String id, @JsonProperty("numeric-id") int numericId, @JsonProperty("entity-type") String entityType ) { if(id == null) { if(entityType == null || numericId == 0) { throw new IllegalArgumentException("You should provide an id or an entity type and a numeric id"); } else { this.id = buildIdFromNumericId(entityType, numericId); this.entityType = entityType; this.numericId = numericId; } } else { this.id = id; if(entityType == null || numericId == 0) { this.entityType = guessEntityTypeFromId(id, true); this.numericId = buildNumericId(id); } else if(!id.equals(buildIdFromNumericId(entityType, numericId))) { throw new IllegalArgumentException("Numerical id is different from the string id"); } else { this.entityType = entityType; this.numericId = numericId; } } } /** * Returns the entity type string as used in JSON. Only for use by Jackson * during serialization. * * @return the entity type string */ @JsonProperty("entity-type") public String getJsonEntityType() { return entityType; } /** * Returns the numeric item id as used in JSON. Only for use by Jackson * during serialization. * * @return the numeric entity id */ @JsonProperty("numeric-id") public int getNumericId() { return numericId; } /** * Returns the standard string version of the entity id encoded in this * value. For example, an id with entityType "item" and numericId "42" is * normally identified as "Q42". 
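		 * <p>
		 * Equivalently (illustrative), the JSON fragments
		 * {@code {"entity-type": "item", "numeric-id": 42}} and
		 * {@code {"id": "Q42"}} deserialize to the same inner value.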
* * @return the string id */ @JsonProperty("id") public String getStringId() { return id; } private int buildNumericId(String id) { if (id.length() <= 1) { throw new IllegalArgumentException( "Wikibase entity ids must have the form \"(L|P|Q)\". Given id was \"" + id + "\""); } try { return Integer.parseInt(id.substring(1)); } catch (NumberFormatException e) { throw new IllegalArgumentException( "Wikibase entity ids must have the form \"(L|P|Q)\". Given id was \"" + id + "\""); } } private String buildIdFromNumericId(String entityType, int numericId) { switch (entityType) { case JSON_ENTITY_TYPE_ITEM: return "Q" + numericId; case JSON_ENTITY_TYPE_LEXEME: return "L" + numericId; case JSON_ENTITY_TYPE_PROPERTY: return "P" + numericId; case JSON_ENTITY_TYPE_MEDIA_INFO: return "M" + numericId; default: throw new IllegalArgumentException("Entities of type \"" + entityType + "\" are not supported in property values."); } } } } EntityRedirectDocumentImpl.java000066400000000000000000000071171444772566300407260ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityRedirectDocument; /** * Implementation of {@link EntityRedirectDocument} * * @author Thomas Pellissier Tanon * */ public class EntityRedirectDocumentImpl implements EntityRedirectDocument { private final EntityIdValue entityId; private final EntityIdValue targetId; private final long revisionId; /** * Constructor. * * @param id * the identifier of the subject of this document * @param targetId * the identifier of the entity this document redirect to * @param revisionId * the id of the last revision of this document */ EntityRedirectDocumentImpl(EntityIdValue id, EntityIdValue targetId, long revisionId) { Validate.notNull(id); this.entityId = id; Validate.notNull(targetId); Validate.isTrue(id.getEntityType().equals(targetId.getEntityType()), "You could only do redirects between entities of the same type"); this.targetId = targetId; this.revisionId = revisionId; } /** * Constructor used for JSON deserialization with Jackson. 
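	 * The expected JSON is along the lines of (illustrative values)
	 * {@code {"entity": "Q123", "redirect": "Q456", "lastrevid": 789}},
	 * matching the property names declared below.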
*/ @JsonCreator EntityRedirectDocumentImpl( @JsonProperty("entity") String jsonId, @JsonProperty("redirect") String jsonTargetId, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { this.entityId = EntityIdValueImpl.fromId(jsonId, siteIri); Validate.notNull(jsonTargetId); this.targetId = EntityIdValueImpl.fromId(jsonTargetId, siteIri); Validate.isTrue(getEntityId().getEntityType().equals(targetId.getEntityType()), "You could only do redirects between entities of the same type"); this.revisionId = revisionId; } @JsonIgnore @Override public EntityIdValue getEntityId() { return entityId; } @JsonProperty("entity") String getEntityJson() { return entityId.getId(); } @JsonIgnore @Override public long getRevisionId() { return revisionId; } @Override public EntityRedirectDocument withRevisionId(long newRevisionId) { return new EntityRedirectDocumentImpl(entityId, targetId, newRevisionId); } @JsonIgnore @Override public EntityIdValue getTargetId() { return targetId; } @JsonProperty("redirect") String getTargetJson() { return targetId.getId(); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsEntityRedirectDocument(this, obj); } @Override public String toString() { return ToString.toString(this); } } EntityUpdateImpl.java000066400000000000000000000041301444772566300367000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.util.Objects; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; /** * Jackson implementation of {@link EntityUpdate}. */ public abstract class EntityUpdateImpl implements EntityUpdate { @JsonIgnore private final EntityIdValue entityId; @JsonIgnore private final long revisionId; /** * Initializes new entity update. 
*
	 * @param entityId
	 *            ID of the entity that is to be updated
	 * @param revisionId
	 *            base entity revision to be updated or zero if not available
	 * @throws NullPointerException
	 *             if {@code entityId} is {@code null}
	 * @throws IllegalArgumentException
	 *             if {@code entityId} is a placeholder ID or it does not match base
	 *             revision document ID (if provided)
	 */
	protected EntityUpdateImpl(EntityIdValue entityId, long revisionId) {
		Objects.requireNonNull(entityId, "Entity ID cannot be null.");
		Validate.isTrue(!entityId.isPlaceholder(), "Cannot create update for placeholder entity ID.");
		this.entityId = entityId;
		this.revisionId = revisionId;
	}

	@JsonIgnore
	@Override
	public EntityIdValue getEntityId() {
		return entityId;
	}

	@JsonIgnore
	@Override
	public long getBaseRevisionId() {
		return revisionId;
	}

}
FormDocumentImpl.java000066400000000000000000000171351444772566300366740ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import org.wikidata.wdtk.datamodel.helpers.Equality;
import org.wikidata.wdtk.datamodel.helpers.Hash;
import org.wikidata.wdtk.datamodel.helpers.ToString;
import org.wikidata.wdtk.datamodel.interfaces.*;

import java.util.*;

/**
 * Jackson implementation of {@link FormDocument}.
 *
 * @author Thomas Pellissier Tanon
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonTypeInfo(use = JsonTypeInfo.Id.NONE)
public class FormDocumentImpl extends StatementDocumentImpl implements FormDocument {

	private final List<ItemIdValue> grammaticalFeatures;

	private final Map<String, MonolingualTextValue> representations;

	/**
	 * Constructor.
	 *
	 * @param id
	 *            the id of the form that data is about
	 * @param representations
	 *            the list of representations of this form, with at most one
	 *            representation for each language code
	 * @param grammaticalFeatures
	 *            the grammatical features of the form
	 * @param statements
	 *            the list of statement groups of this form; all of them must
	 *            have the given id as their subject
	 * @param revisionId
	 *            the revision ID or 0 if not known; see
	 *            {@link EntityDocument#getRevisionId()}
	 */
	FormDocumentImpl(
			FormIdValue id,
			List<MonolingualTextValue> representations,
			List<ItemIdValue> grammaticalFeatures,
			List<StatementGroup> statements,
			long revisionId) {
		super(id, statements, revisionId);
		this.representations = (representations == null || representations.isEmpty())
				? Collections.emptyMap() : constructTermMap(representations);
		this.grammaticalFeatures = (grammaticalFeatures == null)
				? Collections.emptyList() : grammaticalFeatures;
		this.grammaticalFeatures.sort(Comparator.comparing(EntityIdValue::getId));
	}

	/**
	 * Constructor. Creates an object that can be populated during JSON
	 * deserialization. Should only be used by Jackson for this very purpose.
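	 * A form is serialized roughly as follows (a sketch; the ids and values
	 * are made up for illustration):
	 * <pre>{@code
	 * { "id": "L42-F1",
	 *   "representations": { "en": { "language": "en", "value": "books" } },
	 *   "grammaticalFeatures": [ "Q146786" ],
	 *   "claims": { },
	 *   "lastrevid": 123 }
	 * }</pre>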
*/ @JsonCreator FormDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("representations") @JsonDeserialize(contentAs=TermImpl.class) Map representations, @JsonProperty("grammaticalFeatures") List grammaticalFeatures, @JsonProperty("claims") Map> claims, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, claims, revisionId, siteIri); this.representations = (representations == null) ? Collections.emptyMap() : representations; this.grammaticalFeatures = (grammaticalFeatures == null || grammaticalFeatures.isEmpty()) ? Collections.emptyList() : constructGrammaticalFeatures(grammaticalFeatures, siteIri); } /** * Copy constructor, used when creating modified copies of forms. */ private FormDocumentImpl( FormIdValue id, Map representations, List grammaticalFeatures, Map> statements, long revisionId) { super(id, statements, revisionId); this.representations = representations; this.grammaticalFeatures = grammaticalFeatures; } private static Map constructTermMap(List terms) { Map map = new HashMap<>(); for(MonolingualTextValue term : terms) { String language = term.getLanguageCode(); if(map.containsKey(language)) { throw new IllegalArgumentException("Multiple terms provided for the same language."); } // We need to make sure the terms are of the right type, otherwise they will not // be serialized correctly. map.put(language, toTerm(term)); } return map; } private static MonolingualTextValue toTerm(MonolingualTextValue term) { return (term instanceof TermImpl) ? term : new TermImpl(term.getLanguageCode(), term.getText()); } private List constructGrammaticalFeatures(List grammaticalFeatures, String siteIri) { List output = new ArrayList<>(grammaticalFeatures.size()); for(String grammaticalFeature : grammaticalFeatures) { output.add(new ItemIdValueImpl(grammaticalFeature, siteIri)); } return output; } @JsonIgnore @Override public FormIdValue getEntityId() { return new FormIdValueImpl(entityId, siteIri); } @JsonIgnore @Override public List getGrammaticalFeatures() { return grammaticalFeatures; } @JsonProperty("grammaticalFeatures") List getJsonGrammaticalFeatures() { if (grammaticalFeatures.isEmpty()) { return Collections.emptyList(); } List output = new ArrayList<>(grammaticalFeatures.size()); for(ItemIdValue feature : grammaticalFeatures) { output.add(feature.getId()); } return output; } @JsonProperty("type") String getType() { return EntityDocumentImpl.JSON_TYPE_FORM; } @JsonProperty("representations") @Override public Map getRepresentations() { return representations; } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsFormDocument(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public FormDocument withEntityId(FormIdValue newEntityId) { return new FormDocumentImpl(newEntityId, representations, grammaticalFeatures, claims, revisionId); } @Override public FormDocument withRevisionId(long newRevisionId) { return new FormDocumentImpl(getEntityId(), representations, grammaticalFeatures, claims, newRevisionId); } @Override public FormDocument withRepresentation(MonolingualTextValue representation) { Map newRepresentations = new HashMap<>(representations); newRepresentations.put(representation.getLanguageCode(), toTerm(representation)); return new FormDocumentImpl(getEntityId(), newRepresentations, grammaticalFeatures, claims, revisionId); } @Override public FormDocument withGrammaticalFeature(ItemIdValue 
grammaticalFeature) { if (grammaticalFeatures.contains(grammaticalFeature)) { return this; } List newGrammaticalFeatures = new ArrayList<>(grammaticalFeatures); newGrammaticalFeatures.add(grammaticalFeature); return new FormDocumentImpl(getEntityId(), representations, newGrammaticalFeatures, claims, revisionId); } @Override public FormDocument withStatement(Statement statement) { Map> newGroups = addStatementToGroups(statement, claims); return new FormDocumentImpl(getEntityId(), representations, grammaticalFeatures, newGroups, revisionId); } @Override public FormDocument withoutStatementIds(Set statementIds) { Map> newGroups = removeStatements(statementIds, claims); return new FormDocumentImpl(getEntityId(), representations, grammaticalFeatures, newGroups, revisionId); } } FormIdValueImpl.java000066400000000000000000000106141444772566300364420ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import java.util.regex.Pattern; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Jackson implementation of {@link FormIdValue}. * TODO: It is not possible to use it as statement value yet. * * @author Thomas Pellissier Tanon * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class FormIdValueImpl extends ValueImpl implements FormIdValue { /* * Allow L0-F0 from FormIdValue.NULL. */ private static final Pattern PATTERN = Pattern.compile("L[1-9]\\d*-F[1-9]\\d*|L0-F0"); private final String id; private final String siteIri; /** * Constructor. * * @param id * the identifier of the entity, such as "L42-F43" * @param siteIri * the siteIRI that this value refers to */ FormIdValueImpl( String id, String siteIri) { super(JSON_VALUE_TYPE_ENTITY_ID); if(id == null || !PATTERN.matcher(id).matches()) { throw new IllegalArgumentException("The string " + id + " is not a valid form id"); } this.id = id; Validate.notNull(siteIri); this.siteIri = siteIri; } /** * Constructor used for deserialization with Jackson. 
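	 * Accepts the same id strings as the string-based constructor, for example
	 * {@code "L42-F1"} (illustrative id).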
*/ @JsonCreator FormIdValueImpl( @JsonProperty("value") JacksonInnerEntityId value, @JacksonInject("siteIri") String siteIri) { this(value.getStringId(), siteIri); } @JsonIgnore @Override public String getEntityType() { return EntityIdValue.ET_FORM; } @JsonIgnore @Override public String getId() { return id; } @JsonIgnore @Override public String getSiteIri() { return siteIri; } @JsonIgnore @Override public String getIri() { return siteIri + id; } @JsonIgnore @Override public boolean isPlaceholder() { return id.equals("L0-F0"); } /** * Returns the inner value helper object. Only for use by Jackson during * serialization. * * @return the inner entity id value */ @JsonProperty("value") JacksonInnerEntityId getValue() { return new JacksonInnerEntityId(id); } @JsonIgnore @Override public LexemeIdValue getLexemeId() { return new LexemeIdValueImpl(id.substring(0, id.indexOf("-")), siteIri); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsEntityIdValue(this, obj); } @Override public String toString() { return ToString.toString(this); } /** * Helper object that represents the JSON object structure of the value. */ @JsonIgnoreProperties(ignoreUnknown = true) static class JacksonInnerEntityId { private final String id; @JsonCreator JacksonInnerEntityId( @JsonProperty("id") String id ) { this.id = id; } /** * Returns the entity type string as used in JSON. Only for use by Jackson * during serialization. * * @return the entity type string */ @JsonProperty("entity-type") String getJsonEntityType() { return "form"; } /** * Returns the standard string version of the entity id encoded in this * value. For example, an id with entityType "item" and numericId "42" is * normally identified as "Q42". * * @return the string id */ @JsonProperty("id") String getStringId() { return id; } } } FormUpdateImpl.java000066400000000000000000000107411444772566300363340ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static java.util.stream.Collectors.toList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link FormUpdate}. */ public class FormUpdateImpl extends StatementDocumentUpdateImpl implements FormUpdate { @JsonIgnore private final TermUpdate representations; @JsonIgnore private final Set grammaticalFeatures; /** * Initializes new form update. * * @param entityId * ID of the form that is to be updated * @param revisionId * base form revision to be updated or zero if not available * @param representations * changes in form representations, possibly empty * @param grammaticalFeatures * new grammatical features of the form or {@code null} for no change * @param statements * changes in entity statements, possibly empty * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public FormUpdateImpl( FormIdValue entityId, long revisionId, TermUpdate representations, Collection grammaticalFeatures, StatementUpdate statements) { super(entityId, revisionId, statements); Objects.requireNonNull(representations, "Representation update cannot be null."); if (grammaticalFeatures != null) { for (ItemIdValue feature : grammaticalFeatures) { Objects.requireNonNull(feature, "Grammatical feature cannot be null."); Validate.isTrue(!feature.isPlaceholder(), "Grammatical feature cannot be a placeholder ID."); } Validate.isTrue( grammaticalFeatures.stream().distinct().count() == grammaticalFeatures.size(), "Grammatical features must be unique."); } this.representations = representations; this.grammaticalFeatures = grammaticalFeatures != null ? Collections.unmodifiableSet(new HashSet<>(grammaticalFeatures)) : null; } @JsonIgnore @Override public FormIdValue getEntityId() { return (FormIdValue) super.getEntityId(); } @JsonIgnore @Override public boolean isEmpty() { return super.isEmpty() && representations.isEmpty() && grammaticalFeatures == null; } @JsonIgnore @Override public TermUpdate getRepresentations() { return representations; } @JsonProperty("representations") @JsonInclude(Include.NON_NULL) TermUpdate getJsonRepresentations() { return representations.isEmpty() ? 
null : representations; } @JsonIgnore @Override public Optional> getGrammaticalFeatures() { return Optional.ofNullable(grammaticalFeatures); } @JsonProperty("grammaticalFeatures") @JsonInclude(Include.NON_NULL) List getJsonGrammaticalFeatures() { if (grammaticalFeatures == null) return null; return grammaticalFeatures.stream().map(f -> f.getId()).collect(toList()); } @Override public boolean equals(Object obj) { return Equality.equalsFormUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } GlobeCoordinatesValueImpl.java000066400000000000000000000140051444772566300405030ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Jackson implementation of {@link GlobeCoordinatesValue}. * * @author Fredo Erxleben * @author Antonin Delpeuch * @author Markus Kroetzsch * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class GlobeCoordinatesValueImpl extends ValueImpl implements GlobeCoordinatesValue { /** * Inner helper object to store the actual data. Used to get the nested JSON * structure that is required here. */ private final JacksonInnerGlobeCoordinates value; /** * Constructor. * * @param latitude * the latitude of the coordinates in degrees * @param longitude * the longitude of the coordinates in degrees * @param precision * the precision of the coordinates in degrees * @param globe * IRI specifying the celestial objects of the coordinates */ public GlobeCoordinatesValueImpl(double latitude, double longitude, double precision, String globe) { super(JSON_VALUE_TYPE_GLOBE_COORDINATES); this.value = new JacksonInnerGlobeCoordinates(latitude, longitude, precision, globe); } /** * Constructor for deserialization from JSON via Jackson. */ @JsonCreator GlobeCoordinatesValueImpl( @JsonProperty("value") JacksonInnerGlobeCoordinates innerCoordinates) { super(JSON_VALUE_TYPE_GLOBE_COORDINATES); this.value = innerCoordinates; } /** * Returns the inner value helper object. Only for use by Jackson during * serialization. 
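	 * <p>
	 * The inner object mirrors the JSON structure of the value, e.g.
	 * (illustrative numbers) {@code {"latitude": 51.03, "longitude": 13.73,
	 * "precision": 0.01, "globe": "http://www.wikidata.org/entity/Q2"}}.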
*
	 * @return the inner globe coordinates value
	 */
	public JacksonInnerGlobeCoordinates getValue() {
		return value;
	}

	@JsonIgnore
	@Override
	public double getLatitude() {
		return this.value.getLatitude();
	}

	@JsonIgnore
	@Override
	public double getLongitude() {
		return this.value.getLongitude();
	}

	@JsonIgnore
	@Override
	public double getPrecision() {
		return this.value.getPrecision();
	}

	@JsonIgnore
	@Override
	public String getGlobe() {
		return this.value.getGlobe();
	}

	@JsonIgnore
	@Override
	public ItemIdValue getGlobeItemId() {
		return ItemIdValueImpl.fromIri(this.value.getGlobe());
	}

	@Override
	public <T> T accept(ValueVisitor<T> valueVisitor) {
		return valueVisitor.visit(this);
	}

	@Override
	public int hashCode() {
		return Hash.hashCode(this);
	}

	@Override
	public boolean equals(Object obj) {
		return Equality.equalsGlobeCoordinatesValue(this, obj);
	}

	@Override
	public String toString() {
		return ToString.toString(this);
	}

	/**
	 * Helper object that represents the JSON object structure of the value.
	 */
	@JsonIgnoreProperties(ignoreUnknown = true)
	static class JacksonInnerGlobeCoordinates {

		private final double latitude;
		private final double longitude;
		private final double precision;
		private final String globe;

		/**
		 * Constructor. Creates an object that can be populated during JSON
		 * deserialization. Should only be used by Jackson for this very purpose.
		 */
		@JsonCreator
		JacksonInnerGlobeCoordinates(
				@JsonProperty("latitude") double latitude,
				@JsonProperty("longitude") double longitude,
				@JsonProperty("precision") double precision,
				@JsonProperty("globe") String globe) {
			Validate.notNull(globe, "globe IRI must not be null");
			if ((latitude > 90 * PREC_DEGREE) || (latitude < -90 * PREC_DEGREE)) {
				throw new IllegalArgumentException(
						"Latitude must be between 90 degrees and -90 degrees.");
			}
			if ((longitude > 360 * PREC_DEGREE) || (longitude < -360 * PREC_DEGREE)) {
				throw new IllegalArgumentException(
						"Longitude must be between -360 degrees and +360 degrees.");
			}
			this.latitude = latitude;
			this.longitude = longitude;

			if (precision <= 0.0) {
				// We just do this silently because it is so common in the data.
				// Precision "0" does not make sense for a physical quantity.
				// Automatic precision does not make sense for floating point
				// values. "0" also is commonly produced from "null" in JSON.
				this.precision = PREC_ARCSECOND;
			} else {
				this.precision = precision;
			}

			this.globe = globe;
		}

		/**
		 * Returns the latitude.
		 *
		 * @see GlobeCoordinatesValue#getLatitude()
		 * @return latitude
		 */
		public double getLatitude() {
			return this.latitude;
		}

		/**
		 * Returns the longitude.
		 *
		 * @see GlobeCoordinatesValue#getLongitude()
		 * @return longitude
		 */
		public double getLongitude() {
			return this.longitude;
		}

		/**
		 * Returns the precision.
		 *
		 * @see GlobeCoordinatesValue#getPrecision()
		 * @return precision
		 */
		public double getPrecision() {
			return this.precision;
		}

		/**
		 * Returns the globe.
		 *
		 * @see GlobeCoordinatesValue#getGlobe()
		 * @return globe
		 */
		public String getGlobe() {
			return this.globe;
		}

	}
}
ItemDocumentImpl.java000066400000000000000000000161511444772566300366640ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Jackson implementation of {@link ItemDocument}. * * @author Fredo Erxleben * @author Antonin Delpeuch */ @JsonIgnoreProperties(ignoreUnknown = true) public class ItemDocumentImpl extends TermedStatementDocumentImpl implements ItemDocument { /** * Map to store site links. */ @JsonDeserialize(contentAs=SiteLinkImpl.class) private final Map sitelinks; /** * Constructor. * * @param id * the id of the item that data is about * @param labels * the list of labels of this item, with at most one label for * each language code * @param descriptions * the list of descriptions of this item, with at most one * description for each language code * @param aliases * the list of aliases of this item * @param statements * the list of statement groups of this item; all of them must * have the given itemIdValue as their subject * @param siteLinks * the sitelinks of this item * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} */ public ItemDocumentImpl( ItemIdValue id, List labels, List descriptions, List aliases, List statements, List siteLinks, long revisionId) { super(id, labels, descriptions, aliases, statements, revisionId); this.sitelinks = new HashMap<>(); for(SiteLink sitelink : siteLinks) { if(this.sitelinks.containsKey(sitelink.getSiteKey())) { throw new IllegalArgumentException("Multiple site links provided for the same site."); } else { this.sitelinks.put(sitelink.getSiteKey(), sitelink); } } } /** * Constructor. Creates an object that can be populated during JSON * deserialization. Should only be used by Jackson for this very purpose. 
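	 * The constructor consumes the usual Wikibase item JSON, sketched here with
	 * illustrative content:
	 * <pre>{@code
	 * { "type": "item", "id": "Q42",
	 *   "labels": { "en": { "language": "en", "value": "Douglas Adams" } },
	 *   "descriptions": { },
	 *   "aliases": { },
	 *   "claims": { },
	 *   "sitelinks": { "enwiki": { "site": "enwiki", "title": "Douglas Adams", "badges": [] } },
	 *   "lastrevid": 123 }
	 * }</pre>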
*/ @JsonCreator public ItemDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("labels") @JsonDeserialize(contentAs=TermImpl.class) Map labels, @JsonProperty("descriptions") @JsonDeserialize(contentAs=TermImpl.class) Map descriptions, @JsonProperty("aliases") @JsonDeserialize(using = AliasesDeserializer.class) Map> aliases, @JsonProperty("claims") Map> claims, @JsonProperty("sitelinks") Map sitelinks, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, labels, descriptions, aliases, claims, revisionId, siteIri); if (sitelinks != null) { this.sitelinks = sitelinks; } else { this.sitelinks = Collections.emptyMap(); } } /** * Protected constructor, meant to be used to create modified copies * of instances. */ protected ItemDocumentImpl( ItemIdValue subject, Map labels, Map descriptions, Map> aliases, Map> claims, Map siteLinks, long revisionId) { super(subject, labels, descriptions, aliases, claims, revisionId); this.sitelinks = siteLinks; } @JsonIgnore @Override public ItemIdValue getEntityId() { return new ItemIdValueImpl(entityId, siteIri); } @JsonProperty("sitelinks") @Override public Map getSiteLinks() { return Collections. unmodifiableMap(this.sitelinks); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsItemDocument(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public ItemDocument withEntityId(ItemIdValue itemId) { return new ItemDocumentImpl(itemId, labels, descriptions, aliases, claims, sitelinks, revisionId); } @Override public ItemDocument withRevisionId(long newRevisionId) { return new ItemDocumentImpl(getEntityId(), labels, descriptions, aliases, claims, sitelinks, newRevisionId); } @Override public ItemDocument withLabel(MonolingualTextValue newLabel) { return new ItemDocumentImpl(getEntityId(), withTerm(labels, newLabel), descriptions, aliases, claims, sitelinks, revisionId); } @Override public ItemDocument withDescription(MonolingualTextValue newDescription) { return new ItemDocumentImpl(getEntityId(), labels, withTerm(descriptions, newDescription), aliases, claims, sitelinks, revisionId); } @Override public ItemDocument withAliases(String language, List aliases) { return new ItemDocumentImpl(getEntityId(), labels, descriptions, withAliases(this.aliases, language, aliases), claims, sitelinks, revisionId); } @Override public ItemDocument withStatement(Statement statement) { Map> newGroups = addStatementToGroups(statement, claims); return new ItemDocumentImpl(getEntityId(), labels, descriptions, aliases, newGroups, sitelinks, revisionId); } @Override public ItemDocument withoutStatementIds(Set statementIds) { Map> newGroups = removeStatements(statementIds, claims); return new ItemDocumentImpl(getEntityId(), labels, descriptions, aliases, newGroups, sitelinks, revisionId); } } ItemIdValueImpl.java000066400000000000000000000062541444772566300364420ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import 
org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Jackson implementation of {@link ItemIdValue}. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class ItemIdValueImpl extends EntityIdValueImpl implements ItemIdValue { /** * Constructor. * * @param id * the identifier of the entity, such as "Q42" * @param siteIri * the siteIRI that this value refers to */ public ItemIdValueImpl( String id, String siteIri) { super(id, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_ITEM); } /** * Parses an item IRI. * * @param iri * the item IRI like http://www.wikidata.org/entity/Q42 * @throws IllegalArgumentException * if the IRI is invalid or does not end with an item id */ static ItemIdValueImpl fromIri(String iri) { int separator = iri.lastIndexOf('/') + 1; try { return new ItemIdValueImpl(iri.substring(separator), iri.substring(0, separator)); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("Invalid Wikibase entity IRI: " + iri, e); } } /** * Constructor used for deserialization with Jackson. * * @param value * the inner JSON object deserialized as a {@link JacksonInnerEntityId} * @param siteIri * the siteIRI that this value refers to. */ @JsonCreator ItemIdValueImpl( @JsonProperty("value") JacksonInnerEntityId value, @JacksonInject("siteIri") String siteIri) { super(value, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_ITEM); } @JsonIgnore @Override public String getEntityType() { return EntityIdValue.ET_ITEM; } @JsonIgnore @Override public boolean isPlaceholder() { return getId().equals("Q0"); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsEntityIdValue(this, obj); } @Override public String toString() { return ToString.toString(this); } } ItemUpdateImpl.java000066400000000000000000000124711444772566300363310ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static java.util.stream.Collectors.toMap; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Stream; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link ItemUpdate}. */ public class ItemUpdateImpl extends TermedDocumentUpdateImpl implements ItemUpdate { @JsonIgnore private final Map modifiedSiteLinks; @JsonIgnore private final Set removedSiteLinks; /** * Initializes new item update. * * @param entityId * ID of the item that is to be updated * @param revisionId * base item revision to be updated or zero if not available * @param labels * changes in entity labels or {@code null} for no change * @param descriptions * changes in entity descriptions or {@code null} for no change * @param aliases * changes in entity aliases, possibly empty * @param statements * changes in entity statements, possibly empty * @param modifiedSiteLinks * added or replaced site links * @param removedSiteLinks * site keys of removed site links * @throws NullPointerException * if any required parameter or its item is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public ItemUpdateImpl( ItemIdValue entityId, long revisionId, TermUpdate labels, TermUpdate descriptions, Map aliases, StatementUpdate statements, Collection modifiedSiteLinks, Collection removedSiteLinks) { super(entityId, revisionId, labels, descriptions, aliases, statements); Objects.requireNonNull(modifiedSiteLinks, "Collection of modified site links cannot be null."); Objects.requireNonNull(removedSiteLinks, "Collection of removed site links cannot be null."); for (SiteLink link : modifiedSiteLinks) { Objects.requireNonNull(link, "Site link cannot be null."); } for (String siteKey : removedSiteLinks) { Validate.notBlank(siteKey, "Site key of removed site link cannot be null or blank."); } long distinctSiteKeys = Stream .concat( modifiedSiteLinks.stream().map(l -> l.getSiteKey()), removedSiteLinks.stream()) .distinct() .count(); Validate.isTrue(distinctSiteKeys == modifiedSiteLinks.size() + removedSiteLinks.size(), "Duplicate site key."); this.modifiedSiteLinks = Collections.unmodifiableMap(modifiedSiteLinks.stream() .collect(toMap(sl -> sl.getSiteKey(), sl -> sl))); this.removedSiteLinks = Collections.unmodifiableSet(new HashSet<>(removedSiteLinks)); } @JsonIgnore @Override public ItemIdValue getEntityId() { return (ItemIdValue) super.getEntityId(); } @JsonIgnore @Override public boolean isEmpty() { return super.isEmpty() && modifiedSiteLinks.isEmpty() && removedSiteLinks.isEmpty(); } @JsonIgnore @Override public Map getModifiedSiteLinks() { return modifiedSiteLinks; } @JsonIgnore @Override 
public Set getRemovedSiteLinks() { return removedSiteLinks; } static class RemovedSiteLink { private final String site; RemovedSiteLink(String site) { this.site = site; } @JsonProperty String getSite() { return site; } @JsonProperty("remove") String getRemoveCommand() { return ""; } } @JsonProperty("sitelinks") @JsonInclude(Include.NON_EMPTY) Map getJsonSiteLinks() { Map map = new HashMap<>(); for (SiteLink link : modifiedSiteLinks.values()) { map.put(link.getSiteKey(), link); } for (String site : removedSiteLinks) { map.put(site, new RemovedSiteLink(site)); } return map; } @Override public boolean equals(Object obj) { return Equality.equalsItemUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } LabeledDocumentUpdateImpl.java000066400000000000000000000051361444772566300404620ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.util.Objects; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link LabeledStatementDocumentUpdate}. */ public abstract class LabeledDocumentUpdateImpl extends StatementDocumentUpdateImpl implements LabeledStatementDocumentUpdate { @JsonIgnore private final TermUpdate labels; /** * Initializes new entity update. * * @param entityId * ID of the entity that is to be updated * @param revisionId * base entity revision to be updated or zero if not available * @param labels * changes in entity labels, possibly empty * @param statements * changes in entity statements, possibly empty * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ protected LabeledDocumentUpdateImpl( EntityIdValue entityId, long revisionId, TermUpdate labels, StatementUpdate statements) { super(entityId, revisionId, statements); Objects.requireNonNull(labels, "Label update cannot be null."); this.labels = labels; } @JsonIgnore @Override public boolean isEmpty() { return super.isEmpty() && labels.isEmpty(); } @JsonIgnore @Override public TermUpdate getLabels() { return labels; } @JsonProperty("labels") @JsonInclude(Include.NON_NULL) TermUpdate getJsonLabels() { return labels.isEmpty() ? 
null : labels; } } LabeledStatementDocumentImpl.java000066400000000000000000000103141444772566300411760ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonProperty; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Abstract Jackson implementation of {@link LabeledDocument} and {@link StatementDocument}. * You should not rely on it directly but build instances with the Datamodel helper and * use {@link EntityDocumentImpl} for deserialization. * * @author Fredo Erxleben * @author Antonin Delpeuch * @author Thomas Pellissier Tanon * */ abstract class LabeledStatementDocumentImpl extends StatementDocumentImpl implements LabeledStatementDocument { protected final Map labels; /** * Constructor. * * @param id * the identifier of the subject of this document * @param labels * the labels for this entity, at most one per language * @param claims * the statement groups contained in this document * @param revisionId * the id of the last revision of this document */ public LabeledStatementDocumentImpl( EntityIdValue id, List labels, List claims, long revisionId) { super(id, claims, revisionId); this.labels = (labels == null) ? Collections.emptyMap() : constructTermMap(labels); } /** * Constructor used for JSON deserialization with Jackson. */ LabeledStatementDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("labels") Map labels, @JsonProperty("claims") Map> claims, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, claims, revisionId, siteIri); this.labels = (labels == null) ? Collections.emptyMap() : labels; } /** * Protected constructor provided to ease the creation * of copies. No check is made and each field is reused without * copying. 
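* Callers must therefore not mutate the maps they pass in afterwards.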
* * @param labels * a map from language codes to monolingual values with * the same language codes * @param claims * @param revisionId */ protected LabeledStatementDocumentImpl( EntityIdValue subject, Map labels, Map> claims, long revisionId) { super(subject, claims, revisionId); this.labels = labels; } @JsonProperty("labels") @Override public Map getLabels() { return Collections.unmodifiableMap(this.labels); } protected static Map constructTermMap(List terms) { Map map = new HashMap<>(); for(MonolingualTextValue term : terms) { String language = term.getLanguageCode(); if(map.containsKey(language)) { throw new IllegalArgumentException("Multiple terms provided for the same language."); } map.put(language, toTerm(term)); } return map; } protected static Map withTerm( Map values, MonolingualTextValue value) { Map newValues = new HashMap<>(values); newValues.put(value.getLanguageCode(), toTerm(value)); return newValues; } /** * We need to make sure the terms are of the right type, otherwise they will not be serialized correctly. */ private static MonolingualTextValue toTerm(MonolingualTextValue term) { return term instanceof TermImpl ? term : new TermImpl(term.getLanguageCode(), term.getText()); } } LexemeDocumentImpl.java000066400000000000000000000272701444772566300372110ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.LexemeDeserializer; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Jackson implementation of {@link LexemeDocument}. * * @author Thomas Pellissier Tanon */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize(using = LexemeDeserializer.class) public class LexemeDocumentImpl extends StatementDocumentImpl implements LexemeDocument { private final ItemIdValue lexicalCategory; private final ItemIdValue language; private final Map lemmas; private final List forms; private final List senses; private int nextFormId; private int nextSenseId; /** * Constructor. 
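* <p>
* For orientation, the JSON shape this class maps to looks roughly like the
* following (abbreviated, with illustrative values):
* <pre>{@code
* { "type": "lexeme", "id": "L99", "language": "Q1860",
*   "lexicalCategory": "Q1084", "lemmas": { "en": { ... } },
*   "claims": { ... }, "forms": [ ... ], "senses": [ ... ] }
* }</pre>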
* * @param id * the id of the lexeme that data is about * @param lexicalCategory * the lexical category of the lexeme * @param language * the language of the lexeme * @param lemmas * the list of lemmas of this lexeme, with at most one * lemma for each language code * @param statements * the list of statement groups of this lexeme; all of them must * have the given id as their subject * @param forms * the list of the forms of this lexeme. * @param senses * the list of the senses of this lexeme. * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} */ LexemeDocumentImpl( LexemeIdValue id, ItemIdValue lexicalCategory, ItemIdValue language, List lemmas, List statements, List forms, List senses, long revisionId) { super(id, statements, revisionId); Validate.notNull(lexicalCategory, "Lexeme lexical category should not be null"); this.lexicalCategory = lexicalCategory; Validate.notNull(language, "Lexeme language should not be null"); this.language = language; this.lemmas = (lemmas == null || lemmas.isEmpty()) ? Collections.emptyMap() : constructTermMap(lemmas); this.forms = (forms == null) ? Collections.emptyList() : forms; this.senses = (senses == null) ? Collections.emptyList() : senses; nextFormId = nextChildEntityId(this.forms); nextSenseId = nextChildEntityId(this.senses); } /** * Constructor. Creates an object that can be populated during JSON * deserialization. Should only be used by Jackson for this very purpose. */ @JsonCreator public LexemeDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("lexicalCategory") String lexicalCategory, @JsonProperty("language") String language, @JsonProperty("lemmas") @JsonDeserialize(contentAs = TermImpl.class) Map lemmas, @JsonProperty("claims") Map> claims, @JsonProperty("forms") @JsonDeserialize(contentAs = FormDocumentImpl.class) List forms, @JsonProperty("senses") @JsonDeserialize(contentAs = SenseDocumentImpl.class) List senses, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, claims, revisionId, siteIri); Validate.notNull(lexicalCategory, "Lexeme lexical category should not be null"); this.lexicalCategory = new ItemIdValueImpl(lexicalCategory, siteIri); Validate.notNull(language, "Lexeme language should not be null"); this.language = new ItemIdValueImpl(language, siteIri); this.lemmas = (lemmas == null) ? Collections.emptyMap() : lemmas; this.forms = (forms == null) ? Collections.emptyList() : forms; this.senses = (senses == null) ? Collections.emptyList() : senses; nextFormId = nextChildEntityId(this.forms); nextSenseId = nextChildEntityId(this.senses); } /** * Copy constructor, used when creating modified copies of lexemes. */ private LexemeDocumentImpl( LexemeIdValue id, ItemIdValue lexicalCategory, ItemIdValue language, Map lemmas, Map> statements, List forms, List senses, long revisionId, int nextFormId, int nextSenseId) { super(id, statements, revisionId); this.lexicalCategory = lexicalCategory; this.language = language; this.lemmas = lemmas; this.forms = forms; this.senses = senses; this.nextFormId = nextFormId; this.nextSenseId = nextSenseId; } private static Map constructTermMap(List terms) { Map map = new HashMap<>(); for(MonolingualTextValue term : terms) { String language = term.getLanguageCode(); if(map.containsKey(language)) { throw new IllegalArgumentException("Multiple terms provided for the same language."); } // We need to make sure the terms are of the right type, otherwise they will not // be serialized correctly.
map.put(language, toTerm(term)); } return map; } private static MonolingualTextValue toTerm(MonolingualTextValue term) { return (term instanceof TermImpl) ? term : new TermImpl(term.getLanguageCode(), term.getText()); } private static final Pattern CHILD_ID_PATTERN = Pattern.compile("^L\\d+-[FS]([1-9]\\d*)$"); private static int nextChildEntityId(List childrenDocuments) { int maxId = 0; for(EntityDocument document : childrenDocuments) { Matcher matcher = CHILD_ID_PATTERN.matcher(document.getEntityId().getId()); if(matcher.matches()) { maxId = Math.max(maxId, Integer.parseInt(matcher.group(1))); } else { throw new IllegalArgumentException("Invalid child entity id " + document.getEntityId()); } } return maxId + 1; } @JsonIgnore @Override public LexemeIdValue getEntityId() { return new LexemeIdValueImpl(entityId, siteIri); } @JsonIgnore @Override public ItemIdValue getLexicalCategory() { return lexicalCategory; } @JsonProperty("lexicalCategory") String getJsonLexicalCategory() { return lexicalCategory.getId(); } @JsonIgnore @Override public ItemIdValue getLanguage() { return language; } @JsonProperty("language") String getJsonLanguage() { return language.getId(); } @JsonProperty("lemmas") @Override public Map getLemmas() { return lemmas; } @JsonProperty("forms") @Override public List getForms() { return forms; } @JsonProperty("senses") @Override public List getSenses() { return senses; } @JsonIgnore @Override public FormDocument getForm(FormIdValue formId) { for(FormDocument form : forms) { if(form.getEntityId().equals(formId)) { return form; } } throw new IndexOutOfBoundsException("There is no " + formId + " in the lexeme."); } @JsonIgnore @Override public SenseDocument getSense(SenseIdValue senseId) { for(SenseDocument sense : senses) { if(sense.getEntityId().equals(senseId)) { return sense; } } throw new IndexOutOfBoundsException("There is no " + senseId + " in the lexeme."); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsLexemeDocument(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public LexemeDocument withEntityId(LexemeIdValue newEntityId) { return new LexemeDocumentImpl(newEntityId, lexicalCategory, language, lemmas, claims, forms, senses, revisionId, nextFormId, nextSenseId); } @Override public LexemeDocument withLexicalCategory(ItemIdValue newLexicalCategory) { return new LexemeDocumentImpl(getEntityId(), newLexicalCategory, language, lemmas, claims, forms, senses, revisionId, nextFormId, nextSenseId); } @Override public LexemeDocument withLanguage(ItemIdValue newLanguage) { return new LexemeDocumentImpl(getEntityId(), lexicalCategory, newLanguage, lemmas, claims, forms, senses, revisionId, nextFormId, nextSenseId); } @Override public LexemeDocument withLemma(MonolingualTextValue lemma) { Map newLemmas = new HashMap<>(lemmas); newLemmas.put(lemma.getLanguageCode(), toTerm(lemma)); return new LexemeDocumentImpl(getEntityId(), lexicalCategory, language, newLemmas, claims, forms, senses, revisionId, nextFormId, nextSenseId); } @Override public LexemeDocument withStatement(Statement statement) { Map> newGroups = addStatementToGroups(statement, claims); return new LexemeDocumentImpl(getEntityId(), lexicalCategory, language, lemmas, newGroups, forms, senses, revisionId, nextFormId, nextSenseId); } @Override public LexemeDocument withoutStatementIds(Set statementIds) { Map> newGroups = removeStatements(statementIds, claims); return new 
LexemeDocumentImpl(getEntityId(), lexicalCategory, language, lemmas, newGroups, forms, senses, revisionId, nextFormId, nextSenseId); } @Override public FormDocument createForm(List representations) { FormIdValue newFormId = new FormIdValueImpl(entityId + "-F" + nextFormId, siteIri); nextFormId++; return new FormDocumentImpl(newFormId, representations, Collections.emptyList(), Collections.emptyList(), revisionId); } @Override public LexemeDocument withForm(FormDocument form) { if(!form.getEntityId().getLexemeId().equals(getEntityId())) { throw new IllegalArgumentException("The form " + form.getEntityId() + " does not belong to lexeme " + getEntityId()); } List newForms = new ArrayList<>(forms); newForms.add(form); return new LexemeDocumentImpl(getEntityId(), lexicalCategory, language, lemmas, claims, newForms, senses, revisionId, nextFormId, nextSenseId); } @Override public SenseDocument createSense(List glosses) { SenseIdValue newSenseId = new SenseIdValueImpl(entityId + "-S" + nextSenseId, siteIri); nextSenseId++; return new SenseDocumentImpl(newSenseId, glosses, Collections.emptyList(), revisionId); } @Override public LexemeDocument withSense(SenseDocument sense) { if(!sense.getEntityId().getLexemeId().equals(getEntityId())) { throw new IllegalArgumentException("The sense " + sense.getEntityId() + " does not belong to lexeme " + getEntityId()); } List newSenses = new ArrayList<>(senses); newSenses.add(sense); return new LexemeDocumentImpl(getEntityId(), lexicalCategory, language, lemmas, claims, forms, newSenses, revisionId, nextFormId, nextSenseId); } @Override public LexemeDocument withRevisionId(long newRevisionId) { return new LexemeDocumentImpl(getEntityId(), lexicalCategory, language, lemmas, claims, forms, senses, newRevisionId, nextFormId, nextSenseId); } } LexemeIdValueImpl.java000066400000000000000000000052161444772566300367600ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Jackson implementation of {@link LexemeIdValue}. * * @author Thomas Pellissier Tanon * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class LexemeIdValueImpl extends EntityIdValueImpl implements LexemeIdValue { /** * Constructor. 
* * @param id * the identifier of the entity, such as "L42" * @param siteIri * the siteIRI that this value refers to */ LexemeIdValueImpl( String id, String siteIri) { super(id, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_LEXEME); } /** * Constructor used for deserialization with Jackson. * * @param value * the inner JSON object deserialized as a {@link JacksonInnerEntityId} * @param siteIri * the siteIRI that this value refers to. */ @JsonCreator LexemeIdValueImpl( @JsonProperty("value") JacksonInnerEntityId value, @JacksonInject("siteIri") String siteIri) { super(value, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_LEXEME); } @JsonIgnore @Override public String getEntityType() { return EntityIdValue.ET_LEXEME; } @JsonIgnore @Override public boolean isPlaceholder() { return getId().equals("L0"); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsEntityIdValue(this, obj); } @Override public String toString() { return ToString.toString(this); } } LexemeUpdateImpl.java000066400000000000000000000272161444772566300366550ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static java.util.stream.Collectors.toMap; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link LexemeUpdate}.
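* <p>
* For illustration only (a hedged sketch derived from the JSON getters below):
* an update that changes a lemma, removes form L99-F1 and adds a sense would
* serialize roughly as
* <pre>{@code
* { "lemmas": { ... },
*   "forms": [ { "id": "L99-F1", "remove": "" } ],
*   "senses": [ { "add": "", ... } ] }
* }</pre>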
*/ public class LexemeUpdateImpl extends StatementDocumentUpdateImpl implements LexemeUpdate { @JsonIgnore private final ItemIdValue language; @JsonIgnore private final ItemIdValue lexicalCategory; @JsonIgnore private final TermUpdate lemmas; @JsonIgnore private final List addedSenses; @JsonIgnore private final Map updatedSenses; @JsonIgnore private final Set removedSenses; @JsonIgnore private final List addedForms; @JsonIgnore private final Map updatedForms; @JsonIgnore private final Set removedForms; /** * Initializes new entity update. * * @param entityId * ID of the lexeme that is to be updated * @param revisionId * base lexeme revision to be updated or zero if not available * @param language * new lexeme language or {@code null} for no change * @param lexicalCategory * new lexical category of the lexeme or {@code null} for no change * @param lemmas * changes in lemmas, possibly empty * @param statements * changes in entity statements, possibly empty * @param addedSenses * added senses * @param updatedSenses * updated senses * @param removedSenses * IDs of removed senses * @param addedForms * added forms * @param updatedForms * updated forms * @param removedForms * IDs of removed forms * @throws NullPointerException * if any required parameter or its item is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public LexemeUpdateImpl( LexemeIdValue entityId, long revisionId, ItemIdValue language, ItemIdValue lexicalCategory, TermUpdate lemmas, StatementUpdate statements, Collection addedSenses, Collection updatedSenses, Collection removedSenses, Collection addedForms, Collection updatedForms, Collection removedForms) { super(entityId, revisionId, statements); Validate.isTrue(language == null || !language.isPlaceholder(), "Language cannot be a placeholder ID."); this.language = language; Validate.isTrue( lexicalCategory == null || !lexicalCategory.isPlaceholder(), "Lexical category cannot be a placeholder ID."); this.lexicalCategory = lexicalCategory; Objects.requireNonNull(lemmas, "Lemma update cannot be null."); this.lemmas = lemmas; Objects.requireNonNull(addedSenses, "List of added senses cannot be null."); for (SenseDocument sense : addedSenses) { Objects.requireNonNull(sense, "Added sense cannot be null."); Validate.isTrue(sense.getEntityId().isPlaceholder(), "Added sense must have placeholder ID."); } this.addedSenses = Collections.unmodifiableList(new ArrayList<>(addedSenses)); Objects.requireNonNull(updatedSenses, "List of sense updates cannot be null."); for (SenseUpdate update : updatedSenses) { Objects.requireNonNull(update, "Sense update cannot be null."); Validate.isTrue(update.getBaseRevisionId() == revisionId, "Nested sense update must have the same revision ID as lexeme update."); } Validate.isTrue( updatedSenses.stream().map(s -> s.getEntityId()).distinct().count() == updatedSenses.size(), "Cannot apply two updates to the same sense."); this.updatedSenses = Collections.unmodifiableMap(updatedSenses.stream() .filter(s -> !s.isEmpty()) .collect(toMap(s -> s.getEntityId(), s -> s))); Objects.requireNonNull(removedSenses, "List of removed sense IDs cannot be null."); for (SenseIdValue senseId : removedSenses) { Objects.requireNonNull(senseId, "Removed sense cannot have null ID."); Validate.isTrue(!senseId.isPlaceholder(), "Removed sense cannot have placeholder ID."); } Validate.isTrue( removedSenses.stream().distinct().count() == removedSenses.size(), "Cannot remove the same sense twice."); this.removedSenses = 
Collections.unmodifiableSet(new HashSet<>(removedSenses)); Validate.isTrue( updatedSenses.stream().noneMatch(s -> this.removedSenses.contains(s.getEntityId())), "Cannot remove sense that is being updated."); Objects.requireNonNull(addedForms, "List of added forms cannot be null."); for (FormDocument form : addedForms) { Objects.requireNonNull(form, "Added form cannot be null."); Validate.isTrue(form.getEntityId().isPlaceholder(), "Added form must have placeholder ID."); } this.addedForms = Collections.unmodifiableList(new ArrayList<>(addedForms)); Objects.requireNonNull(updatedForms, "List of form updates cannot be null."); for (FormUpdate update : updatedForms) { Objects.requireNonNull(update, "Form update cannot be null."); Validate.isTrue(update.getBaseRevisionId() == revisionId, "Nested form update must have the same revision ID as lexeme update."); } Validate.isTrue( updatedForms.stream().map(s -> s.getEntityId()).distinct().count() == updatedForms.size(), "Cannot apply two updates to the same form."); this.updatedForms = Collections.unmodifiableMap(updatedForms.stream() .filter(f -> !f.isEmpty()) .collect(toMap(f -> f.getEntityId(), f -> f))); Objects.requireNonNull(removedForms, "List of removed form IDs cannot be null."); for (FormIdValue formId : removedForms) { Objects.requireNonNull(formId, "Removed form cannot have null ID."); Validate.isTrue(!formId.isPlaceholder(), "Removed form cannot have placeholder ID."); } Validate.isTrue( removedForms.stream().distinct().count() == removedForms.size(), "Cannot remove the same form twice."); this.removedForms = Collections.unmodifiableSet(new HashSet<>(removedForms)); Validate.isTrue( updatedForms.stream().noneMatch(s -> this.removedForms.contains(s.getEntityId())), "Cannot remove form that is being updated."); } @JsonIgnore @Override public LexemeIdValue getEntityId() { return (LexemeIdValue) super.getEntityId(); } @JsonIgnore @Override public boolean isEmpty() { return super.isEmpty() && language == null && lexicalCategory == null && lemmas.isEmpty() && addedSenses.isEmpty() && updatedSenses.isEmpty() && removedSenses.isEmpty() && addedForms.isEmpty() && updatedForms.isEmpty() && removedForms.isEmpty(); } @JsonIgnore @Override public Optional getLanguage() { return Optional.ofNullable(language); } @JsonProperty("language") @JsonInclude(Include.NON_NULL) String getJsonLanguage() { return language != null ? language.getId() : null; } @JsonIgnore @Override public Optional getLexicalCategory() { return Optional.ofNullable(lexicalCategory); } @JsonProperty("lexicalCategory") @JsonInclude(Include.NON_NULL) String getJsonLexicalCategory() { return lexicalCategory != null ? lexicalCategory.getId() : null; } @JsonIgnore @Override public TermUpdate getLemmas() { return lemmas; } @JsonProperty("lemmas") @JsonInclude(Include.NON_NULL) TermUpdate getJsonLemmas() { return lemmas.isEmpty() ? 
null : lemmas; } @JsonIgnore @Override public List getAddedSenses() { return addedSenses; } @JsonIgnore @Override public Map getUpdatedSenses() { return updatedSenses; } @JsonIgnore @Override public Set getRemovedSenses() { return removedSenses; } @JsonIgnore @Override public List getAddedForms() { return addedForms; } @JsonIgnore @Override public Map getUpdatedForms() { return updatedForms; } @JsonIgnore @Override public Set getRemovedForms() { return removedForms; } static class AddedSense extends SenseDocumentImpl { AddedSense(SenseDocument sense) { super(SenseIdValue.NULL, new ArrayList<>(sense.getGlosses().values()), sense.getStatementGroups(), sense.getRevisionId()); } @JsonProperty("add") public String getAddCommand() { return ""; } } static class RemovedSense { @JsonIgnore private final SenseIdValue id; RemovedSense(SenseIdValue id) { this.id = id; } @JsonProperty String getId() { return id.getId(); } @JsonProperty("remove") String getRemoveCommand() { return ""; } } @JsonProperty @JsonInclude(Include.NON_EMPTY) List getSenses() { List list = new ArrayList<>(); for (SenseDocument sense : addedSenses) { list.add(new AddedSense(sense)); } list.addAll(updatedSenses.values()); for (SenseIdValue id : removedSenses) { list.add(new RemovedSense(id)); } return list; } static class AddedForm extends FormDocumentImpl { AddedForm(FormDocument form) { super(FormIdValue.NULL, new ArrayList<>(form.getRepresentations().values()), form.getGrammaticalFeatures(), form.getStatementGroups(), form.getRevisionId()); } @JsonProperty("add") public String getAddCommand() { return ""; } } static class RemovedForm { @JsonIgnore private final FormIdValue id; RemovedForm(FormIdValue id) { this.id = id; } @JsonProperty String getId() { return id.getId(); } @JsonProperty("remove") String getRemoveCommand() { return ""; } } @JsonProperty @JsonInclude(Include.NON_EMPTY) List getForms() { List list = new ArrayList<>(); for (FormDocument form : addedForms) { list.add(new AddedForm(form)); } list.addAll(updatedForms.values()); for (FormIdValue id : removedForms) { list.add(new RemovedForm(id)); } return list; } @Override public boolean equals(Object obj) { return Equality.equalsLexemeUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } MediaInfoDocumentImpl.java000066400000000000000000000110661444772566300376210ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.*; /** * Jackson implementation of {@link MediaInfoDocument}. *
<p>
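* A media info entity (an "M" id such as M42, typically a file on Wikimedia
* Commons) carries only captions, stored as labels, plus statements; an
* abbreviated, illustrative JSON shape:
* <pre>{@code
* { "type": "mediainfo", "id": "M42", "labels": { ... }, "statements": { ... } }
* }</pre>
* <p>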
* We serialize statements back to JSON under the "claims" key, for compatibility with the other entity types * and the wbeditentity API module. * * @author Thomas Pellissier Tanon */ @JsonIgnoreProperties(ignoreUnknown = true) public class MediaInfoDocumentImpl extends LabeledStatementDocumentImpl implements MediaInfoDocument { /** * Constructor. * * @param id * the id of the media that data is about * @param labels * the list of captions of this media, with at most one label for * each language code * @param statements * the list of statement groups of this media info; all of them must * have the given itemIdValue as their subject * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} */ public MediaInfoDocumentImpl( MediaInfoIdValue id, List labels, List statements, long revisionId) { super(id, labels, statements, revisionId); } /** * Constructor. Creates an object that can be populated during JSON * deserialization. Should only be used by Jackson for this very purpose. * * The claims parameter is here for compatibility with wbeditentity. */ @JsonCreator public MediaInfoDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("labels") @JsonDeserialize(contentAs=TermImpl.class) Map labels, @JsonProperty("claims") Map> claims, @JsonProperty("statements") Map> statements, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, labels, (statements == null) ? claims : statements, revisionId, siteIri); } /** * Protected constructor, meant to be used to create modified copies * of instances. */ protected MediaInfoDocumentImpl( MediaInfoIdValue subject, Map labels, Map> claims, long revisionId) { super(subject, labels, claims, revisionId); } @JsonIgnore @Override public MediaInfoIdValue getEntityId() { return new MediaInfoIdValueImpl(entityId, siteIri); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsMediaInfoDocument(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public MediaInfoDocument withEntityId(MediaInfoIdValue newEntityId) { return new MediaInfoDocumentImpl(newEntityId, labels, claims, revisionId); } @Override public MediaInfoDocument withRevisionId(long newRevisionId) { return new MediaInfoDocumentImpl(getEntityId(), labels, claims, newRevisionId); } @Override public MediaInfoDocument withLabel(MonolingualTextValue newLabel) { return new MediaInfoDocumentImpl(getEntityId(), withTerm(labels, newLabel), claims, revisionId); } @Override public MediaInfoDocument withStatement(Statement statement) { Map> newGroups = addStatementToGroups(statement, claims); return new MediaInfoDocumentImpl(getEntityId(), labels, newGroups, revisionId); } @Override public MediaInfoDocument withoutStatementIds(Set statementIds) { Map> newGroups = removeStatements(statementIds, claims); return new MediaInfoDocumentImpl(getEntityId(), labels, newGroups, revisionId); } } MediaInfoIdValueImpl.java000066400000000000000000000052531444772566300373750ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import
org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Jackson implementation of {@link MediaInfoIdValue}. * * @author Thomas Pellissier Tanon * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class MediaInfoIdValueImpl extends EntityIdValueImpl implements MediaInfoIdValue { /** * Constructor. * * @param id * the identifier of the entity, such as "M42" * @param siteIri * the siteIRI that this value refers to */ MediaInfoIdValueImpl( String id, String siteIri) { super(id, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_MEDIA_INFO); } /** * Constructor used for deserialization with Jackson. * * @param value * the inner JSON object deserialized as a {@link JacksonInnerEntityId} * @param siteIri * the siteIRI that this value refers to. */ @JsonCreator MediaInfoIdValueImpl( @JsonProperty("value") JacksonInnerEntityId value, @JacksonInject("siteIri") String siteIri) { super(value, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_MEDIA_INFO); } @JsonIgnore @Override public String getEntityType() { return EntityIdValue.ET_MEDIA_INFO; } @JsonIgnore @Override public boolean isPlaceholder() { return getId().equals("M0"); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsEntityIdValue(this, obj); } @Override public String toString() { return ToString.toString(this); } } MediaInfoUpdateImpl.java000066400000000000000000000044611444772566300372660ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; /** * Jackson implementation of {@link MediaInfoUpdate}. */ public class MediaInfoUpdateImpl extends LabeledDocumentUpdateImpl implements MediaInfoUpdate { /** * Initializes new media update. * * @param entityId * ID of the media that is to be updated * @param revisionId * base media revision to be updated or zero if not available * @param labels * changes in entity labels or {@code null} for no change * @param statements * changes in entity statements, possibly empty * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public MediaInfoUpdateImpl( MediaInfoIdValue entityId, long revisionId, TermUpdate labels, StatementUpdate statements) { super(entityId, revisionId, labels, statements); } @JsonIgnore @Override public MediaInfoIdValue getEntityId() { return (MediaInfoIdValue) super.getEntityId(); } @Override public boolean equals(Object obj) { return Equality.equalsMediaInfoUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } MonolingualTextValueImpl.java000066400000000000000000000104521444772566300404130ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Jackson implementation of {@link MonolingualTextValue}. Java attributes are * named equally to the JSON fields. Deviations are due to different naming in * the implemented interfaces. The "value" in this JSON context is called * "text". *
<p>
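* For example, the following JSON (an illustrative value) corresponds to
* {@code new MonolingualTextValueImpl("Wikidata", "en")}:
* <pre>{@code
* { "type": "monolingualtext", "value": { "language": "en", "text": "Wikidata" } }
* }</pre>
* <p>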
* The class extends {@link ValueImpl} which adds a type association done by * the JSON. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class MonolingualTextValueImpl extends ValueImpl implements MonolingualTextValue { /** * Inner helper object to store the actual data. Used to get the nested JSON * structure that is required here. */ private final JacksonInnerMonolingualText value; /** * Constructor. */ public MonolingualTextValueImpl(String text, String language) { super(JSON_VALUE_TYPE_MONOLINGUAL_TEXT); this.value = new JacksonInnerMonolingualText(language, text); } /** * Constructor used for deserialization from JSON with Jackson. */ @JsonCreator MonolingualTextValueImpl( @JsonProperty("value") JacksonInnerMonolingualText value) { super(JSON_VALUE_TYPE_MONOLINGUAL_TEXT); this.value = value; } /** * Returns the inner value helper object. Only for use by Jackson during * serialization. * * @return the inner monolingual text value */ public JacksonInnerMonolingualText getValue() { return this.value; } @JsonIgnore @Override public String getText() { return this.value.getText(); } @JsonIgnore @Override public String getLanguageCode() { return this.value.getLanguage(); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsMonolingualTextValue(this, obj); } @Override public String toString() { return ToString.toString(this); } /** * Helper object that represents the JSON object structure of the value. */ @JsonIgnoreProperties(ignoreUnknown = true) static class JacksonInnerMonolingualText { private final String language; private final String text; /** * Constructor. * * @param language * the Wikimedia language code * @param text * the text of the value */ @JsonCreator JacksonInnerMonolingualText( @JsonProperty("language") String language, @JsonProperty("text") String text) { Validate.notNull(language, "A language has to be provided to create a MonolingualTextValue"); this.language = language; Validate.notNull(text, "A text has to be provided to create a MonolingualTextValue"); this.text = text; } /** * Returns the language code. * * @see MonolingualTextValue#getLanguageCode() * @return language code */ public String getLanguage() { return this.language; } /** * Returns the text. * * @see MonolingualTextValue#getText() * @return text */ public String getText() { return this.text; } } } NoValueSnakImpl.java000066400000000000000000000045371444772566300364620ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SnakVisitor; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link NoValueSnak}. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) public class NoValueSnakImpl extends SnakImpl implements NoValueSnak { /** * Constructor. * * @param property * the property id used by this no value snak */ public NoValueSnakImpl(PropertyIdValue property) { super(property); } /** * Constructor for deserialization from JSON with Jackson. */ @JsonCreator protected NoValueSnakImpl( @JsonProperty("property") String property, @JacksonInject("siteIri") String siteIri) { super(property, siteIri); } @Override public T accept(SnakVisitor snakVisitor) { return snakVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsNoValueSnak(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override @JsonProperty("snaktype") public String getSnakType() { return SnakImpl.JSON_SNAK_TYPE_NOVALUE; } } PropertyDocumentImpl.java000066400000000000000000000164411444772566300376140ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.util.List; import java.util.Map; import java.util.Set; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Jackson implementation of {@link PropertyDocument}. * * @author Fredo Erxleben * @author Antonin Delpeuch * @author Markus Kroetzsch * */ @JsonIgnoreProperties(ignoreUnknown = true) public class PropertyDocumentImpl extends TermedStatementDocumentImpl implements PropertyDocument { /** * Datatype of the property. This is internally stored as * a Jackson object because we need to be able to serialize * it directly to JSON as a field. */ private final DatatypeIdImpl datatype; /** * Constructor for instances that are built manually, rather than from JSON. * * @param id * the id of the property that data is about * @param labels * the list of labels of this property, with at most one label * for each language code * @param descriptions * the list of descriptions of this property, with at most one * description for each language code * @param aliases * the list of aliases of this property * @param statements * the list of statement groups of this item; all of them must * have the given itemIdValue as their subject * @param datatype * the datatype of that property * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} */ public PropertyDocumentImpl( PropertyIdValue id, List labels, List descriptions, List aliases, List statements, DatatypeIdValue datatype, long revisionId) { super(id, labels, descriptions, aliases, statements, revisionId); this.datatype = new DatatypeIdImpl(datatype); } /** * Constructor. Creates an instance by deserializing from JSON. */ @JsonCreator public PropertyDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("labels") @JsonDeserialize(contentAs=TermImpl.class) Map labels, @JsonProperty("descriptions") @JsonDeserialize(contentAs=TermImpl.class) Map descriptions, @JsonProperty("aliases") @JsonDeserialize(using = AliasesDeserializer.class) Map> aliases, @JsonProperty("claims") Map> claims, @JsonProperty("datatype") String datatype, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, labels, descriptions, aliases, claims, revisionId, siteIri); this.datatype = new DatatypeIdImpl(DatatypeIdImpl.getDatatypeIriFromJsonDatatype(datatype), datatype); } /** * Private constructor, meant to be used to create modified copies * of instances. 
*/ private PropertyDocumentImpl( PropertyIdValue id, Map labels, Map descriptions, Map> aliases, Map> claims, DatatypeIdValue datatypeId, long revisionId) { super(id, labels, descriptions, aliases, claims, revisionId); this.datatype = new DatatypeIdImpl(datatypeId); } /** * Returns the JSON string version of the property's datatype. Note that * {@link #getDatatype()} is already used for another function of the * interface. * * @return string datatype */ @JsonProperty("datatype") public String getJsonDatatype() { return this.datatype.getJsonString(); } @JsonIgnore @Override public PropertyIdValue getEntityId() { return new PropertyIdValueImpl(entityId, siteIri); } @JsonIgnore @Override public DatatypeIdValue getDatatype() { return new DatatypeIdImpl(this.datatype); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsPropertyDocument(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public PropertyDocument withEntityId(PropertyIdValue newEntityId) { return new PropertyDocumentImpl(newEntityId, labels, descriptions, aliases, claims, datatype, revisionId); } @Override public PropertyDocument withRevisionId(long newRevisionId) { return new PropertyDocumentImpl(getEntityId(), labels, descriptions, aliases, claims, datatype, newRevisionId); } @Override public PropertyDocument withLabel(MonolingualTextValue newLabel) { return new PropertyDocumentImpl(getEntityId(), withTerm(labels, newLabel), descriptions, aliases, claims, datatype, revisionId); } @Override public PropertyDocument withDescription(MonolingualTextValue newDescription) { return new PropertyDocumentImpl(getEntityId(), labels, withTerm(descriptions, newDescription), aliases, claims, datatype, revisionId); } @Override public PropertyDocument withAliases(String language, List aliases) { return new PropertyDocumentImpl(getEntityId(), labels, descriptions, withAliases(this.aliases, language, aliases), claims, datatype, revisionId); } @Override public PropertyDocument withStatement(Statement statement) { Map> newGroups = addStatementToGroups(statement, claims); return new PropertyDocumentImpl(getEntityId(), labels, descriptions, aliases, newGroups, datatype, revisionId); } @Override public PropertyDocument withoutStatementIds(Set statementIds) { Map> newGroups = removeStatements(statementIds, claims); return new PropertyDocumentImpl(getEntityId(), labels, descriptions, aliases, newGroups, datatype, revisionId); } } PropertyIdValueImpl.java000066400000000000000000000053711444772566300373670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Jackson implementation of {@link PropertyIdValue}. * * @author Markus Kroetzsch * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class PropertyIdValueImpl extends EntityIdValueImpl implements PropertyIdValue { /** * Constructor. * * @param id * the identifier of the entity, such as "P42" * @param siteIri * the siteIRI that this value refers to */ public PropertyIdValueImpl( String id, String siteIri) { super(id, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_PROPERTY); } /** * Constructor used to deserialize an object from JSON with Jackson */ @JsonCreator PropertyIdValueImpl( @JsonProperty("value") JacksonInnerEntityId value, @JacksonInject("siteIri") String siteIri) { super(value, siteIri); assertHasJsonEntityType(JSON_ENTITY_TYPE_PROPERTY); } @JsonIgnore @Override public String getEntityType() { return EntityIdValue.ET_PROPERTY; } @JsonIgnore @Override public boolean isPlaceholder() { return getId().equals("P0"); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsEntityIdValue(this, obj); } @Override public String toString() { return ToString.toString(this); } } PropertyUpdateImpl.java000066400000000000000000000052461444772566300372610ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.util.Map; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; /** * Jackson implementation of {@link PropertyUpdate}. */ public class PropertyUpdateImpl extends TermedDocumentUpdateImpl implements PropertyUpdate { /** * Initializes new property update. 
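* <p>
* A construction sketch (the identifier and the sub-updates are assumed to
* exist already):
* <pre>{@code
* PropertyUpdate update = new PropertyUpdateImpl(propertyId, 0,
*         labels, descriptions, aliases, statements);
* }</pre>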
* * @param entityId * ID of the property entity that is to be updated * @param revisionId * base property entity revision to be updated or zero if not * available * @param labels * changes in entity labels or {@code null} for no change * @param descriptions * changes in entity descriptions or {@code null} for no change * @param aliases * changes in entity aliases, possibly empty * @param statements * changes in entity statements, possibly empty * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public PropertyUpdateImpl( PropertyIdValue entityId, long revisionId, TermUpdate labels, TermUpdate descriptions, Map aliases, StatementUpdate statements) { super(entityId, revisionId, labels, descriptions, aliases, statements); } @JsonIgnore @Override public PropertyIdValue getEntityId() { return (PropertyIdValue) super.getEntityId(); } @Override public boolean equals(Object obj) { return Equality.equalsPropertyUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } QuantityValueImpl.java000066400000000000000000000213101444772566300370730ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import java.math.BigDecimal; /** * Jackson implementation of {@link QuantityValue}. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class QuantityValueImpl extends ValueImpl implements QuantityValue { /** * Inner helper object to store the actual data. Used to get the nested JSON * structure that is required here. */ private final JacksonInnerQuantity value; /** * Constructor. * * @param numericValue * the numeric value of this quantity * @param lowerBound * the lower bound of the numeric value of this quantity or null * if not set * @param upperBound * the upper bound of the numeric value of this quantity or null * if not set * @param unit * the unit of this quantity, or null if there is no * unit */ public QuantityValueImpl( BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound, ItemIdValue unit) { super(JSON_VALUE_TYPE_QUANTITY); this.value = new JacksonInnerQuantity(numericValue, lowerBound, upperBound, unit); } /** * Constructor. 
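* <p>
* Prefer the {@link ItemIdValue} constructor above. Note that passing a
* {@code null} literal as the unit is ambiguous between the two overloads,
* so a cast is needed, e.g. (illustrative):
* {@code new QuantityValueImpl(amount, null, null, (ItemIdValue) null)}
* for an unbounded, unit-less quantity.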
* * @param numericValue * the numeric value of this quantity * @param lowerBound * the lower bound of the numeric value of this quantity or null * if not set * @param upperBound * the upper bound of the numeric value of this quantity or null * if not set * @param unit * the unit of this quantity, or the empty string if there is no * unit * @deprecated supply the unit as an ItemIdValue instead */ @Deprecated public QuantityValueImpl( BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound, String unit) { super(JSON_VALUE_TYPE_QUANTITY); this.value = new JacksonInnerQuantity(numericValue, lowerBound, upperBound, "1".equals(unit) ? null : ItemIdValueImpl.fromIri(unit)); } /** * Constructor used for deserialization from JSON with Jackson. */ @JsonCreator QuantityValueImpl( @JsonProperty("value") JacksonInnerQuantity value) { super(JSON_VALUE_TYPE_QUANTITY); this.value = value; } /** * Returns the inner value helper object. Only for use by Jackson during * serialization. * * @return the inner quantity value */ public JacksonInnerQuantity getValue() { return this.value; } @JsonIgnore @Override public BigDecimal getNumericValue() { return this.value.getAmount(); } @JsonIgnore @Override public BigDecimal getLowerBound() { return this.value.getLowerBound(); } @JsonIgnore @Override public BigDecimal getUpperBound() { return this.value.getUpperBound(); } @JsonIgnore @Override public String getUnit() { return this.value.getUnit(); } @JsonIgnore @Override public ItemIdValue getUnitItemId() { return this.value.getUnitItemId(); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsQuantityValue(this, obj); } @Override public String toString() { return ToString.toString(this); } /** * Helper object that represents the JSON object structure of the value. */ @JsonIgnoreProperties(ignoreUnknown = true) static class JacksonInnerQuantity { private final BigDecimal amount; private final BigDecimal upperBound; private final BigDecimal lowerBound; private final ItemIdValue unit; /** * Constructor for JSON deserialization. The unit given here is a unit string as used in WDTK, with * the string "1" meaning "no unit". * * @param amount * the main value of this quantity * @param lowerBound * the lower bound of this quantity * @param upperBound * the upper bound of this quantity * @param unit * the unit of this quantity, as an IRI to the relevant entity */ @JsonCreator JacksonInnerQuantity( @JsonProperty("amount") BigDecimal amount, @JsonProperty("lowerBound") BigDecimal lowerBound, @JsonProperty("upperBound") BigDecimal upperBound, @JsonProperty("unit") String unit) { this(amount, lowerBound, upperBound, parseUnit(unit)); } protected static ItemIdValue parseUnit(String unit) { Validate.notNull(unit, "Unit cannot be null"); Validate.notEmpty(unit, "Unit cannot be empty. Use \"1\" for unit-less quantities."); return "1".equals(unit) ? 
null : ItemIdValueImpl.fromIri(unit); } JacksonInnerQuantity( BigDecimal amount, BigDecimal lowerBound, BigDecimal upperBound, ItemIdValue unit) { Validate.notNull(amount, "Numeric value cannot be null"); if(lowerBound != null || upperBound != null) { Validate.notNull(lowerBound, "Lower and upper bounds should be null at the same time"); Validate.notNull(upperBound, "Lower and upper bounds should be null at the same time"); if (lowerBound.compareTo(amount) > 0) { throw new IllegalArgumentException( "Lower bound cannot be strictly greater than numeric value"); } if (amount.compareTo(upperBound) > 0) { throw new IllegalArgumentException( "Upper bound cannot be strictly smaller than numeric value"); } } this.amount = amount; this.upperBound = upperBound; this.lowerBound = lowerBound; this.unit = unit; } /** * Returns the numeric value. * * @see QuantityValue#getNumericValue() * @return the value */ @JsonIgnore BigDecimal getAmount() { return amount; } /** * Returns the upper bound. * * @see QuantityValue#getUpperBound() * @return the upper bound */ @JsonIgnore BigDecimal getUpperBound() { return upperBound; } /** * Returns the lower bound. * * @see QuantityValue#getLowerBound() * @return the lower bound */ @JsonIgnore BigDecimal getLowerBound() { return lowerBound; } @JsonProperty("amount") String getAmountAsString() { return bigDecimalToSignedString(this.amount); } @JsonProperty("upperBound") @JsonInclude(JsonInclude.Include.NON_NULL) String getUpperBoundAsString() { return this.upperBound != null ? bigDecimalToSignedString(this.upperBound) : null; } @JsonProperty("lowerBound") @JsonInclude(JsonInclude.Include.NON_NULL) String getLowerBoundAsString() { return this.lowerBound != null ? bigDecimalToSignedString(this.lowerBound) : null; } /** * Returns the string to use for the "unit" field in JSON. The value "1" is * used to denote "no unit"; otherwise an IRI is used to denote specific * units. * * @return unit string */ @JsonProperty("unit") String getUnit() { return unit == null ? "1" : unit.getIri(); } /** * Returns the unit represented as an {@link ItemIdValue}. * @return */ @JsonIgnore public ItemIdValue getUnitItemId() { return unit; } /** * Formats the string output with a leading signum as JSON expects it. */ private String bigDecimalToSignedString(BigDecimal value) { if (value.signum() < 0) { return value.toString(); } else { return "+" + value.toString(); } } } } ReferenceImpl.java000066400000000000000000000101051444772566300361560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import com.fasterxml.jackson.annotation.*; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.util.NestedIterator; import java.util.*; /** * Jackson implementation of {@link Reference}. * * @author Fredo Erxleben * @author Markus Kroetzsch * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) public class ReferenceImpl implements Reference { private List snakGroups; /** * Map of property id strings to snaks, as used to encode snaks in JSON. */ private final Map> snaks; /** * List of property string ids that encodes the desired order of snaks, * which is not specified by the map. */ private final List propertyOrder; /** * The wikidata hash of this reference. null if we don't have knowledge about the hash. */ private final String hash; /** * Constructor. *
<p>
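* A usage sketch (assuming {@code groups} holds snak groups with pairwise
* distinct properties):
* <pre>{@code
* Reference reference = new ReferenceImpl(groups);
* }</pre>
* <p>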
* The order of the snaks groups provided will be respected. * the properties used by the snak groups should be distinct. * * @param groups * the snaks group which form the reference */ public ReferenceImpl(List groups) { propertyOrder = new ArrayList<>(groups.size()); snaks = new HashMap<>(groups.size()); hash = null; for(SnakGroup group : groups) { propertyOrder.add(group.getProperty().getId()); snaks.put(group.getProperty().getId(), group.getSnaks()); } } /** * Constructor for deserialization from JSON. */ @JsonCreator protected ReferenceImpl( @JsonProperty("snaks") Map> snaks, @JsonProperty("snaks-order") List propertyOrder, @JsonProperty("hash") String hash) { this.snaks = new HashMap<>(snaks.size()); for(Map.Entry> entry : snaks.entrySet()) { this.snaks.put(entry.getKey(), new ArrayList<>(entry.getValue())); } this.propertyOrder = propertyOrder; this.hash = hash; } @JsonIgnore @Override public List getSnakGroups() { if (this.snakGroups == null) { this.snakGroups = SnakGroupImpl.makeSnakGroups(this.snaks, this.propertyOrder); } return this.snakGroups; } /** * Returns the map of snaks as found in JSON. Only for use by Jackson during * serialization. * * @return the map of snaks */ @JsonProperty("snaks") public Map> getSnaks() { return Collections.unmodifiableMap(this.snaks); } @Override @JsonInclude(JsonInclude.Include.NON_NULL) public String getHash() { return hash; } /** * Returns the list of property ids used to order snaks as found in JSON. * Only for use by Jackson during serialization. * * @return the list of property ids */ @JsonProperty("snaks-order") public List getPropertyOrder() { return Collections.unmodifiableList(this.propertyOrder); } @Override @JsonIgnore public Iterator getAllSnaks() { return new NestedIterator<>(getSnakGroups()); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsReference(this, obj); } @Override public String toString() { return ToString.toString(this); } } SenseDocumentImpl.java000066400000000000000000000124741444772566300370470ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.*; /** * Jackson implementation of {@link SenseDocument}. * * @author Thomas Pellissier Tanon */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonTypeInfo(use = JsonTypeInfo.Id.NONE) public class SenseDocumentImpl extends StatementDocumentImpl implements SenseDocument { private final Map glosses; /** * Constructor. 
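* <p>
* Glosses are keyed by language code internally; passing two glosses with
* the same language code makes {@code constructTermMap} below throw an
* {@link IllegalArgumentException}.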
* * @param id * the id of the le that data is about * @param glosses * the list of glosses of this lexeme, with at most one * lemma for each language code * @param statements * the list of statement groups of this lexeme; all of them must * have the given id as their subject * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} */ SenseDocumentImpl( SenseIdValue id, List glosses, List statements, long revisionId) { super(id, statements, revisionId); this.glosses = (glosses == null || glosses.isEmpty()) ? Collections.emptyMap() : constructTermMap(glosses); } /** * Constructor. Creates an object that can be populated during JSON * deserialization. Should only be used by Jackson for this very purpose. */ @JsonCreator SenseDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("glosses") @JsonDeserialize(contentAs=TermImpl.class) Map glosses, @JsonProperty("claims") Map> claims, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, claims, revisionId, siteIri); this.glosses = (glosses == null) ? Collections.emptyMap() : glosses; } /** * Copy constructor, used when creating modified copies of senses. */ private SenseDocumentImpl( SenseIdValue subject, Map glosses, Map> claims, long revisionId) { super(subject, claims, revisionId); this.glosses = glosses; } private static Map constructTermMap(List terms) { Map map = new HashMap<>(); for(MonolingualTextValue term : terms) { String language = term.getLanguageCode(); if(map.containsKey(language)) { throw new IllegalArgumentException("Multiple terms provided for the same language."); } // We need to make sure the terms are of the right type, otherwise they will not // be serialized correctly. map.put(language, toTerm(term)); } return map; } private static MonolingualTextValue toTerm(MonolingualTextValue term) { return (term instanceof TermImpl) ? 
term : new TermImpl(term.getLanguageCode(), term.getText()); } @JsonIgnore @Override public SenseIdValue getEntityId() { return new SenseIdValueImpl(entityId, siteIri); } @JsonProperty("type") String getType() { return EntityDocumentImpl.JSON_TYPE_SENSE; } @JsonProperty("glosses") @Override public Map getGlosses() { return glosses; } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsSenseDocument(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public SenseDocument withEntityId(SenseIdValue newEntityId) { return new SenseDocumentImpl(newEntityId, glosses, claims, revisionId); } @Override public SenseDocument withRevisionId(long newRevisionId) { return new SenseDocumentImpl(getEntityId(), glosses, claims, newRevisionId); } @Override public SenseDocument withGloss(MonolingualTextValue gloss) { Map newGlosses = new HashMap<>(glosses); newGlosses.put(gloss.getLanguageCode(), toTerm(gloss)); return new SenseDocumentImpl(getEntityId(), newGlosses, claims, revisionId); } @Override public SenseDocument withStatement(Statement statement) { return new SenseDocumentImpl(getEntityId(), glosses, addStatementToGroups(statement, claims), revisionId); } @Override public SenseDocument withoutStatementIds(Set statementIds) { return new SenseDocumentImpl(getEntityId(), glosses, removeStatements(statementIds, claims), revisionId); } } SenseIdValueImpl.java000066400000000000000000000106251444772566300366160ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import java.util.regex.Pattern; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Jackson implementation of {@link SenseIdValue}. * TODO: It is not possible to use it as statement value yet. * * @author Thomas Pellissier Tanon * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class SenseIdValueImpl extends ValueImpl implements SenseIdValue { /* * Allow L0-S0 from SenseIdValue.NULL. */ private static final Pattern PATTERN = Pattern.compile("L[1-9]\\d*-S[1-9]\\d*|L0-S0"); private final String id; private final String siteIri; /** * Constructor. 
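* <p>
* Valid sense ids combine a lexeme id with an "S"-numbered sense part,
* e.g. "L42-S1"; {@code PATTERN} above also admits the placeholder id
* "L0-S0".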
* * @param id * the identifier of the entity, such as "L42-F43" * @param siteIri * the siteIRI that this value refers to */ SenseIdValueImpl( String id, String siteIri) { super(JSON_VALUE_TYPE_ENTITY_ID); if(id == null || !PATTERN.matcher(id).matches()) { throw new IllegalArgumentException("The string " + id + " is not a valid form id"); } this.id = id; Validate.notNull(siteIri); this.siteIri = siteIri; } /** * Constructor used for deserialization with Jackson. */ @JsonCreator SenseIdValueImpl( @JsonProperty("value") JacksonInnerEntityId value, @JacksonInject("siteIri") String siteIri) { this(value.getStringId(), siteIri); } @JsonIgnore @Override public String getEntityType() { return EntityIdValue.ET_SENSE; } @JsonIgnore @Override public String getId() { return id; } @JsonIgnore @Override public String getSiteIri() { return siteIri; } @JsonIgnore @Override public String getIri() { return siteIri + id; } @JsonIgnore @Override public boolean isPlaceholder() { return id.equals("L0-S0"); } /** * Returns the inner value helper object. Only for use by Jackson during * serialization. * * @return the inner entity id value */ @JsonProperty("value") JacksonInnerEntityId getValue() { return new JacksonInnerEntityId(id); } @JsonIgnore @Override public LexemeIdValue getLexemeId() { return new LexemeIdValueImpl(id.substring(0, id.indexOf("-")), siteIri); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsEntityIdValue(this, obj); } @Override public String toString() { return ToString.toString(this); } /** * Helper object that represents the JSON object structure of the value. */ @JsonIgnoreProperties(ignoreUnknown = true) static class JacksonInnerEntityId { private final String id; @JsonCreator JacksonInnerEntityId( @JsonProperty("id") String id ) { this.id = id; } /** * Returns the entity type string as used in JSON. Only for use by Jackson * during serialization. * * @return the entity type string */ @JsonProperty("entity-type") String getJsonEntityType() { return "sense"; } /** * Returns the standard string version of the entity id encoded in this * value. For example, an id with entityType "item" and numericId "42" is * normally identified as "Q42". * * @return the string id */ @JsonProperty("id") String getStringId() { return id; } } } SenseUpdateImpl.java000066400000000000000000000056121444772566300365070ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.util.Objects; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link SenseUpdate}. */ public class SenseUpdateImpl extends StatementDocumentUpdateImpl implements SenseUpdate { @JsonIgnore private final TermUpdate glosses; /** * Initializes new sense update. * * @param entityId * ID of the sense that is to be updated * @param revisionId * base sense revision to be updated or zero if not available * @param glosses * changes in sense glosses, possibly empty * @param statements * changes in entity statements, possibly empty * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public SenseUpdateImpl( SenseIdValue entityId, long revisionId, TermUpdate glosses, StatementUpdate statements) { super(entityId, revisionId, statements); Objects.requireNonNull(glosses, "Gloss update cannot be null."); this.glosses = glosses; } @JsonIgnore @Override public SenseIdValue getEntityId() { return (SenseIdValue) super.getEntityId(); } @JsonIgnore @Override public boolean isEmpty() { return super.isEmpty() && glosses.isEmpty(); } @JsonIgnore @Override public TermUpdate getGlosses() { return glosses; } @JsonProperty("glosses") @JsonInclude(Include.NON_EMPTY) TermUpdate getJsonGlosses() { return glosses.isEmpty() ? null : glosses; } @Override public boolean equals(Object obj) { return Equality.equalsSenseUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } SiteLinkImpl.java000066400000000000000000000075031444772566300360120ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import com.fasterxml.jackson.annotation.*; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; /** * Jackson implementation of {@link SiteLink}. * * @author Fredo Erxleben * @author Antonin Delpeuch * @author Thomas Pellissier Tanon */ @JsonIgnoreProperties(ignoreUnknown = true) public class SiteLinkImpl implements SiteLink { private final String title; private final String site; private final List badges; /** * Constructor. * * @param title * the title of the page on the target site * @param site * the identifier of the target site (such as "dewiki") * @param badges * the list of badge identifiers worn by this site link. * Can be null. */ public SiteLinkImpl( String title, String site, List badges) { Validate.notNull(title); this.title = title; Validate.notNull(site); this.site = site; this.badges = (badges == null) ? Collections.emptyList() : badges; this.badges.sort(Comparator.comparing(EntityIdValue::getId)); } /** * Constructor. * * @param title * the title of the page on the target site * @param site * the identifier of the target site (such as "dewiki") * @param badges * the list of badge identifiers worn by this site link. * Can be null. */ @JsonCreator SiteLinkImpl( @JsonProperty("title") String title, @JsonProperty("site") String site, @JsonProperty("badges") List badges, @JacksonInject("siteIri") String siteIri ) { Validate.notNull(title); this.title = title; Validate.notNull(site); this.site = site; this.badges = (badges == null || badges.isEmpty()) ? Collections.emptyList() : constructBadges(badges, siteIri); } private List constructBadges(List badges, String siteIri) { List output = new ArrayList<>(badges.size()); for(String badge : badges) { output.add(new ItemIdValueImpl(badge, siteIri)); } return output; } @JsonProperty("title") @Override public String getPageTitle() { return this.title; } @JsonProperty("site") @Override public String getSiteKey() { return this.site; } @JsonIgnore @Override public List getBadges() { return this.badges; } @JsonProperty("badges") List getBadgesString() { if (badges.isEmpty()) { return Collections.emptyList(); } List output = new ArrayList<>(badges.size()); for(ItemIdValue badge : badges) { output.add(badge.getId()); } return output; } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsSiteLink(this, obj); } @Override public String toString() { return ToString.toString(this); } } SitesImpl.java000066400000000000000000000150171444772566300353560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.HashMap; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Sites; /** * Implementation of the {@link Sites} interface that allows sites to be * registered. Objects of this type are not immutable, since they are not data * objects, but the {@link Sites} interface only supports read access. *
<p>
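* Illustrative use (hypothetical site configuration values):
* <pre>{@code
* SitesImpl sites = new SitesImpl();
* sites.setSiteInformation("enwiki", "wikipedia", "en", "mediawiki",
*         "//en.wikipedia.org/w/$1", "//en.wikipedia.org/wiki/$1");
* sites.getPageUrl("enwiki", "Douglas Adams");
* // returns "https://en.wikipedia.org/wiki/Douglas_Adams"
* }</pre>
* <p>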
* This object supports protocol-relative URLs by adding the default protocol * {@link SitesImpl#DEFAULT_PROTOCOL_PREFIX} for these cases. * * @author Markus Kroetzsch * */ public class SitesImpl implements Sites { /** * MediaWiki supports relative URLs in site configurations, which do not * start with "http://" or "https://", but with "//". The intended usage is * that generated links to this site will use either http or https depending * on the access method used to call the page. In Java, we do not have any * context, and we therefore define a default protocol to be used in such * cases. */ public static String DEFAULT_PROTOCOL_PREFIX = "https:"; /** * Simple record for holding information about a site. * * @author Markus Kroetzsch * */ static class SiteInformation { final String siteKey; final String group; final String languageCode; final String siteType; final String filePathPre; final String filePathPost; final String pagePathPre; final String pagePathPost; SiteInformation(String siteKey, String group, String languageCode, String siteType, String filePath, String pagePath) { // Null might be acceptable for some of the following; but this // should only be changed when we have a case where this is correct. Validate.notNull(siteKey, "Site key must not be null."); Validate.notNull(group, "Group must not be null."); Validate.notNull(languageCode, "Language code must not be null."); Validate.notNull(siteType, "Site type must not be null."); Validate.notNull(filePath, "File path must not be null."); Validate.notNull(pagePath, "Page path must not be null."); this.siteKey = siteKey; this.group = group; this.languageCode = languageCode; this.siteType = siteType; filePath = addProtocolPrefix(filePath); pagePath = addProtocolPrefix(pagePath); int iFileName = filePath.indexOf("$1"); this.filePathPre = filePath.substring(0, iFileName); this.filePathPost = filePath.substring(iFileName + 2); int iPageName = pagePath.indexOf("$1"); this.pagePathPre = pagePath.substring(0, iPageName); this.pagePathPost = pagePath.substring(iPageName + 2); } /** * Returns the file URL. * * @see Sites#getFileUrl(String, String) * @param fileName * the file name * @return the file URL */ String getFileUrl(String fileName) { return this.filePathPre + fileName + this.filePathPost; } /** * Returns the page URL. The method replaces spaces by underscores in * page titles on MediaWiki sites, since this is how MediaWiki page URLs * are constructed. For other sites, this might not be the case and * spaces will just be escaped in the standard way using "+". * * @see Sites#getPageUrl(String, String) * @param pageTitle * the page title, not escaped * @return the page URL */ String getPageUrl(String pageTitle) { try { String encodedTitle; if ("mediawiki".equals(this.siteType)) { encodedTitle = URLEncoder.encode( pageTitle.replace(" ", "_"), "utf-8"); // Keep special title symbols unescaped: encodedTitle = encodedTitle.replace("%3A", ":").replace( "%2F", "/"); } else { encodedTitle = URLEncoder.encode(pageTitle, "utf-8"); } return this.pagePathPre + encodedTitle + this.pagePathPost; } catch (UnsupportedEncodingException e) { throw new RuntimeException( "Your JRE does not support UTF-8 encoding. Srsly?!", e); } } /** * Find the default prefix that should be used for protocol-relative * URLs. The prefix {@link SitesImpl#DEFAULT_PROTOCOL_PREFIX} is added * to URLs that do not have a protocol yet. 
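* For example, "//en.wikipedia.org/wiki/$1" becomes
* "https://en.wikipedia.org/wiki/$1", while a prefix that already names a
* protocol is returned unchanged.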
* * @param urlPrefix * the beginning of the URL * @return urlPrefix extended with default protocol if needed */ String addProtocolPrefix(String urlPrefix) { if ("//".equals(urlPrefix.substring(0, 2))) { return DEFAULT_PROTOCOL_PREFIX + urlPrefix; } else { return urlPrefix; } } } final HashMap sites = new HashMap<>(); @Override public void setSiteInformation(String siteKey, String group, String languageCode, String siteType, String filePath, String pagePath) { this.sites.put(siteKey, new SiteInformation(siteKey, group, languageCode, siteType, filePath, pagePath)); } @Override public String getLanguageCode(String siteKey) { if (this.sites.containsKey(siteKey)) { return this.sites.get(siteKey).languageCode; } else { return null; } } @Override public String getGroup(String siteKey) { if (this.sites.containsKey(siteKey)) { return this.sites.get(siteKey).group; } else { return null; } } @Override public String getPageUrl(String siteKey, String pageTitle) { if (this.sites.containsKey(siteKey)) { return this.sites.get(siteKey).getPageUrl(pageTitle); } else { return null; } } @Override public String getSiteLinkUrl(SiteLink siteLink) { return this.getPageUrl(siteLink.getSiteKey(), siteLink.getPageTitle()); } @Override public String getFileUrl(String siteKey, String fileName) { if (this.sites.containsKey(siteKey)) { return this.sites.get(siteKey).getFileUrl(fileName); } else { return null; } } @Override public String getSiteType(String siteKey) { if (this.sites.containsKey(siteKey)) { return this.sites.get(siteKey).siteType; } else { return null; } } } SnakGroupImpl.java000066400000000000000000000063031444772566300361760ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.*; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; /** * Helper class to represent a {@link SnakGroup} deserialized from JSON. The * actual data is part of a map of lists of {@link SnakImpl} objects in JSON, * so there is no corresponding JSON object. * * @author Markus Kroetzsch * */ public class SnakGroupImpl extends AbstractList implements SnakGroup { private final List snaks; /** * Constructor. 
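* <p>
* Illustrative (both snaks are assumed to use the same property):
* <pre>{@code
* SnakGroup group = new SnakGroupImpl(Arrays.asList(snak1, snak2));
* }</pre>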
* * @param snaks * a non-empty list of snaks that use the same property */ public SnakGroupImpl(List snaks) { if (snaks == null || snaks.isEmpty()) { throw new IllegalArgumentException("A non-empty list of Snaks must be provided to create a SnakGroup"); } PropertyIdValue property = snaks.get(0).getPropertyId(); for (Snak s : snaks) { if (!property.equals(s.getPropertyId())) { throw new IllegalArgumentException( "All snaks in a snak group must use the same property"); } } this.snaks = Collections. unmodifiableList(snaks); } @Override public Snak get(int i) { return snaks.get(i); } @Override public Iterator iterator() { return snaks.iterator(); } @Override public int size() { return snaks.size(); } @Override public boolean isEmpty() { return snaks.isEmpty(); } @Override public List getSnaks() { return Collections.unmodifiableList(this.snaks); } @Override public PropertyIdValue getProperty() { return this.snaks.get(0).getPropertyId(); } /** * Construct a list of {@link SnakGroup} objects from a map from property * ids to snak lists as found in JSON. * * @param snaks * the map with the data * @return the result list */ public static List makeSnakGroups( Map> snaks, List propertyOrder) { List result = new ArrayList<>(snaks.size()); for (String propertyName : propertyOrder) { result.add(new SnakGroupImpl(snaks.get(propertyName))); } return result; } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsSnakGroup(this, obj); } @Override public String toString() { return ToString.toString(this); } } SnakImpl.java000066400000000000000000000061711444772566300351640ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import org.apache.commons.lang3.Validate; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Snak; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonSubTypes.Type; import com.fasterxml.jackson.annotation.JsonTypeInfo; /** * Abstract Jackson implementation of {@link Snak}. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "snaktype") @JsonSubTypes({ @Type(value = NoValueSnakImpl.class, name = SnakImpl.JSON_SNAK_TYPE_NOVALUE), @Type(value = SomeValueSnakImpl.class, name = SnakImpl.JSON_SNAK_TYPE_SOMEVALUE), @Type(value = ValueSnakImpl.class, name = SnakImpl.JSON_SNAK_TYPE_VALUE) }) @JsonIgnoreProperties(ignoreUnknown = true) public abstract class SnakImpl implements Snak { /** * Type string used to denote value snaks in JSON. 
*/ public static final String JSON_SNAK_TYPE_VALUE = "value"; /** * Type string used to denote somevalue snaks in JSON. */ public static final String JSON_SNAK_TYPE_SOMEVALUE = "somevalue"; /** * Type string used to denote novalue snaks in JSON. */ public static final String JSON_SNAK_TYPE_NOVALUE = "novalue"; /** * The property used by this snak. */ private final PropertyIdValue property; /** * Constructor. */ public SnakImpl(PropertyIdValue property) { Validate.notNull(property); this.property = property; } /** * Constructor. Creates an empty object that can be populated during JSON * deserialization. Should only be used by Jackson for this very purpose. * * This is not marked as JsonCreator because only concrete subclasses will * be deserialized directly. */ protected SnakImpl( String id, String siteIri) { Validate.notNull(id); Validate.notNull(siteIri); this.property = new PropertyIdValueImpl(id, siteIri); } /** * Returns the property id string. Only for use by Jackson during * serialization. * * @return the property id string */ @JsonProperty("property") public String getProperty() { return this.property.getId(); } @JsonIgnore @Override public PropertyIdValue getPropertyId() { return property; } @JsonProperty("snaktype") public abstract String getSnakType(); } SomeValueSnakImpl.java000066400000000000000000000045371444772566300370110ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SnakVisitor; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link SomeValueSnak}. * * @author Fredo Erxleben * */ @JsonIgnoreProperties(ignoreUnknown = true) public class SomeValueSnakImpl extends SnakImpl implements SomeValueSnak { /** * Constructor. * * @param property * the id of the property used for this some value snak */ public SomeValueSnakImpl(PropertyIdValue property) { super(property); } /** * Constructor for deserialization from JSON with Jackson. 
*/ @JsonCreator protected SomeValueSnakImpl( @JsonProperty("property") String property, @JacksonInject("siteIri") String siteIri) { super(property, siteIri); } @Override @JsonProperty("snaktype") public String getSnakType() { return SnakImpl.JSON_SNAK_TYPE_SOMEVALUE; } @Override public T accept(SnakVisitor snakVisitor) { return snakVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsSomeValueSnak(this, obj); } @Override public String toString() { return ToString.toString(this); } } StatementDocumentImpl.java000066400000000000000000000154111444772566300377300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.util.NestedIterator; import java.util.*; import java.util.Map.Entry; /** * Abstract Jackson implementation of {@link StatementDocument}. * You should not rely on it directly. * * @author Fredo Erxleben * @author Antonin Delpeuch * @author Thomas Pellissier Tanon * */ abstract class StatementDocumentImpl extends EntityDocumentImpl implements StatementDocument { /** * This is what is called claim in the JSON model. It corresponds to * the statement group in the WDTK model. */ protected final Map> claims; /** * Statement groups. This member is initialized when statements are * accessed. */ private List statementGroups; /** * Constructor. * * @param id * the identifier of the subject of this document * @param claims * the statement groups contained in this document * @param revisionId * the id of the last revision of this document */ StatementDocumentImpl( EntityIdValue id, List claims, long revisionId) { super(id, revisionId); this.claims = new HashMap<>(); if(claims != null) { for(StatementGroup group : claims) { EntityIdValue otherId = group.getSubject(); otherId.getIri(); Validate.isTrue(group.getSubject().equals(id), "Subject for the statement group and the document are different: "+otherId.toString()+" vs "+id.toString()); this.claims.put(group.getProperty().getId(), group.getStatements()); } } } /** * Copy constructor. * * @param id * @param claims * @param revisionId */ protected StatementDocumentImpl( EntityIdValue id, Map> claims, long revisionId) { super(id, revisionId); this.claims = claims; } /** * Constructor used for JSON deserialization with Jackson. 
*/ StatementDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("claims") Map> claims, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, revisionId, siteIri); if (claims != null) { this.claims = new HashMap<>(); EntityIdValue subject = this.getEntityId(); for (Entry> entry : claims .entrySet()) { List statements = new ArrayList<>(entry.getValue().size()); for (StatementImpl.PreStatement statement : entry.getValue()) { statements.add(statement.withSubject(subject)); } this.claims.put(entry.getKey(), statements); } } else { this.claims = Collections.emptyMap(); } } @JsonIgnore @Override public List getStatementGroups() { if (this.statementGroups == null) { this.statementGroups = new ArrayList<>(this.claims.size()); for (List statements : this.claims.values()) { this.statementGroups .add(new StatementGroupImpl(statements)); } } return this.statementGroups; } /** * Find a statement group by its property id, without checking for * equality with the site IRI. More efficient implementation than * the default one. */ public StatementGroup findStatementGroup(String propertyIdValue) { if (this.claims.containsKey(propertyIdValue)) { return new StatementGroupImpl(this.claims.get(propertyIdValue)); } return null; } /** * Returns the "claims". Only used by Jackson. *
<p>
* JSON "claims" correspond to statement groups in the WDTK model. You * should use {@link ItemDocumentImpl#getStatementGroups()} to obtain * this data. * * @return map of statement groups */ @JsonProperty("claims") public Map> getJsonClaims() { return this.claims; } @Override @JsonIgnore public Iterator getAllStatements() { return new NestedIterator<>(getStatementGroups()); } /** * Adds a Statement to a given collection of statement groups. * If the statement id is not null and matches that of an existing statement, * this statement will be replaced. * * @param statement * @param claims * @return */ protected static Map> addStatementToGroups(Statement statement, Map> claims) { Map> newGroups = new HashMap<>(claims); String pid = statement.getMainSnak().getPropertyId().getId(); if(newGroups.containsKey(pid)) { List newGroup = new ArrayList<>(newGroups.get(pid).size()); boolean statementReplaced = false; for(Statement existingStatement : newGroups.get(pid)) { if(existingStatement.getStatementId().equals(statement.getStatementId()) && !existingStatement.getStatementId().isEmpty()) { statementReplaced = true; newGroup.add(statement); } else { newGroup.add(existingStatement); } } if(!statementReplaced) { newGroup.add(statement); } newGroups.put(pid, newGroup); } else { newGroups.put(pid, Collections.singletonList(statement)); } return newGroups; } /** * Removes statement ids from a collection of statement groups. * @param statementIds * @param claims * @return */ protected static Map> removeStatements(Set statementIds, Map> claims) { Map> newClaims = new HashMap<>(claims.size()); for(Entry> entry : claims.entrySet()) { List filteredStatements = new ArrayList<>(); for(Statement s : entry.getValue()) { if(!statementIds.contains(s.getStatementId())) { filteredStatements.add(s); } } if(!filteredStatements.isEmpty()) { newClaims.put(entry.getKey(), filteredStatements); } } return newClaims; } } StatementDocumentUpdateImpl.java000066400000000000000000000054651444772566300411030ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.util.Objects; import java.util.stream.Stream; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link StatementDocumentUpdate}. 
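* <p>
* When serialized, the statement changes appear under the "claims" key as
* one flat list of added statements, replaced statements, and removal
* markers, e.g. (a sketch with a hypothetical statement id):
* <pre>
* "claims": [ { ...added statement... }, {"id": "Q42$example-guid", "remove": ""} ]
* </pre>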
* * @see StatementUpdateImpl */ public abstract class StatementDocumentUpdateImpl extends EntityUpdateImpl implements StatementDocumentUpdate { @JsonIgnore private final StatementUpdate statements; /** * Initializes new entity update. * * @param entityId * ID of the entity that is to be updated * @param revisionId * base entity revision to be updated or zero if not available * @param statements * changes in entity statements, possibly empty * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ protected StatementDocumentUpdateImpl( EntityIdValue entityId, long revisionId, StatementUpdate statements) { super(entityId, revisionId); Objects.requireNonNull(statements, "Statement update cannot be null."); EntityIdValue subject = Stream.concat(statements.getAdded().stream(), statements.getReplaced().values().stream()) .map(s -> s.getSubject()) .findFirst().orElse(null); Validate.isTrue(subject == null || subject.equals(entityId), "Statements describe different subject."); this.statements = statements; } @JsonIgnore @Override public boolean isEmpty() { return statements.isEmpty(); } @JsonIgnore @Override public StatementUpdate getStatements() { return statements; } @JsonProperty @JsonInclude(Include.NON_NULL) StatementUpdate getClaims() { return statements.isEmpty() ? null : statements; } } StatementGroupImpl.java000066400000000000000000000102531444772566300372450ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.*; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.*; /** * Helper class to represent a {@link StatementGroup}. * * @author Markus Kroetzsch * @author Antonin Delpeuch */ public class StatementGroupImpl extends AbstractList implements StatementGroup { private final List statements; /** * Constructor. 
* * @param statements * a non-empty list of statements that use the same subject and * main-snak property in their claim */ public StatementGroupImpl(List<Statement> statements) { Validate.notNull(statements, "A non-null list of statements must be provided to create a statement group."); Validate.isTrue(!statements.isEmpty(), "A non-empty list of statements must be provided to create a statement group."); EntityIdValue subject = statements.get(0).getSubject(); PropertyIdValue property = statements.get(0).getMainSnak().getPropertyId(); for(Statement statement : statements) { Validate.isTrue(statement.getSubject().equals(subject), "All statements of a statement group must have the same subject."); Validate.isTrue(statement.getMainSnak().getPropertyId().equals(property), "All statements of a statement group must have the same main snak property."); } this.statements = statements; } @Override public Statement get(int i) { return statements.get(i); } @Override public Iterator<Statement> iterator() { return statements.iterator(); } @Override public int size() { return statements.size(); } @Override public boolean isEmpty() { return statements.isEmpty(); } @Override public List<Statement> getStatements() { return Collections.unmodifiableList(statements); } @Override public StatementGroup getBestStatements() { StatementRank bestRank = StatementRank.NORMAL; List<Statement> bestStatements = new ArrayList<>(); for(Statement statement : statements) { if(statement.getRank() == StatementRank.PREFERRED && bestRank == StatementRank.NORMAL) { bestRank = StatementRank.PREFERRED; bestStatements.clear(); } if(statement.getRank() == bestRank) { bestStatements.add(statement); } } if (bestStatements.isEmpty()) return null; return new StatementGroupImpl(bestStatements); } @Override public PropertyIdValue getProperty() { return statements.get(0).getMainSnak().getPropertyId(); } @Override public EntityIdValue getSubject() { return statements.get(0).getSubject(); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsStatementGroup(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public StatementGroup withStatement(Statement statement) { String statementId = statement.getStatementId(); boolean statementAdded = false; List<Statement> newStatements = new ArrayList<>(this.statements); if (!statementId.isEmpty()) { for(int i = 0; i != newStatements.size(); i++) { String currentStatementId = newStatements.get(i).getStatementId(); if (currentStatementId.equals(statementId)) { newStatements.set(i, statement); statementAdded = true; } } } if (!statementAdded) { newStatements.add(statement); } return new StatementGroupImpl(newStatements); } } StatementImpl.java000066400000000000000000000241311444772566300362300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.*; import org.wikidata.wdtk.util.NestedIterator; import java.io.IOException; import java.util.*; /** * Jackson implementation of {@link Statement}. In JSON, the corresponding * structures are referred to as "claim". * * @author Fredo Erxleben * @author Antonin Delpeuch * @author Thomas Pellissier Tanon * */ @JsonInclude(JsonInclude.Include.NON_EMPTY) public class StatementImpl implements Statement { private final String statementId; private final StatementRank rank; private final Snak mainSnak; /** * A map from property id strings to snaks that encodes the qualifiers. */ private final Map> qualifiers; /** * List of property string ids that encodes the desired order of qualifiers, * which is not specified by the map. */ private final List qualifiersOrder; private final EntityIdValue subjectId; private final List references; private List qualifiersGroups; /** * Constructor. *
<p>
* The statementId is used mainly for communication with a Wikibase site, in * order to refer to statements of that site. When creating new statements * that are not on any site, the empty string can be used. * * @param statementId * the string id of the Statement: can be empty if the statement has not obtained it yet * @param rank * the rank of the Statement * @param mainSnak * the main snak for the Claim of the Statement * @param qualifiers * the snak groups for the qualifiers * @param references * the references for the Statement * @param subjectId * the subject of this Statement */ public StatementImpl( String statementId, StatementRank rank, Snak mainSnak, List qualifiers, List references, EntityIdValue subjectId) { this.statementId = (statementId == null) ? "" : statementId; Validate.notNull(rank, "No rank provided to create a statement."); this.rank = rank; Validate.notNull(mainSnak, "No main snak provided to create a statement."); this.mainSnak = mainSnak; this.qualifiers = new HashMap<>(); this.qualifiersOrder = new ArrayList<>(); for(SnakGroup qualifiersGroup : qualifiers) { this.qualifiers.put(qualifiersGroup.getProperty().getId(), qualifiersGroup.getSnaks()); this.qualifiersOrder.add(qualifiersGroup.getProperty().getId()); } this.references = (references == null) ? Collections.emptyList() : references; Validate.notNull(subjectId); this.subjectId = subjectId; } public StatementImpl( String statementId, StatementRank rank, Snak mainSnak, Map> qualifiers, List qualifiersOrder, List references, EntityIdValue subjectId) { this.statementId = (statementId == null) ? "" : statementId; Validate.notNull(rank, "No rank provided to create a statement."); this.rank = rank; Validate.notNull(mainSnak, "No main snak provided to create a statement."); this.mainSnak = mainSnak; this.qualifiers = (qualifiers == null) ? Collections.emptyMap() : qualifiers; this.qualifiersOrder = (qualifiersOrder == null) ? Collections.emptyList() : qualifiersOrder; this.references = (references == null) ? Collections.emptyList() : references; Validate.notNull(subjectId); this.subjectId = subjectId; } /** * TODO review the utility of this constructor. */ public StatementImpl(String statementId, Snak mainsnak, EntityIdValue subjectId) { this(statementId, StatementRank.NORMAL, mainsnak, null, null, null, subjectId); } /** * Returns the value for the "type" field used in JSON. Only for use by * Jackson during deserialization. * * @return "statement" */ @JsonProperty("type") String getJsonType() { return "statement"; } @Override @JsonIgnore public Claim getClaim() { return new ClaimImpl(this); } @Override @JsonIgnore public EntityIdValue getSubject() { return subjectId; } @Override @JsonProperty("mainsnak") public Snak getMainSnak() { return mainSnak; } @Override @JsonIgnore public List getQualifiers() { if (qualifiersGroups == null) { qualifiersGroups = SnakGroupImpl.makeSnakGroups(qualifiers, qualifiersOrder); } return qualifiersGroups; } @Override @JsonIgnore public Iterator getAllQualifiers() { return new NestedIterator<>(getQualifiers()); } /** * Returns the qualifiers of the claim of this statement. Only for use by * Jackson during serialization. To access this data, use * {@link Statement#getQualifiers()}. */ @JsonProperty("qualifiers") Map> getJsonQualifiers() { return Collections.unmodifiableMap(qualifiers); } /** * Returns the list of property ids used to order qualifiers as found in * JSON. Only for use by Jackson during serialization. 
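* <p>
* In JSON this member looks like, for example:
* <pre>
* "qualifiers-order": ["P580", "P582"]
* </pre>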
* * @return the list of property ids */ @JsonProperty("qualifiers-order") List getQualifiersOrder() { return Collections.unmodifiableList(this.qualifiersOrder); } @Override @JsonSerialize(using = StatementRankSerializer.class) public StatementRank getRank() { return rank; } @Override @JsonProperty("references") public List getReferences() { return references; } @Override @JsonProperty("id") public String getStatementId() { return statementId; } @Override @JsonIgnore public Value getValue() { if (mainSnak instanceof ValueSnak) { return ((ValueSnak)mainSnak).getValue(); } return null; } @Override public Statement withStatementId(String id) { return new StatementImpl(id, getRank(), getMainSnak(), getQualifiers(), getReferences(), getSubject()); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsStatement(this, obj); } @Override public String toString() { return ToString.toString(this); } /** * Helper class for deserializing statements from JSON. */ @JsonIgnoreProperties(ignoreUnknown=true) public static class PreStatement { private final String statementId; private final StatementRank rank; private final List references; private final Snak mainSnak; private final Map> qualifiers; private final List qualifiersOrder; private PreStatement( String statementId, StatementRank rank, Snak mainsnak, Map> qualifiers, List qualifiersOrder, List references) { this.statementId = statementId; this.rank = rank; this.mainSnak = mainsnak; this.qualifiers = qualifiers; this.qualifiersOrder = qualifiersOrder; this.references = references; } /** * JSON deserialization creator. */ @JsonCreator static PreStatement fromJson( @JsonProperty("id") String id, @JsonProperty("rank") @JsonDeserialize(using = StatementRankDeserializer.class) StatementRank rank, @JsonProperty("mainsnak") SnakImpl mainsnak, @JsonProperty("qualifiers") Map> qualifiers, @JsonProperty("qualifiers-order") List qualifiersOrder, @JsonProperty("references") @JsonDeserialize(contentAs=ReferenceImpl.class) List references) { // Forget the concrete type of Jackson snaks for the qualifiers if(qualifiers == null) { qualifiers = Collections.emptyMap(); } Map> newQualifiers = new HashMap<>(qualifiers.size()); for(Map.Entry> entry : qualifiers.entrySet()) { List snaks = new ArrayList<>(entry.getValue()); newQualifiers.put(entry.getKey(), snaks); } return new PreStatement(id, rank, mainsnak, newQualifiers, qualifiersOrder, references); } public StatementImpl withSubject(EntityIdValue subjectId) { return new StatementImpl(statementId, rank, mainSnak, qualifiers, qualifiersOrder, references, subjectId); } } /** * A serializer implementation for the StatementRank enumeration. This is * necessary since Java enumerations are in upper case but the Json counterpart * is in lower case. */ static class StatementRankSerializer extends JsonSerializer { @Override public void serialize(StatementRank value, JsonGenerator jgen, SerializerProvider provider) throws IOException { jgen.writeString(value.name().toLowerCase()); } } /** * A deserializer implementation for the StatementRank enumeration. This is * necessary since Java enumerations are in upper case but the Json counterpart * is in lower case. 
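* <p>
* For example, the JSON string "preferred" is mapped to
* {@link StatementRank#PREFERRED} when reading, and back to "preferred"
* when writing.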
*/ static class StatementRankDeserializer extends JsonDeserializer<StatementRank> { @Override public StatementRank deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { return StatementRank.valueOf(jp.getText().toUpperCase()); } } } StatementUpdateImpl.java000066400000000000000000000117241444772566300373770ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static java.util.stream.Collectors.toMap; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Stream; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonValue; /** * Jackson implementation of {@link StatementUpdate}. * * @see StatementDocumentUpdateImpl */ public class StatementUpdateImpl implements StatementUpdate { @JsonIgnore private final List<Statement> added; @JsonIgnore private final Map<String, Statement> replaced; @JsonIgnore private final Set<String> removed; /** * Initializes new statement update.
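* <p>
* A minimal construction sketch (the statement id below is hypothetical;
* callers typically obtain instances through builder helpers rather than
* calling this constructor directly):
* <pre>
* StatementUpdate update = new StatementUpdateImpl(
*     Collections.singletonList(addedStatement),   // new statement, empty id
*     Collections.emptyList(),                     // nothing replaced
*     Collections.singleton("Q42$example-guid"));  // removed by id
* </pre>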
* * @param added * added statements * @param replaced * replaced statements * @param removed * IDs of removed statements * @throws NullPointerException * if any parameter or any item is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public StatementUpdateImpl(Collection<Statement> added, Collection<Statement> replaced, Collection<String> removed) { Objects.requireNonNull(added, "Added statement collection cannot be null."); Objects.requireNonNull(replaced, "Replaced statement collection cannot be null."); Objects.requireNonNull(removed, "Removed statement collection cannot be null."); for (Statement statement : added) { Objects.requireNonNull(statement, "Added statement cannot be null."); Validate.isTrue(statement.getStatementId().isEmpty(), "Added statement cannot have an ID."); } for (Statement statement : replaced) { Objects.requireNonNull(statement, "Replaced statement cannot be null."); Validate.notBlank(statement.getStatementId(), "Replaced statement must have an ID."); } for (String id : removed) { Validate.notBlank(id, "Removed statement ID cannot be null or blank."); } long distinctIds = Stream .concat(replaced.stream().map(s -> s.getStatementId()), removed.stream()) .distinct() .count(); Validate.isTrue(replaced.size() + removed.size() == distinctIds, "Statement IDs must be unique."); Validate.isTrue( Stream.concat(added.stream(), replaced.stream()).map(s -> s.getSubject()).distinct().count() <= 1, "All statements must have the same subject."); EntityIdValue subject = Stream.concat(added.stream(), replaced.stream()) .map(s -> s.getSubject()) .findFirst().orElse(null); Validate.isTrue(subject == null || !subject.isPlaceholder(), "Cannot update entity with placeholder ID."); this.added = Collections.unmodifiableList(new ArrayList<>(added)); this.replaced = Collections.unmodifiableMap(replaced.stream().collect(toMap(s -> s.getStatementId(), s -> s))); this.removed = Collections.unmodifiableSet(new HashSet<>(removed)); } @JsonIgnore @Override public boolean isEmpty() { return added.isEmpty() && replaced.isEmpty() && removed.isEmpty(); } @JsonIgnore @Override public List<Statement> getAdded() { return added; } @JsonIgnore @Override public Map<String, Statement> getReplaced() { return replaced; } @JsonIgnore @Override public Set<String> getRemoved() { return removed; } static class RemovedStatement { private final String id; RemovedStatement(String id) { this.id = id; } public String getId() { return id; } @JsonProperty("remove") String getRemoveCommand() { return ""; } } @JsonValue List<Object> toJson() { List<Object> list = new ArrayList<>(); list.addAll(added); list.addAll(replaced.values()); for (String id : removed) { list.add(new RemovedStatement(id)); } return list; } @Override public boolean equals(Object obj) { return Equality.equalsStatementUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } StringValueImpl.java000066400000000000000000000050221444772566300365250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import
com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Jackson implementation of {@link StringValue}. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class StringValueImpl extends ValueImpl implements StringValue { private final String value; /** * Constructor for deserialization from JSON by Jackson. * * @param value * the string represented by this Wikibase value */ @JsonCreator public StringValueImpl( @JsonProperty("value") String value) { super(ValueImpl.JSON_VALUE_TYPE_STRING); Validate.notNull(value, "A string value must provide a non-null string"); this.value = value; } /** * Returns the string. Only for use by Jackson during serialization. * * @return the string value */ public String getValue() { return this.value; } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsStringValue(this, obj); } @JsonIgnore @Override public String getString() { return this.value; } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public String toString() { return ToString.toString(this); } } TermImpl.java000066400000000000000000000062141444772566300351750ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson representation of {@link MonolingualTextValue} data used in labels, * aliases, and descriptions in JSON. 
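* In JSON such a term is a small object of the following shape (sketch;
* the values are examples):
* <pre>
* {"language": "en", "value": "Douglas Adams"}
* </pre>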
Note that this is distinct from the JSON * representation for property values of type {@link MonolingualTextValue}, * which is implemented in {@link MonolingualTextValueImpl}. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) public class TermImpl implements MonolingualTextValue { /** * The language code. */ private final String languageCode; /** * The text value. */ private final String text; /** * Create a new object from the given data. * * @param languageCode * the language code of the value * @param text * the text content of the value */ @JsonCreator public TermImpl( @JsonProperty("language") String languageCode, @JsonProperty("value") String text) { Validate.notNull(languageCode, "A language has to be provided to create a MonolingualTextValue"); this.languageCode = languageCode; Validate.notNull(text, "A text has to be provided to create a MonolingualTextValue"); this.text = text; } /** * Copy constructor. This constructor is useful for converting from * {@link MonolingualTextValueImpl}. * * @param other * monolingual text value to copy */ public TermImpl(MonolingualTextValue other) { this(other.getLanguageCode(), other.getText()); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @JsonProperty("value") @Override public String getText() { return this.text; } @JsonProperty("language") @Override public String getLanguageCode() { return this.languageCode; } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsMonolingualTextValue(this, obj); } @Override public String toString() { return ToString.toString(this); } } TermUpdateImpl.java000066400000000000000000000076551444772566300363520ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static java.util.stream.Collectors.toMap; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Stream; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonValue; /** * Jackson implementation of {@link TermUpdate}. */ public class TermUpdateImpl implements TermUpdate { @JsonIgnore private final Map modified; @JsonIgnore private final Set removed; /** * Initializes new term update. 
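* <p>
* When serialized via {@link #toJson()}, modified and removed terms are
* merged into a single map keyed by language code, e.g. (sketch):
* <pre>
* {"en": {"language": "en", "value": "new label"},
*  "de": {"language": "de", "remove": ""}}
* </pre>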
* * @param modified * added or changed terms * @param removed * language codes of removed terms * @throws NullPointerException * if any required parameter or its item is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ public TermUpdateImpl(Collection modified, Collection removed) { Objects.requireNonNull(modified, "Collection of modified terms cannot be null."); Objects.requireNonNull(removed, "Collection of removed terms cannot be null."); for (MonolingualTextValue value : modified) { Objects.requireNonNull(value, "Modified term cannot be null."); } for (String language : removed) { Validate.notBlank(language, "Language code must be a non-blank string."); } long distinct = Stream.concat(removed.stream(), modified.stream().map(v -> v.getLanguageCode())).distinct().count(); Validate.isTrue(distinct == modified.size() + removed.size(), "Every term must have unique language code."); this.modified = Collections.unmodifiableMap(modified.stream() .map(TermImpl::new) .collect(toMap(v -> v.getLanguageCode(), r -> r))); this.removed = Collections.unmodifiableSet(new HashSet<>(removed)); } @JsonIgnore @Override public boolean isEmpty() { return modified.isEmpty() && removed.isEmpty(); } @JsonIgnore @Override public Map getModified() { return modified; } @JsonIgnore @Override public Set getRemoved() { return removed; } static class RemovedTerm { private final String language; RemovedTerm(String language) { this.language = language; } @JsonProperty String getLanguage() { return language; } @JsonProperty("remove") String getRemoveCommand() { return ""; } } @JsonValue Map toJson() { Map map = new HashMap<>(); for (MonolingualTextValue value : modified.values()) { map.put(value.getLanguageCode(), value); } for (String language : removed) { map.put(language, new RemovedTerm(language)); } return map; } @Override public boolean equals(Object obj) { return Equality.equalsTermUpdate(this, obj); } @Override public int hashCode() { return Hash.hashCode(this); } } TermedDocumentUpdateImpl.java000066400000000000000000000077371444772566300403630ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static java.util.stream.Collectors.toMap; import java.util.Collections; import java.util.Map; import java.util.Objects; import org.apache.commons.lang3.Validate; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocumentUpdate; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link TermedStatementDocumentUpdate}. */ public abstract class TermedDocumentUpdateImpl extends LabeledDocumentUpdateImpl implements TermedStatementDocumentUpdate { @JsonIgnore private final TermUpdate descriptions; @JsonIgnore private final Map aliases; /** * Initializes new entity update. * * @param entityId * ID of the entity that is to be updated * @param revisionId * base entity revision to be updated or zero if not available * @param labels * changes in entity labels or {@code null} for no change * @param descriptions * changes in entity descriptions, possibly empty * @param aliases * changes in entity aliases, possibly empty * @param statements * changes in entity statements, possibly empty * @throws NullPointerException * if any required parameter or its part is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ protected TermedDocumentUpdateImpl( EntityIdValue entityId, long revisionId, TermUpdate labels, TermUpdate descriptions, Map aliases, StatementUpdate statements) { super(entityId, revisionId, labels, statements); Objects.requireNonNull(descriptions, "Description update cannot be null."); Objects.requireNonNull(aliases, "Alias map cannot be null."); for (Map.Entry entry : aliases.entrySet()) { Validate.notBlank(entry.getKey(), "Alias language code cannot be null or blank."); Objects.requireNonNull(entry.getValue(), "Alias update cannot be null."); if (entry.getValue().getLanguageCode().isPresent()) { Validate.isTrue(entry.getValue().getLanguageCode().get().equals(entry.getKey()), "Inconsistent alias language codes."); } } this.descriptions = descriptions; this.aliases = Collections.unmodifiableMap(aliases.keySet().stream() .filter(k -> !aliases.get(k).isEmpty()) .collect(toMap(k -> k, k -> aliases.get(k)))); } @JsonIgnore @Override public boolean isEmpty() { return super.isEmpty() && descriptions.isEmpty() && aliases.isEmpty(); } @JsonIgnore @Override public TermUpdate getDescriptions() { return descriptions; } @JsonProperty("descriptions") @JsonInclude(Include.NON_NULL) TermUpdate getJsonDescriptions() { return descriptions.isEmpty() ? 
null : descriptions; } @JsonProperty("aliases") @JsonInclude(Include.NON_EMPTY) @Override public Map getAliases() { return aliases; } } TermedStatementDocumentImpl.java000066400000000000000000000212061444772566300410700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.*; import com.fasterxml.jackson.annotation.JsonSubTypes.Type; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonNode; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.*; import java.util.Map.Entry; /** * Abstract Jackson implementation of {@link TermedDocument} and {@link StatementDocument}. * You should not rely on it directly but build instances with the Datamodel helper and * use {@link EntityDocumentImpl} for deserialization. * * @author Fredo Erxleben * @author Antonin Delpeuch * @author Thomas Pellissier Tanon * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @JsonSubTypes({ //TODO: drop in future release @Type(value = ItemDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_ITEM), @Type(value = PropertyDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_PROPERTY), @Type(value = MediaInfoDocumentImpl.class, name = EntityDocumentImpl.JSON_TYPE_MEDIA_INFO) }) public abstract class TermedStatementDocumentImpl extends LabeledStatementDocumentImpl implements TermedStatementDocument { protected final Map descriptions; protected final Map> aliases; /** * Constructor. * * @param id * the identifier of the subject of this document * @param labels * the labels for this entity, at most one per language * @param descriptions * the descriptions for this entity, at most one per language * @param aliases * the aliases for this language. Their relative order in a * given language will be preserved. * @param claims * the statement groups contained in this document * @param revisionId * the id of the last revision of this document */ public TermedStatementDocumentImpl( EntityIdValue id, List labels, List descriptions, List aliases, List claims, long revisionId) { super(id, labels, claims, revisionId); if (descriptions != null) { this.descriptions = constructTermMap(descriptions); } else { this.descriptions = Collections.emptyMap(); } if (aliases != null) { this.aliases = constructTermListMap(aliases); } else { this.aliases = Collections.emptyMap(); } } /** * Constructor used for JSON deserialization with Jackson. 
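* <p>
* Schematically, the top-level JSON members consumed here are (sketch;
* the entity id and revision number are examples):
* <pre>
* {"id": "Q42", "labels": {...}, "descriptions": {...},
*  "aliases": {"en": [...]}, "claims": {...}, "lastrevid": 123}
* </pre>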
*/ TermedStatementDocumentImpl( @JsonProperty("id") String jsonId, @JsonProperty("labels") Map labels, @JsonProperty("descriptions") Map descriptions, @JsonProperty("aliases") Map> aliases, @JsonProperty("claims") Map> claims, @JsonProperty("lastrevid") long revisionId, @JacksonInject("siteIri") String siteIri) { super(jsonId, labels, claims, revisionId, siteIri); if (descriptions != null) { this.descriptions = descriptions; } else { this.descriptions = Collections.emptyMap(); } if (aliases != null) { this.aliases = aliases; } else { this.aliases = Collections.emptyMap(); } } /** * Protected constructor provided to ease the creation * of copies. No check is made and each field is reused without * copying. * * @param labels * a map from language codes to monolingual values with * the same language codes * @param descriptions * a map from language codes to monolingual values with * the same language codes * @param aliases * a map from language codes to lists of monolingual values * with the same language codes * @param claims * @param revisionId */ protected TermedStatementDocumentImpl( EntityIdValue subject, Map labels, Map descriptions, Map> aliases, Map> claims, long revisionId) { super(subject, labels, claims, revisionId); this.descriptions = descriptions; this.aliases = aliases; } @JsonProperty("aliases") @Override public Map> getAliases() { // because of the typing provided by the interface one has to // re-create the map anew, simple casting is not possible Map> returnMap = new HashMap<>(); for (Entry> entry : this.aliases .entrySet()) { returnMap.put(entry.getKey(), Collections . unmodifiableList(entry.getValue())); } return Collections.unmodifiableMap(returnMap); } @JsonProperty("descriptions") @Override public Map getDescriptions() { return Collections.unmodifiableMap(this.descriptions); } @JsonIgnore public String getSiteIri() { return this.siteIri; } private static Map> constructTermListMap(List terms) { Map> map = new HashMap<>(); for(MonolingualTextValue term : terms) { String language = term.getLanguageCode(); // We need to make sure the terms are of the right type, otherwise they will not // be serialized correctly. if(!map.containsKey(language)) { map.put(language, new ArrayList<>()); } map.get(language).add(toTerm(term)); } return map; } protected static Map> withAliases( Map> values, String language, List aliases) { Map> newValues = new HashMap<>(values); if(!newValues.containsKey(language)) { newValues.put(language, new ArrayList<>()); } List l = newValues.get(language); l.clear(); for(MonolingualTextValue term : aliases) { if(!term.getLanguageCode().equals(language)) { throw new IllegalArgumentException("The alias " + term + " does not have the same language as its group " + language); } l.add(toTerm(term)); } return newValues; } /** * We need to make sure the terms are of the right type, otherwise they will not be serialized correctly. */ private static MonolingualTextValue toTerm(MonolingualTextValue term) { return term instanceof TermImpl ? term : new TermImpl(term.getLanguageCode(), term.getText()); } /** * A deserializer implementation for the aliases in an * {@link TermedStatementDocumentImpl}. *
<p>
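* Both of the following serializations occur, e.g. (sketch):
* <pre>
* "aliases": {"en": [{"language": "en", "value": "an alias"}]}
* "aliases": []
* </pre>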
* It implements a workaround to cope with empty aliases being represented as * "aliases":[] despite its declaration as map and not as list or * array. This is neither nice nor fast, and should be obsolete as soon as * possible. * */ static class AliasesDeserializer extends JsonDeserializer>> { @Override public Map> deserialize( JsonParser jp, DeserializationContext ctxt) throws JsonMappingException { Map> contents = new HashMap<>(); try { JsonNode node = jp.getCodec().readTree(jp); if (!node.isArray()) { Iterator> nodeIterator = node.fields(); while (nodeIterator.hasNext()) { List mltvList = new ArrayList<>(); Entry currentNode = nodeIterator.next(); // get the list of MLTVs for (JsonNode mltvEntry : currentNode.getValue()) { String language = mltvEntry.get("language").asText(); String value = mltvEntry.get("value").asText(); mltvList.add(new TermImpl(language,value)); } contents.put(currentNode.getKey(), mltvList); } } } catch (Exception e) { throw new JsonMappingException(jp, "Unexpected alias list serialization", e); } return contents; } } } TimeValueImpl.java000066400000000000000000000312231444772566300361570ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import java.time.DateTimeException; import java.time.LocalDate; import org.apache.commons.lang3.Validate; import org.threeten.extra.chrono.JulianDate; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Jackson implementation of {@link TimeValue}. * * @author Fredo Erxleben * @author Markus Kroetzsch * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) @JsonDeserialize() public class TimeValueImpl extends ValueImpl implements TimeValue { /** * Inner helper object to store the actual data. Used to get the nested JSON * structure that is required here. */ private final JacksonInnerTime value; /** * Constructor. 
* * @param year * a year number, where 0 refers to 1BCE * @param month * a month number between 1 and 12 * @param day * a day number between 1 and 31 * @param hour * an hour number between 0 and 23 * @param minute * a minute number between 0 and 59 * @param second * a second number between 0 and 60 (possible leap second) * @param precision * a value in the range of {@link TimeValue#PREC_DAY}, ..., * {@link TimeValue#PREC_1GY} * @param beforeTolerance * non-negative integer tolerance before the value; see * {@link TimeValue#getBeforeTolerance()} * @param afterTolerance * non-zero, positive integer tolerance after the value; see * {@link TimeValue#getAfterTolerance()} * @param calendarModel * the IRI of the calendar model preferred when displaying the * date; usually {@link TimeValue#CM_GREGORIAN_PRO} or * {@link TimeValue#CM_JULIAN_PRO} * @param timezoneOffset * offset in minutes that should be applied when displaying this * time */ public TimeValueImpl(long year, byte month, byte day, byte hour, byte minute, byte second, byte precision, int beforeTolerance, int afterTolerance, int timezoneOffset, String calendarModel) { super(JSON_VALUE_TYPE_TIME); this.value = new JacksonInnerTime( year, month, day, hour, minute, second, timezoneOffset, beforeTolerance, afterTolerance, precision, calendarModel); } /** * Constructor used for deserialization from JSON with Jackson. */ @JsonCreator TimeValueImpl( @JsonProperty("value") JacksonInnerTime value) { super(JSON_VALUE_TYPE_TIME); this.value = value; } /** * Returns the inner value helper object. Only for use by Jackson during * serialization. * * @return the inner time value */ public JacksonInnerTime getValue() { return value; } @JsonIgnore @Override public long getYear() { return this.value.getYear(); } @JsonIgnore @Override public byte getMonth() { return this.value.getMonth(); } @JsonIgnore @Override public byte getDay() { return this.value.getDay(); } @JsonIgnore @Override public byte getHour() { return this.value.getHour(); } @JsonIgnore @Override public byte getMinute() { return this.value.getMinute(); } @JsonIgnore @Override public byte getSecond() { return this.value.getSecond(); } @JsonIgnore @Override public String getPreferredCalendarModel() { return this.value.getCalendarmodel(); } @JsonIgnore @Override public ItemIdValue getPreferredCalendarModelItemId() { return ItemIdValueImpl.fromIri(this.value.getCalendarmodel()); } @JsonIgnore @Override public byte getPrecision() { return (byte) this.value.getPrecision(); } @JsonIgnore @Override public int getTimezoneOffset() { return this.value.getTimezone(); } @JsonIgnore @Override public int getBeforeTolerance() { return this.value.getBefore(); } @JsonIgnore @Override public int getAfterTolerance() { return this.value.getAfter(); } @Override public <T> T accept(ValueVisitor<T> valueVisitor) { return valueVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsTimeValue(this, obj); } @Override public String toString() { return ToString.toString(this); } @Override public TimeValue toGregorian() { // already in Gregorian calendar if (this.getPreferredCalendarModel().equals(TimeValue.CM_GREGORIAN_PRO)) { return this; } // convert Julian if (this.getPreferredCalendarModel().equals(TimeValue.CM_JULIAN_PRO) && this.getPrecision() >= TimeValue.PREC_DAY && this.value.year > Integer.MIN_VALUE && this.value.year < Integer.MAX_VALUE ) { try { final JulianDate julian = JulianDate.of((int) this.value.year,
this.value.month, this.value.day); final LocalDate date = LocalDate.from(julian); return new TimeValueImpl( date.getYear(), (byte) date.getMonth().getValue(), (byte) date.getDayOfMonth(), this.value.hour, this.value.minute, this.value.second, (byte) this.value.precision, this.value.before, this.value.after, this.value.timezone, TimeValue.CM_GREGORIAN_PRO ); } catch(DateTimeException e) { return null; } } return null; } /** * Helper object that represents the JSON object structure of the value. */ @JsonIgnoreProperties(ignoreUnknown = true) static class JacksonInnerTime { private final String time; private final int timezone; private final int before; private final int after; private final int precision; private final String calendarmodel; private long year; private byte month; private byte day; private byte hour; private byte minute; private byte second; /** * Constructs a new object for the given data. * * @param time * an ISO timestamp * @param timezone * offset in minutes that should be applied when displaying this * time * @param before * non-negative integer tolerance before the value; see * {@link TimeValue#getBeforeTolerance()} * @param after * non-zero, positive integer tolerance after the value; see * {@link TimeValue#getAfterTolerance()} * @param precision * a value in the range of {@link TimeValue#PREC_DAY}, ..., * {@link TimeValue#PREC_1GY} * @param calendarModel * the IRI of the calendar model preferred when displaying the * date; usually {@link TimeValue#CM_GREGORIAN_PRO} or * {@link TimeValue#CM_JULIAN_PRO} */ @JsonCreator JacksonInnerTime( @JsonProperty("time") String time, @JsonProperty("timezone") int timezone, @JsonProperty("before") int before, @JsonProperty("after") int after, @JsonProperty("precision") int precision, @JsonProperty("calendarmodel") String calendarModel) { this.time = time; this.timezone = timezone; this.before = before; this.after = after; this.precision = precision; this.calendarmodel = calendarModel; this.decomposeTimeString(); } /** * Constructor for times that have already been parsed.
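* <p>
* The JSON object this helper mirrors looks like the following (sketch;
* precision 11 is day precision, and Q1985727 is the proleptic Gregorian
* calendar):
* <pre>
* {"time": "+2023-08-14T00:00:00Z", "timezone": 0, "before": 0,
*  "after": 0, "precision": 11,
*  "calendarmodel": "http://www.wikidata.org/entity/Q1985727"}
* </pre>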
* * @param year * a year number, where 0 refers to 1BCE * @param month * a month number between 1 and 12 * @param day * a day number between 1 and 31 * @param hour * an hour number between 0 and 23 * @param minute * a minute number between 0 and 59 * @param second * a second number between 0 and 60 (possible leap second) * @param timezone * offset in minutes that should be applied when displaying this * time * @param before * non-negative integer tolerance before the value; see * {@link TimeValue#getBeforeTolerance()} * @param after * non-zero, positive integer tolerance before the value; see * {@link TimeValue#getAfterTolerance()} * @param precision * a value in the range of {@link TimeValue#PREC_DAY}, ..., * {@link TimeValue#PREC_1GY} * @param calendarModel * the IRI of the calendar model preferred when displaying the * date; usually {@link TimeValue#CM_GREGORIAN_PRO} or * {@link TimeValue#CM_JULIAN_PRO} */ JacksonInnerTime(long year, byte month, byte day, byte hour, byte minute, byte second, int timezone, int before, int after, int precision, String calendarModel) { Validate.notNull(calendarModel, "Calendar model must not be null"); this.year = year; this.month = month; this.day = day; this.hour = hour; this.minute = minute; this.second = second; this.timezone = timezone; this.before = before; this.after = after; this.precision = precision; this.calendarmodel = calendarModel; this.time = composeTimeString(); } /** * Helper method to decompose the time string into its parts. */ private void decomposeTimeString() { // decompose the time string into its parts String[] substrings = time.split("(? contents; @JsonCreator private JacksonIdValue( @JsonProperty("entity-type") String entityType, @JsonProperty("id") String id) { Validate.notNull(id); this.entityType = entityType; this.id = id; contents = new HashMap<>(); } @JsonProperty("entity-type") @JsonInclude(Include.NON_NULL) public String getEntityTypeString() { return entityType; } @JsonProperty("id") public String getId() { return id; } @JsonAnyGetter protected Map getContents() { return contents; } @JsonAnySetter protected void loadContents(String key, JsonNode value) { this.contents.put(key, value); } } @Override public String toString() { return ToString.toString(this); } @Override @JsonIgnore public String getEntityType() { if (value.entityType == null) { return ET_UNSUPPORTED; } String[] parts = value.entityType.split("-"); for(int i = 0; i < parts.length; i++) { parts[i] = StringUtils.capitalize(parts[i]); } return "http://www.wikidata.org/ontology#" + StringUtils.join(parts); } @Override @JsonIgnore public String getId() { return value.getId(); } @JsonProperty("value") protected JacksonIdValue getInnerValue() { return value; } @Override @JsonIgnore public String getSiteIri() { return siteIri; } @Override @JsonIgnore public String getIri() { return siteIri.concat(value.getId()); } @JsonIgnore @Override public boolean isPlaceholder() { return false; } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object other) { return Equality.equalsEntityIdValue(this, other); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override @JsonIgnore public String getEntityTypeJsonString() { return value.getEntityTypeString(); } } UnsupportedValueImpl.java000066400000000000000000000063461444772566300376210ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage 
org.wikidata.wdtk.datamodel.implementation; /*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2019 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.HashMap; import java.util.Map; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.UnsupportedValue; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Represents a value with an unsupported datatype. * We can still "deserialize" it by just storing its * JSON representation, so that it can be serialized * back to its original representation. * This avoids parsing failures on documents containing * these values. * * @author Antonin Delpeuch * */ @JsonDeserialize() public class UnsupportedValueImpl extends ValueImpl implements UnsupportedValue { private final String typeString; private final Map contents; @JsonCreator private UnsupportedValueImpl( @JsonProperty("type") String typeString) { super(typeString); this.typeString = typeString; this.contents = new HashMap<>(); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override @JsonProperty("type") public String getTypeJsonString() { return typeString; } @JsonAnyGetter protected Map getContents() { return contents; } @JsonAnySetter protected void loadContents(String key, JsonNode value) { this.contents.put(key, value); } @Override public String toString() { return ToString.toString(this); } /** * We do not use the Hash helper as in other datamodel * classes because this would require exposing the contents * of the value publicly, which goes against the desired * opacity of the representation. */ @Override public int hashCode() { return typeString.hashCode() + 31*contents.hashCode(); } /** * We do not use the Equality helper as in other datamodel * classes because this would require exposing the contents * of the value publicly, which goes against the desired * opacity of the representation. 
*/ @Override public boolean equals(Object other) { if (!(other instanceof UnsupportedValueImpl)) { return false; } UnsupportedValueImpl otherValue = (UnsupportedValueImpl) other; return typeString.equals(otherValue.getTypeJsonString()) && contents.equals(otherValue.getContents()); } } ValueImpl.java000066400000000000000000000142221444772566300353400ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.ObjectCodec; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.interfaces.Value; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import java.io.IOException; /** * Abstract Jackson implementation of {@link Value}. * * @author Fredo Erxleben * @author Markus Kroetzsch * */ @JsonDeserialize(using = ValueImpl.JacksonDeserializer.class) @JsonIgnoreProperties(ignoreUnknown = true) public abstract class ValueImpl implements Value { /** * String used to denote the string value type in JSON. */ public static final String JSON_VALUE_TYPE_STRING = "string"; /** * String used to denote the time value type in JSON. */ public static final String JSON_VALUE_TYPE_TIME = "time"; /** * String used to denote the globe coordinates value type in JSON. */ public static final String JSON_VALUE_TYPE_GLOBE_COORDINATES = "globecoordinate"; /** * String used to denote the entity id value type in JSON. */ public static final String JSON_VALUE_TYPE_ENTITY_ID = "wikibase-entityid"; /** * String used to denote the quantity value type in JSON. */ public static final String JSON_VALUE_TYPE_QUANTITY = "quantity"; /** * String used to denote the monolingual text value type in JSON. */ public static final String JSON_VALUE_TYPE_MONOLINGUAL_TEXT = "monolingualtext"; /** * JSON type id of this value. */ private final String type; /** * Constructor. Creates a value object with the given JSON type. * * @param type * JSON type constant */ public ValueImpl(String type) { this.type = type; } /** * Returns the JSON type string of this value. Only for use by Jackson * during serialization. * * @return the JSON type string */ public String getType() { return this.type; } /** * Custom Jackson deserializer that maps the JSON representation of Wikibase * values to WDTK classes. In most cases, the class to use is defined by the * value of the "type" field, but for entities one has to look deeper into the * structure to get the "entity-type" field as well. This is not possible using * simpler mechanisms. 
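* <p>
* Roughly, the dispatch implemented below works as follows (an
* illustrative summary, not the exhaustive mapping):
* <pre>
* // "type": "string"            -> StringValueImpl
* // "type": "time"              -> TimeValueImpl
* // "type": "wikibase-entityid" -> inspect "entity-type"/"id" in "value"
* // unrecognised "type"         -> UnsupportedValueImpl
* </pre>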
* */ static class JacksonDeserializer extends StdDeserializer { private static final long serialVersionUID = 6659522064661507680L; JacksonDeserializer() { super(ValueImpl.class); } @Override public ValueImpl deserialize(JsonParser jsonParser, DeserializationContext ctxt) throws IOException { ObjectCodec mapper = jsonParser.getCodec(); JsonNode root = mapper.readTree(jsonParser); Class valueClass = getValueClass(root, jsonParser); return mapper.treeToValue(root, valueClass); } /** * Finds the Java class to use for deserializing the JSON structure * represented by the given node. * * @param jsonNode * the JSON node that represents the value to deserialize * @return the Java class to use for deserialization * @throws JsonMappingException * if we do not have a class for the given JSON */ private Class getValueClass(JsonNode jsonNode, JsonParser jsonParser) throws JsonMappingException { String jsonType = jsonNode.get("type").asText(); switch (jsonType) { case JSON_VALUE_TYPE_ENTITY_ID: JsonNode valueNode = jsonNode.get("value"); if (valueNode != null) { if(valueNode.has("entity-type")) { try { return getValueClassFromEntityType(valueNode.get("entity-type").asText()); } catch (IllegalArgumentException e) { return UnsupportedEntityIdValueImpl.class; } } else if(valueNode.has("id")) { try { return getValueClassFromEntityType( EntityIdValueImpl.guessEntityTypeFromId(valueNode.get("id").asText(),true) ); } catch (IllegalArgumentException e) { return UnsupportedEntityIdValueImpl.class; } } else { throw new JsonMappingException(jsonParser, "Unexpected entity id serialization"); } } case JSON_VALUE_TYPE_STRING: return StringValueImpl.class; case JSON_VALUE_TYPE_TIME: return TimeValueImpl.class; case JSON_VALUE_TYPE_GLOBE_COORDINATES: return GlobeCoordinatesValueImpl.class; case JSON_VALUE_TYPE_QUANTITY: return QuantityValueImpl.class; case JSON_VALUE_TYPE_MONOLINGUAL_TEXT: return MonolingualTextValueImpl.class; default: return UnsupportedValueImpl.class; } } private Class getValueClassFromEntityType(String entityType) { switch (entityType) { case EntityIdValueImpl.JSON_ENTITY_TYPE_ITEM: return ItemIdValueImpl.class; case EntityIdValueImpl.JSON_ENTITY_TYPE_LEXEME: return LexemeIdValueImpl.class; case EntityIdValueImpl.JSON_ENTITY_TYPE_PROPERTY: return PropertyIdValueImpl.class; case EntityIdValueImpl.JSON_ENTITY_TYPE_FORM: return FormIdValueImpl.class; case EntityIdValueImpl.JSON_ENTITY_TYPE_SENSE: return SenseIdValueImpl.class; case EntityIdValueImpl.JSON_ENTITY_TYPE_MEDIA_INFO: return MediaInfoIdValueImpl.class; default: throw new IllegalArgumentException("Entities of type \"" + entityType + "\" are not supported."); } } } } ValueSnakImpl.java000066400000000000000000000131401444772566300361530ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import org.apache.commons.lang3.Validate; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.SnakVisitor; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Jackson implementation of {@link ValueSnak}. * * @author Fredo Erxleben * @author Antonin Delpeuch * */ @JsonIgnoreProperties(ignoreUnknown = true) public class ValueSnakImpl extends SnakImpl implements ValueSnak { /** * The {@link Value} assigned to this snak. */ @JsonDeserialize(as = ValueImpl.class) private final Value datavalue; /** * The datatype of this property which determines * the type of datavalue it stores. It can be null, in the case * of string datavalues. */ private final String datatype; /** * Constructor. * * @param property * the id of the property used in this snak * @param value * the target value for this snak */ public ValueSnakImpl(PropertyIdValue property, Value value) { super(property); Validate.notNull(value, "A datavalue must be provided to create a value snak."); datavalue = (value instanceof TermImpl) ? new MonolingualTextValueImpl(((TermImpl) value).getText(), ((TermImpl) value).getLanguageCode()) : value; this.datatype = getJsonPropertyTypeForValueType(datavalue); } /** * Constructor used to deserialize from JSON with Jackson. */ @JsonCreator protected ValueSnakImpl( @JsonProperty("property") String property, @JsonProperty("datatype") String datatype, @JsonProperty("datavalue") Value datavalue, @JacksonInject("siteIri") String siteIri) { super(property, siteIri); Validate.notNull(datavalue, "A datavalue must be provided to create a value snak."); this.datavalue = datavalue; this.datatype = datatype; } @JsonProperty("datavalue") public Value getDatavalue() { return this.datavalue; } @Override @JsonIgnore public Value getValue() { return this.datavalue; } /** * Returns the JSON datatype string. Only for use by Jackson during * serialization. * * The property datatype of the property used for this value snak. This is * redundant information provided in the JSON but not represented in the * datamodel. We keep it and serialize it if given, but if we do not have * it, we set it to null and it will be omitted in the serialization. 
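* <p>
* For example (sketch; {@code propertyId} and {@code timeValue} are
* assumed to be available):
* <pre>
* ValueSnak snak = new ValueSnakImpl(propertyId, timeValue);
* snak.getDatatype();  // "time", since the value is a TimeValue
* </pre>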
* * @return the JSON datatype string */ @JsonProperty("datatype") @JsonInclude(value = JsonInclude.Include.NON_NULL) public String getDatatype() { return this.datatype; } @Override @JsonProperty("snaktype") public String getSnakType() { return SnakImpl.JSON_SNAK_TYPE_VALUE; } @Override public T accept(SnakVisitor snakVisitor) { return snakVisitor.visit(this); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean equals(Object obj) { return Equality.equalsValueSnak(this, obj); } @Override public String toString() { return ToString.toString(this); } /** * Infer the JSON datatype (represented as a string) from the data value. * @param value * the datavalue used as snak value * @return * the JSON type (possibly null) to include in the serialization of the snak */ protected static String getJsonPropertyTypeForValueType(Value value) { if (value instanceof TimeValue) { return DatatypeIdImpl.JSON_DT_TIME; } else if (value instanceof ItemIdValue) { return DatatypeIdImpl.JSON_DT_ITEM; } else if (value instanceof PropertyIdValue) { return DatatypeIdImpl.JSON_DT_PROPERTY; } else if (value instanceof StringValue) { return null; } else if (value instanceof GlobeCoordinatesValue) { return DatatypeIdImpl.JSON_DT_GLOBE_COORDINATES; } else if (value instanceof QuantityValue) { return DatatypeIdImpl.JSON_DT_QUANTITY; } else if (value instanceof MonolingualTextValue) { return DatatypeIdImpl.JSON_DT_MONOLINGUAL_TEXT; } else { throw new UnsupportedOperationException("Unsupported value type " + value.getClass()); } } } package-info.java000066400000000000000000000017151444772566300357710ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/implementation/** * Basic implementation for objects representing Wikibase data and related factories. * Main package for Jackson-compatible implementations of the WDTK datamodel * interfaces. * * @author Fredo Erxleben * * @author Markus Kroetzsch */ package org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/000077500000000000000000000000001444772566300317535ustar00rootroot00000000000000AliasUpdate.java000066400000000000000000000054171444772566300347420ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import org.wikidata.wdtk.datamodel.implementation.AliasUpdateImpl; /** * Collection of changes made to entity aliases. This class represents changes * in single language only. Alias update consists either of added (see * {@link #getAdded()}) and removed (see {@link #getRemoved()}) aliases or a new * list of aliases that completely replace old aliases (see * {@link #getRecreated()}). */ public interface AliasUpdate { /** * Empty update that does not alter or remove any aliases. */ AliasUpdate EMPTY = new AliasUpdateImpl(null, Collections.emptyList(), Collections.emptyList()); /** * Checks whether the update is empty. Empty update will not alter alias list in * any way. * * @return {@code true} if the update is empty, {@code false} otherwise */ boolean isEmpty(); /** * Returns language code of aliases in this update. Language code is only * available for non-empty updates. * * @return alias language code or {@link Optional#empty()} when the update is * empty */ Optional getLanguageCode(); /** * Returns the new list of aliases that completely replaces current aliases. If * this list is present, then the update contains no added/removed aliases. * * @return new list of aliases or {@link Optional#empty()} if aliases are not * being recreated */ Optional> getRecreated(); /** * Returns aliases added in this update. If there are any added aliases, then * {@link #getRecreated()} must return {@link Optional#empty()}. It is however * possible to add and remove aliases in the same update. * * @return added aliases */ List getAdded(); /** * Returns aliases removed in this update. If there are any removed aliases, * then {@link #getRecreated()} must return {@link Optional#empty()}. It is * however possible to add and remove aliases in the same update. * * @return removed aliases */ Set getRemoved(); } Claim.java000066400000000000000000000042751444772566300335740ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Iterator; import java.util.List; /** * Interface for Wikidata claims. Claims consist of those parts of Wikibase * Statements that express a claim about a subject entity, such as the claim * that Berlin has 3 million inhabitants. 
Additional information, such as * references and ranks, are not part of the claim. * * @author Markus Kroetzsch * */ public interface Claim { /** * The subject that the claim refers to, e.g., the id of "Berlin". * * @return EntityId of the subject */ EntityIdValue getSubject(); /** * Main Snak of the statement. This Snak refers directly to the subject, * e.g., the {@link ValueSnak} "Population: 3000000". * * @return the main snak */ Snak getMainSnak(); /** * Groups of auxiliary Snaks, also known as qualifiers, that provide * additional context information for this claim. For example, "as of: 2014" * might be a temporal context given for a claim that provides a population * number. The snaks are grouped by the property that they use. * * @return list of snak groups */ List getQualifiers(); /** * Returns an iterator over all qualifiers, without considering qualifier * groups. The relative order of qualifiers is preserved. * * @return iterator over all qualifier snaks */ Iterator getAllQualifiers(); /** * Convenience method to get the value of the claim's main snak, or null if * there is none. * * @return main value of the claim, or null */ Value getValue(); } DataObjectFactory.java000066400000000000000000000744471444772566300361070ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.math.BigDecimal; import java.util.Collection; import java.util.List; import java.util.Map; import org.wikidata.wdtk.datamodel.helpers.FormUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder; import org.wikidata.wdtk.datamodel.helpers.ItemUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.LexemeUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.MediaInfoUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.PropertyDocumentBuilder; import org.wikidata.wdtk.datamodel.helpers.PropertyUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.ReferenceBuilder; import org.wikidata.wdtk.datamodel.helpers.SenseUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; /** * Interface for factories that create data objects that implement the * interfaces from this package. * * @author Markus Kroetzsch * */ public interface DataObjectFactory { /** * Creates an {@link ItemIdValue}. * * @param id * a string of the form Qn... where n... 
is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return an {@link ItemIdValue} corresponding to the input */ ItemIdValue getItemIdValue(String id, String siteIri); /** * Creates a {@link PropertyIdValue}. * * @param id * a string of the form Pn... where n... is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return a {@link PropertyIdValue} corresponding to the input */ PropertyIdValue getPropertyIdValue(String id, String siteIri); /** * Creates a {@link LexemeIdValue}. * * @param id * a string of the form Ln... where n... is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return a {@link LexemeIdValue} corresponding to the input */ LexemeIdValue getLexemeIdValue(String id, String siteIri); /** * Creates a {@link FormIdValue}. * * @param id * a string of the form Ln...-Fm... where n... and m... are the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return a {@link FormIdValue} corresponding to the input */ FormIdValue getFormIdValue(String id, String siteIri); /** * Creates a {@link SenseIdValue}. * * @param id * a string of the form Ln...-Sm... where n... and m... are the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return a {@link SenseIdValue} corresponding to the input */ SenseIdValue getSenseIdValue(String id, String siteIri); /** * Creates a {@link MediaInfoIdValue}. * * @param id * a string of the form Mn... where n... is the string * representation of a positive integer number * @param siteIri * IRI to identify the site, usually the first part of the entity * IRI of the site this belongs to, e.g., * "http://www.wikidata.org/entity/" * @return a {@link MediaInfoIdValue} corresponding to the input */ MediaInfoIdValue getMediaInfoIdValue(String id, String siteIri); /** * Creates a {@link DatatypeIdValue}. The datatype IRI is usually one of the * constants defined in {@link DatatypeIdValue}, but this is not enforced, * since there might be extensions that provide additional types. * * @param id * the IRI string that identifies the datatype * @return a {@link DatatypeIdValue} corresponding to the input * @deprecated use {@link #getDatatypeIdValueFromJsonId(String)} */ DatatypeIdValue getDatatypeIdValue(String id); /** * Creates a {@link DatatypeIdValue}. The datatype IRI is usually one of the * constants defined in {@link DatatypeIdValue}, but this is not enforced, * since there might be extensions that provide additional types. The JSON * string is its representation in the JSON serialization of properties. * * @param jsonId * the JSON representation of this datatype, which cannot be null * @return a {@link DatatypeIdValue} corresponding to the input */ DatatypeIdValue getDatatypeIdValueFromJsonId(String jsonId); /** * Creates a {@link TimeValue}. 
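* <p>
* For example, 13 May 2014 at day precision in the proleptic Gregorian
* calendar could be created as follows (sketch; {@code factory} stands
* for any {@link DataObjectFactory} implementation):
* <pre>
* TimeValue time = factory.getTimeValue(2014, (byte) 5, (byte) 13,
*         (byte) 0, (byte) 0, (byte) 0, TimeValue.PREC_DAY,
*         0, 1, 0, TimeValue.CM_GREGORIAN_PRO);
* </pre>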
* * @param year * a year number, where 0 refers to 1BCE * @param month * a month number between 1 and 12 * @param day * a day number between 1 and 31 * @param hour * an hour number between 0 and 23 * @param minute * a minute number between 0 and 59 * @param second * a second number between 0 and 60 (possible leap second) * @param precision * a value in the range of {@link TimeValue#PREC_DAY}, ..., * {@link TimeValue#PREC_1GY} * @param beforeTolerance * non-negative integer tolerance before the value; see * {@link TimeValue#getBeforeTolerance()} * @param afterTolerance * non-zero, positive integer tolerance after the value; see * {@link TimeValue#getAfterTolerance()} * @param timezoneOffset * offset in minutes that should be applied when displaying this * time * @param calendarModel * the IRI of the calendar model preferred when displaying the * date; usually {@link TimeValue#CM_GREGORIAN_PRO} or * {@link TimeValue#CM_JULIAN_PRO} * @return a {@link TimeValue} corresponding to the input */ TimeValue getTimeValue(long year, byte month, byte day, byte hour, byte minute, byte second, byte precision, int beforeTolerance, int afterTolerance, int timezoneOffset, String calendarModel); /** * Creates a {@link GlobeCoordinatesValue}. * * @param latitude * the latitude of the coordinates in degrees * @param longitude * the longitude of the coordinates in degrees * @param precision * the precision of the coordinates in degrees * @param globeIri * IRI specifying the celestial object of the coordinates * @return a {@link GlobeCoordinatesValue} corresponding to the input */ GlobeCoordinatesValue getGlobeCoordinatesValue(double latitude, double longitude, double precision, String globeIri); /** * Creates a {@link StringValue}. * * @param string * the string content of the value * @return a {@link StringValue} corresponding to the input */ StringValue getStringValue(String string); /** * Creates a {@link MonolingualTextValue}. * * @param text * the text of the value * @param languageCode * the language code of the value * @return a {@link MonolingualTextValue} corresponding to the input */ MonolingualTextValue getMonolingualTextValue(String text, String languageCode); /** * Creates a {@link QuantityValue} without a unit of measurement and * without bounds. * * @param numericValue * the numeric value of this quantity * @return a {@link QuantityValue} corresponding to the input */ QuantityValue getQuantityValue(BigDecimal numericValue); /** * Creates a {@link QuantityValue} without a unit of measurement. * * @param numericValue * the numeric value of this quantity * @param lowerBound * the lower bound of the numeric value of this quantity * @param upperBound * the upper bound of the numeric value of this quantity * @return a {@link QuantityValue} corresponding to the input */ QuantityValue getQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound); /** * Creates a {@link QuantityValue} without bounds. * * @param numericValue * the numeric value of this quantity * @param unit * the unit of this quantity, or the empty string if there is no * unit * @return a {@link QuantityValue} corresponding to the input * @deprecated use {@link #getQuantityValue(BigDecimal, ItemIdValue)} */ @Deprecated QuantityValue getQuantityValue(BigDecimal numericValue, String unit); /** * Creates a {@link QuantityValue} without bounds.
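* <p>
* A sketch of the preferred, non-deprecated variant (assuming Q828224
* is the item for the kilometre):
* <pre>
* ItemIdValue km = factory.getItemIdValue("Q828224",
*         "http://www.wikidata.org/entity/");
* QuantityValue q = factory.getQuantityValue(new BigDecimal("42"), km);
* </pre>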
* * @param numericValue * the numeric value of this quantity * @param unit * the unit of this quantity, or null if there is no * unit * @return a {@link QuantityValue} corresponding to the input */ QuantityValue getQuantityValue(BigDecimal numericValue, ItemIdValue unit); /** * Creates a {@link QuantityValue}. * * @param numericValue * the numeric value of this quantity * @param lowerBound * the lower bound of the numeric value of this quantity * @param upperBound * the upper bound of the numeric value of this quantity * @param unit * the unit of this quantity, or the empty string if there is no * unit * @return a {@link QuantityValue} corresponding to the input * @deprecated use {@link #getQuantityValue(BigDecimal, BigDecimal, BigDecimal, ItemIdValue)} */ @Deprecated QuantityValue getQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound, String unit); /** * Creates a {@link QuantityValue}. * * @param numericValue * the numeric value of this quantity * @param lowerBound * the lower bound of the numeric value of this quantity * @param upperBound * the upper bound of the numeric value of this quantity * @param unit * the unit of this quantity, or null if there is no * unit * @return a {@link QuantityValue} corresponding to the input */ QuantityValue getQuantityValue(BigDecimal numericValue, BigDecimal lowerBound, BigDecimal upperBound, ItemIdValue unit); /** * Creates a {@link ValueSnak}. * * @param propertyId * @param value * @return a {@link ValueSnak} corresponding to the input */ ValueSnak getValueSnak(PropertyIdValue propertyId, Value value); /** * Creates a {@link SomeValueSnak}. * * @param propertyId * @return a {@link SomeValueSnak} corresponding to the input */ SomeValueSnak getSomeValueSnak(PropertyIdValue propertyId); /** * Creates a {@link NoValueSnak}. * * @param propertyId * @return a {@link NoValueSnak} corresponding to the input */ NoValueSnak getNoValueSnak(PropertyIdValue propertyId); /** * Creates a {@link SnakGroup}. * * @param snaks * a non-empty list of snaks that use the same property * @return a {@link SnakGroup} corresponding to the input */ SnakGroup getSnakGroup(List snaks); /** * Creates a {@link Claim}. It might be more convenient to use * {@link #getStatement} directly if you want to build a statement. * * @param subject * the subject the Statement refers to * @param mainSnak * the main Snak of the Statement * @param qualifiers * the qualifiers of the Statement, grouped in SnakGroups * @return a {@link Claim} corresponding to the input */ Claim getClaim(EntityIdValue subject, Snak mainSnak, List qualifiers); /** * Creates a {@link Reference}. It might be more convenient to use * {@link ReferenceBuilder} instead. * * @param snakGroups * list of snak groups * @return a {@link Reference} corresponding to the input */ Reference getReference(List snakGroups); /** * Creates a {@link Statement}. It might be more convenient to use * {@link StatementBuilder} instead. *
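* A minimal sketch (assuming {@code factory}, {@code subject} and
* {@code mainSnak} are in scope):
* <pre>
* Statement st = factory.getStatement(subject, mainSnak,
*         Collections.emptyList(), Collections.emptyList(),
*         StatementRank.NORMAL, "");
* </pre>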

* The string id is used mainly for communication with a Wikibase site, in * order to refer to statements of that site. When creating new statements * that are not on any site, the empty string can be used. * * @param subject * the subject the Statement refers to * @param mainSnak * the main Snak of the Statement * @param qualifiers * the qualifiers of the Statement, grouped in SnakGroups * @param references * the references for the Statement * @param rank * the rank of the Statement * @param statementId * the string id of the Statement * @return a {@link Statement} corresponding to the input */ Statement getStatement(EntityIdValue subject, Snak mainSnak, List qualifiers, List references, StatementRank rank, String statementId); /** * Creates a {@link Statement}. It might be more convenient to use * {@link StatementBuilder} instead. *

* The string id is used mainly for communication with a Wikibase site, in * order to refer to statements of that site. When creating new statements * that are not on any site, the empty string can be used. * * @param claim * the main claim the Statement refers to * @param references * the references for the Statement * @param rank * the rank of the Statement * @param statementId * the string id of the Statement * @return a {@link Statement} corresponding to the input */ Statement getStatement(Claim claim, List references, StatementRank rank, String statementId); /** * Creates a {@link StatementGroup}. * * @param statements * a non-empty list of statements that use the same subject and * main-snak property in their claim * @return a {@link StatementGroup} corresponding to the input */ StatementGroup getStatementGroup(List statements); /** * Creates a {@link SiteLink}. * * @param title * the title string of the linked page, including namespace * prefixes if any * @param siteKey * the string key of the site of the linked article * @param badges * the list of badges of the linked article * @return a {@link SiteLink} corresponding to the input */ SiteLink getSiteLink(String title, String siteKey, List badges); /** * Creates a {@link PropertyDocument}. It might be more convenient to use * the {@link PropertyDocumentBuilder} instead. * * @param propertyId * the id of the property that data is about * @param labels * the list of labels of this property, with at most one label * for each language code * @param descriptions * the list of descriptions of this property, with at most one * description for each language code * @param aliases * the list of aliases of this property * @param statementGroups * the list of statement groups of this item; all of them must * have the given itemIdValue as their subject * @param datatypeId * the datatype of that property * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} * @return a {@link PropertyDocument} corresponding to the input */ PropertyDocument getPropertyDocument(PropertyIdValue propertyId, List labels, List descriptions, List aliases, List statementGroups, DatatypeIdValue datatypeId, long revisionId); /** * Creates an {@link ItemDocument}. It might be more convenient to use the * {@link ItemDocumentBuilder} instead. * * @param itemIdValue * the id of the item that data is about * @param labels * the list of labels of this item, with at most one label for * each language code * @param descriptions * the list of descriptions of this item, with at most one * description for each language code * @param aliases * the list of aliases of this item * @param statementGroups * the list of statement groups of this item; all of them must * have the given itemIdValue as their subject * @param siteLinks * the sitelinks of this item by site key * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} * @return an {@link ItemDocument} corresponding to the input */ ItemDocument getItemDocument(ItemIdValue itemIdValue, List labels, List descriptions, List aliases, List statementGroups, Map siteLinks, long revisionId); /** * Creates an {@link LexemeDocument}. * * @param lexemeIdValue * the id of the lexeme that data is about * @param lexicalCategory * the lexical category to which the lexeme belongs * (noun, verb...) * @param language * the language to which the lexeme belongs * (French, British English...) 
* @param lemmas * the human-readable representations of the lexeme * @param statementGroups * the list of statement groups of this lexeme; all of them must * have the given lexemeIdValue as their subject * @param forms * the forms of the lexeme * @param senses * the senses of the lexeme * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} * @return a {@link LexemeDocument} corresponding to the input */ LexemeDocument getLexemeDocument(LexemeIdValue lexemeIdValue, ItemIdValue lexicalCategory, ItemIdValue language, List<MonolingualTextValue> lemmas, List<StatementGroup> statementGroups, List<FormDocument> forms, List<SenseDocument> senses, long revisionId); /** * Creates a {@link FormDocument}. * * @param formIdValue * the id of the form that data is about * @param representations * the list of representations of this form, with at most one * representation for each language code * @param grammaticalFeatures * the grammatical features of the form * @param statementGroups * the list of statement groups of this form; all of them must * have the given formIdValue as their subject * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} * @return a {@link FormDocument} corresponding to the input */ FormDocument getFormDocument(FormIdValue formIdValue, List<MonolingualTextValue> representations, List<ItemIdValue> grammaticalFeatures, List<StatementGroup> statementGroups, long revisionId); /** * Creates a {@link SenseDocument}. * * @param senseIdValue * the id of the sense that data is about * @param glosses * the list of glosses of this sense, with at most one * gloss for each language code * @param statementGroups * the list of statement groups of this sense; all of them must * have the given senseIdValue as their subject * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} * @return a {@link SenseDocument} corresponding to the input */ SenseDocument getSenseDocument(SenseIdValue senseIdValue, List<MonolingualTextValue> glosses, List<StatementGroup> statementGroups, long revisionId); /** * Creates a {@link MediaInfoDocument}. * * @param mediaInfoIdValue * the id of the media info that data is about * @param labels * the list of labels of this media info, with at most one label for * each language code * @param statementGroups * the list of statement groups of this media info; all of them must * have the given mediaInfoIdValue as their subject * @param revisionId * the revision ID or 0 if not known; see * {@link EntityDocument#getRevisionId()} * @return a {@link MediaInfoDocument} corresponding to the input */ MediaInfoDocument getMediaInfoDocument(MediaInfoIdValue mediaInfoIdValue, List<MonolingualTextValue> labels, List<StatementGroup> statementGroups, long revisionId); /** * Creates new {@link TermUpdate}. It might be more convenient to * use {@link TermUpdateBuilder}. * * @param modified * added or changed values * @param removed * language codes of removed values * @return new {@link TermUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ TermUpdate getTermUpdate( Collection<MonolingualTextValue> modified, Collection<String> removed); /** * Creates new {@link AliasUpdate}. Callers should specify either the * {@code recreated} parameter or the {@code added} and {@code removed} parameters, * because combining the two update approaches is not possible. To remove * all aliases, pass an empty list in the {@code recreated} parameter.
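* <p>
* For example, adding one English alias and removing another in a
* single update (sketch; {@code en} stands for any helper that builds
* English {@link MonolingualTextValue} instances):
* <pre>
* AliasUpdate update = factory.getAliasUpdate(null,
*         Collections.singletonList(en("WDTK")),
*         Collections.singletonList(en("Wikidata Toolkit")));
* </pre>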
* * @param recreated * new list of aliases that completely replaces the old ones or * {@code null} to not recreate aliases * @param added * aliases added in this update or empty collection for no additions * @param removed * aliases removed in this update or empty collection for no removals * @return new {@link AliasUpdate} * @throws NullPointerException * if {@code added}, {@code removed}, or any alias is {@code null} * @throws IllegalArgumentException * if given invalid combination of parameters */ AliasUpdate getAliasUpdate( List recreated, List added, Collection removed); /** * Creates new {@link StatementUpdate}. It might be more convenient to use * {@link StatementUpdateBuilder}. * * @param added * added statements * @param replaced * replaced statements * @param removed * IDs of removed statements * @return new {@link StatementUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ StatementUpdate getStatementUpdate( Collection added, Collection replaced, Collection removed); /** * Creates new {@link SenseUpdate}. It might be more convenient to use * {@link SenseUpdateBuilder}. * * @param entityId * ID of the sense that is to be updated * @param revisionId * base sense revision to be updated or zero if not available * @param glosses * changes in sense glosses or {@code null} for no change * @param statements * changes in entity statements, possibly empty * @return new {@link SenseUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ SenseUpdate getSenseUpdate( SenseIdValue entityId, long revisionId, TermUpdate glosses, StatementUpdate statements); /** * Creates new {@link FormUpdate}. It might be more convenient to use * {@link FormUpdateBuilder}. * * @param entityId * ID of the form that is to be updated * @param revisionId * base form revision to be updated or zero if not available * @param representations * changes in form representations or {@code null} for no change * @param grammaticalFeatures * new grammatical features of the form or {@code null} for no change * @param statements * changes in entity statements, possibly empty * @return new {@link FormUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ FormUpdate getFormUpdate( FormIdValue entityId, long revisionId, TermUpdate representations, Collection grammaticalFeatures, StatementUpdate statements); /** * Creates new {@link LexemeUpdate}. It might be more convenient to use * {@link LexemeUpdateBuilder}. 
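* <p>
* Because of the large number of parameters, a typical call passes
* "no change" values for most of them (sketch; {@code lexemeId} is
* assumed to exist):
* <pre>
* StatementUpdate noStatements = factory.getStatementUpdate(
*         Collections.emptyList(), Collections.emptyList(),
*         Collections.emptyList());
* LexemeUpdate update = factory.getLexemeUpdate(lexemeId, 0,
*         null, null, null, noStatements,
*         Collections.emptyList(), Collections.emptyList(),
*         Collections.emptyList(), Collections.emptyList(),
*         Collections.emptyList(), Collections.emptyList());
* </pre>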
* * @param entityId * ID of the lexeme that is to be updated * @param revisionId * base lexeme revision to be updated or zero if not available * @param language * new lexeme language or {@code null} for no change * @param lexicalCategory * new lexical category of the lexeme or {@code null} for no change * @param lemmas * changes in lemmas or {@code null} for no change * @param statements * changes in entity statements, possibly empty * @param addedSenses * added senses * @param updatedSenses * updated senses * @param removedSenses * IDs of removed senses * @param addedForms * added forms * @param updatedForms * updated forms * @param removedForms * IDs of removed forms * @return new {@link LexemeUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ LexemeUpdate getLexemeUpdate( LexemeIdValue entityId, long revisionId, ItemIdValue language, ItemIdValue lexicalCategory, TermUpdate lemmas, StatementUpdate statements, Collection addedSenses, Collection updatedSenses, Collection removedSenses, Collection addedForms, Collection updatedForms, Collection removedForms); /** * Creates new {@link MediaInfoUpdate}. It might be more convenient to use * {@link MediaInfoUpdateBuilder}. * * @param entityId * ID of the media that is to be updated * @param revisionId * base media revision to be updated or zero if not available * @param labels * changes in entity labels or {@code null} for no change * @param statements * changes in entity statements, possibly empty * @return new {@link MediaInfoUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ MediaInfoUpdate getMediaInfoUpdate( MediaInfoIdValue entityId, long revisionId, TermUpdate labels, StatementUpdate statements); /** * Creates new {@link ItemUpdate}. It might be more convenient to use * {@link ItemUpdateBuilder}. * * @param entityId * ID of the item that is to be updated * @param revisionId * base item revision to be updated or zero if not available * @param labels * changes in entity labels or {@code null} for no change * @param descriptions * changes in entity descriptions or {@code null} for no change * @param aliases * changes in entity aliases, possibly empty * @param statements * changes in entity statements, possibly empty * @param modifiedSiteLinks * added or replaced site links * @param removedSiteLinks * site keys of removed site links * @return new {@link ItemUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ ItemUpdate getItemUpdate( ItemIdValue entityId, long revisionId, TermUpdate labels, TermUpdate descriptions, Map aliases, StatementUpdate statements, Collection modifiedSiteLinks, Collection removedSiteLinks); /** * Creates new {@link PropertyUpdate}. It might be more convenient to use * {@link PropertyUpdateBuilder}. 
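* <p>
* For example, changing only the labels of a property (sketch;
* {@code propertyId} and {@code labels} are assumed, the latter built
* with {@link TermUpdateBuilder}):
* <pre>
* PropertyUpdate update = factory.getPropertyUpdate(propertyId, 0,
*         labels, null, Collections.emptyMap(),
*         factory.getStatementUpdate(Collections.emptyList(),
*                 Collections.emptyList(), Collections.emptyList()));
* </pre>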
* * @param entityId * ID of the property entity that is to be updated * @param revisionId * base property revision to be updated or zero if not available * @param labels * changes in entity labels or {@code null} for no change * @param descriptions * changes in entity descriptions or {@code null} for no change * @param aliases * changes in entity aliases, possibly empty * @param statements * changes in entity statements, possibly empty * @return new {@link PropertyUpdate} * @throws NullPointerException * if any required parameter is {@code null} * @throws IllegalArgumentException * if any parameters or their combination is invalid */ PropertyUpdate getPropertyUpdate( PropertyIdValue entityId, long revisionId, TermUpdate labels, TermUpdate descriptions, Map aliases, StatementUpdate statements); } DatatypeIdValue.java000066400000000000000000000143061444772566300355700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A value that represents one of the available Wikibase datatypes. The method * {@link DatatypeIdValue#getIri() getIri()} will always return one of the * datatype IRIs defined in this interface. * * @author Markus Kroetzsch * */ public interface DatatypeIdValue { /** * IRI of the item datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_ITEM */ String DT_ITEM = "http://wikiba.se/ontology#WikibaseItem"; /** * IRI of the property datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_PROPERTY */ String DT_PROPERTY = "http://wikiba.se/ontology#WikibaseProperty"; /** * IRI of the lexeme datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_LEXEME */ String DT_LEXEME = "http://wikiba.se/ontology#WikibaseLexeme"; /** * IRI of the form datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_FORM */ String DT_FORM = "http://wikiba.se/ontology#WikibaseForm"; /** * IRI of the sense datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_SENSE */ String DT_SENSE = "http://wikiba.se/ontology#WikibaseSense"; /** * IRI of the media info datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_MEDIA_INFO */ String DT_MEDIA_INFO = "http://wikiba.se/ontology#WikibaseMediaInfo"; /** * IRI of the string datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_STRING */ String DT_STRING = "http://wikiba.se/ontology#String"; /** * IRI of the URL datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_URL */ String DT_URL = "http://wikiba.se/ontology#Url"; /** * IRI of the Commons media datatype in Wikibase. 
* @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_COMMONS_MEDIA */ String DT_COMMONS_MEDIA = "http://wikiba.se/ontology#CommonsMedia"; /** * IRI of the time datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_TIME */ String DT_TIME = "http://wikiba.se/ontology#Time"; /** * IRI of the globe coordinates datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_GLOBE_COORDINATES */ String DT_GLOBE_COORDINATES = "http://wikiba.se/ontology#GlobeCoordinate"; /** * IRI of the quantity datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_QUANTITY */ String DT_QUANTITY = "http://wikiba.se/ontology#Quantity"; /** * IRI of the monolingual text datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_MONOLINGUAL_TEXT */ String DT_MONOLINGUAL_TEXT = "http://wikiba.se/ontology#Monolingualtext"; /** * IRI of the external identifier datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_EXTERNAL_ID */ String DT_EXTERNAL_ID = "http://wikiba.se/ontology#ExternalId"; /** * IRI of the math datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_MATH */ String DT_MATH = "http://wikiba.se/ontology#Math"; /** * IRI of the geo shape datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_GEO_SHAPE */ String DT_GEO_SHAPE = "http://wikiba.se/ontology#GeoShape"; /** * IRI of the tabular data datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_TABULAR_DATA */ String DT_TABULAR_DATA = "http://wikiba.se/ontology#TabularData"; /** * IRI of the extended date time format (EDTF) datatype in Wikibase. * @deprecated use org.wikidata.wdtk.rdf.Vocabulary.DT_EDTF */ String DT_EDTF = "http://wikiba.se/ontology#Edtf"; /** * String used to refer to the property datatype for Wikibase items, in JSON. */ String JSON_DT_ITEM = "wikibase-item"; /** * String used to refer to the property datatype for Wikibase properties, in JSON. */ String JSON_DT_PROPERTY = "wikibase-property"; /** * String used to refer to the property datatype for globe coordinates, in JSON. */ String JSON_DT_GLOBE_COORDINATES = "globe-coordinate"; /** * String used to refer to the property datatype for urls, in JSON. */ String JSON_DT_URL = "url"; /** * String used to refer to the property datatype for Commons media files, in JSON. */ String JSON_DT_COMMONS_MEDIA = "commonsMedia"; /** * String used to refer to the property datatype for time values, in JSON. */ String JSON_DT_TIME = "time"; /** * String used to refer to the property datatype for quantities, in JSON. */ String JSON_DT_QUANTITY = "quantity"; /** * String used to refer to the property datatype for strings, in JSON. */ String JSON_DT_STRING = "string"; /** * String used to refer to the property datatype for monolingual text, in JSON. */ String JSON_DT_MONOLINGUAL_TEXT = "monolingualtext"; /** * String used to refer to the property datatype for external identifiers, in JSON. */ String JSON_DT_EXTERNAL_ID = "external-id"; /** * String used to refer to the property datatype for mathematical expressions, in JSON. */ String JSON_DT_MATH = "math"; /** * String used to refer to the property datatype for Geo shapes, in JSON. */ String JSON_DT_GEO_SHAPE = "geo-shape"; /** * String used to refer to the property datatype for EDTF dates, in JSON. */ String JSON_DT_EDTF = "edtf"; /** * Get the IRI of this entity. 
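* <p>
* For example, for the time datatype ({@code "time"} in JSON, see
* {@link #JSON_DT_TIME}) this returns
* {@code http://wikiba.se/ontology#Time}, i.e. {@link #DT_TIME}.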
* * @return String with the IRI * @deprecated use org.wikidata.wdtk.rdf.AbstractRdfConverter.getDatatypeIri() from the wdtk-rdf module */ String getIri(); /** * The string identifying this datatype in the JSON serialization of a property. */ String getJsonString(); } DocumentDataFilter.java000066400000000000000000000141241444772566300362570ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Set; /** * This class is used to describe restrictions to data in {@link EntityDocument} * objects. This is used, e.g., to restrict only part of the data when copying * objects. * * @author Markus Kroetzsch */ public class DocumentDataFilter { /** * Set of language codes to restrict terms (labels, descriptions, aliases). * If set to null, terms will not be restricted. */ private Set languageFilter = null; /** * Set of property id values to restrict statements. If set to null, * statements will not be restricted. */ private Set propertyFilter = null; /** * Set of site keys to restrict site keys. If set to null, site links will * not be restricted. */ private Set siteLinkFilter = null; /** * Returns the (possibly empty) set of language codes that are used to * filter data, or null if no such filter is configured (default). If not * equal to null, only terms in the given languages will be included. * * @return set of language codes to use for filtering */ public Set getLanguageFilter() { return this.languageFilter; } /** * Sets the (possibly empty) set of language codes that are used to filter * data. Setting this to null disables this filter (this is the default). If * not equal to null, only terms in the given language will be included. *
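* <p>
* For example, to keep only English terms (sketch):
* <pre>
* DocumentDataFilter filter = new DocumentDataFilter();
* filter.setLanguageFilter(Collections.singleton("en"));
* </pre>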

* The language filter is not applied to monolingual text values in * statements. Only labels, descriptions, and aliases are filtered. * * @param languageFilter * set of language codes to restrict to */ public void setLanguageFilter(Set languageFilter) { this.languageFilter = languageFilter; } /** * Returns the (possibly empty) set of {@link PropertyIdValue} objects that * are used to filter statements, or null if no such filter is configured * (default). If not equal to null, only statements using the given * properties will be included. * * @return set of properties to use for filtering */ public Set getPropertyFilter() { return this.propertyFilter; } /** * Sets the (possibly empty) set of {@link PropertyIdValue} objects that are * used to filter statements. Setting this to null disables this filter * (this is the default). If not equal to null, only statements using the * given properties will be included. *

* The property filter is not applied to qualifiers and references in * statements. Only the main property of statements is filtered. * * @param propertyFilter * set of properties to use for filtering */ public void setPropertyFilter(Set propertyFilter) { this.propertyFilter = propertyFilter; } /** * Returns the (possibly empty) set of site keys that are used to filter * {@link SiteLink} objects, or null if no such filter is configured * (default). If not equal to null, only site links for the given sites will * be included. * * @return set of site keys to use for filtering */ public Set getSiteLinkFilter() { return this.siteLinkFilter; } /** * Sets the (possibly empty) set of site keys that are used to filter * {@link SiteLink} objects. Setting this to null disables this filter (this * is the default). If not equal to null, only site links for the given * sites will be included. * * @param siteLinkFilter * set of site keys to use for filtering */ public void setSiteLinkFilter(Set siteLinkFilter) { this.siteLinkFilter = siteLinkFilter; } /** * Returns true if the given language is included (not filtered). * * @param languageCode * code of the language to check * @return true if there is no language filter, or a language filter that * includes the given language */ public boolean includeLanguage(String languageCode) { return this.languageFilter == null || this.languageFilter.contains(languageCode); } /** * Returns true if the given property is included (not filtered). * * @param propertyIdValue * property id to check * @return true if there is no property filter, or a property filter that * includes the given property */ public boolean includePropertyId(PropertyIdValue propertyIdValue) { return this.propertyFilter == null || this.propertyFilter.contains(propertyIdValue); } /** * Returns true if the given site link is included (not filtered). * * @param siteLink * key of the site to check * @return true if there is no site link filter, or a site link filter that * includes the given site */ public boolean includeSiteLink(String siteLink) { return this.siteLinkFilter == null || this.siteLinkFilter.contains(siteLink); } /** * Returns true if terms in all languages are excluded. * * @return true if all terms are excluded */ public boolean excludeAllLanguages() { return this.languageFilter != null && this.languageFilter.isEmpty(); } /** * Returns true if statements for all properties are excluded. * * @return true if all statements are excluded */ public boolean excludeAllProperties() { return this.propertyFilter != null && this.propertyFilter.isEmpty(); } /** * Returns true if site links for all sites are excluded. * * @return true if all site links are excluded */ public boolean excludeAllSiteLinks() { return this.siteLinkFilter != null && this.siteLinkFilter.isEmpty(); } } EntityDocument.java000066400000000000000000000030221444772566300355070ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Interface for datasets that describe an entity. * * @author Markus Kroetzsch * */ public interface EntityDocument { /** * Returns the ID of the entity that the data refers to. * * @return entity id */ EntityIdValue getEntityId(); /** * Returns the revision ID of this document, or 0 if no id is known. The * revision ID is a number stored by MediaWiki to indicate the version of a * document. It is based on a global counter that is incremented on each * edit. Not all sources of entity document data may provide the revision * ID, as it is not strictly part of the data, but part of the document * metadata. * * @return revision id */ long getRevisionId(); /** * Returns a copy of this document with an updated revision id. */ EntityDocument withRevisionId(long newRevisionId); } EntityDocumentDumpProcessor.java000066400000000000000000000034751444772566300402450ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Interface for classes that process a list of {@link EntityDocument} objects. * The difference from {@link EntityDocumentProcessor} is that there are * additional methods to start and end processing. They can be used to do * initial processing steps (open files, write headers etc.) and final * processing steps (closing files etc.), respectively. *
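<p>
* A minimal calling sketch (the concrete processor implementation and the item
* documents are assumed to come from elsewhere):
* <pre>{@code
* void writeAll(EntityDocumentDumpProcessor processor, List<ItemDocument> items) {
*     processor.open(); // prepare resources, write headers
*     for (ItemDocument item : items) {
*         processor.processItemDocument(item);
*     }
*     processor.close(); // flush and release resources
* }
* }</pre>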
<p>
* Implementations expect callers to invoke the methods in a strict order: first * {@link #open()}, followed by any number of calls to * {@link #processItemDocument(ItemDocument)} and * {@link #processPropertyDocument(PropertyDocument)}, and finally * {@link #close()}. Any other order of invocation may lead to undefined * results. In particular, implementations are not expected to guard against * such wrong use. * * @author Michael Günther * */ public interface EntityDocumentDumpProcessor extends EntityDocumentProcessor { /** * Starts the processing by performing any initial steps to prepare * processing. */ void open(); /** * Finishes the processing by performing any final steps, such as closing * resources. */ void close(); } EntityDocumentProcessor.java000066400000000000000000000037501444772566300374170ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Interface for classes that are able to process {@link EntityDocument} objects * in some way. Classes that implement this can subscribe to receive entity * documents as obtained, e.g., from parsing dump files. * * @author Markus Kroetzsch * */ public interface EntityDocumentProcessor { /** * Processes the given ItemDocument. * * @param itemDocument * the ItemDocument */ default void processItemDocument(ItemDocument itemDocument) { } /** * Processes the given PropertyDocument. * * @param propertyDocument * the PropertyDocument */ default void processPropertyDocument(PropertyDocument propertyDocument) { } /** * Processes the given LexemeDocument. * * @param lexemeDocument * the LexemeDocument */ default void processLexemeDocument(LexemeDocument lexemeDocument) { } /** * Processes the given MediaInfoDocument. * * @param mediaInfoDocument * the MediaInfoDocument */ default void processMediaInfoDocument(MediaInfoDocument mediaInfoDocument) { } /** * Processes the given EntityRedirectDocument. * * @param entityRedirectDocument * the EntityRedirectDocument */ default void processEntityRedirectDocument(EntityRedirectDocument entityRedirectDocument) { } } EntityDocumentProcessorBroker.java000066400000000000000000000043501444772566300405610ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.HashSet; import java.util.Set; /** * Simple broker implementation of {@link EntityDocumentProcessor} which * distributes entity documents to multiple registered listeners. * * @author Markus Kroetzsch * */ public class EntityDocumentProcessorBroker implements EntityDocumentProcessor { private final Set<EntityDocumentProcessor> entityDocumentProcessors = new HashSet<>(); /** * Registers a listener which will be called for all entity documents that * are processed. The method avoids duplicates in the sense that the exact * same object cannot be registered twice. * * @param entityDocumentProcessor * the listener to register */ public void registerEntityDocumentProcessor(EntityDocumentProcessor entityDocumentProcessor) { entityDocumentProcessors.add(entityDocumentProcessor); } @Override public void processItemDocument(ItemDocument itemDocument) { for (EntityDocumentProcessor entityDocumentProcessor : entityDocumentProcessors) { entityDocumentProcessor.processItemDocument(itemDocument); } } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { for (EntityDocumentProcessor entityDocumentProcessor : entityDocumentProcessors) { entityDocumentProcessor.processPropertyDocument(propertyDocument); } } @Override public void processLexemeDocument(LexemeDocument lexemeDocument) { for (EntityDocumentProcessor entityDocumentProcessor : entityDocumentProcessors) { entityDocumentProcessor.processLexemeDocument(lexemeDocument); } } } EntityDocumentProcessorFilter.java000066400000000000000000000045361444772566300405660ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.helpers.DatamodelFilter; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; /** * Implementation of {@link EntityDocumentProcessor} that acts as a filter, * removing some of the data from {@link EntityDocument} objects before passing * them on to another processor. There is an overhead involved in using this, * even if no filters are set, since a deep copy of the data is created to * filter it. * * * @author Markus Kroetzsch * */ public class EntityDocumentProcessorFilter implements EntityDocumentProcessor { private final EntityDocumentProcessor entityDocumentProcessor; private final DatamodelFilter datamodelFilter; /** * Constructor.
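<p>
* For illustration, a sketch of wiring this filter in front of another
* processor; {@code downstream} is an assumed, pre-existing
* {@link EntityDocumentProcessor}:
* <pre>{@code
* DocumentDataFilter documentDataFilter = new DocumentDataFilter();
* documentDataFilter.setSiteLinkFilter(Collections.singleton("enwiki"));
* EntityDocumentProcessor processor =
*         new EntityDocumentProcessorFilter(downstream, documentDataFilter);
* }</pre>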
* * @param entityDocumentProcessor * the processor to use on the filtered data * @param filter * the filter settings to be used */ public EntityDocumentProcessorFilter( EntityDocumentProcessor entityDocumentProcessor, DocumentDataFilter filter) { this.entityDocumentProcessor = entityDocumentProcessor; this.datamodelFilter = new DatamodelFilter(new DataObjectFactoryImpl(), filter); } @Override public void processItemDocument(ItemDocument itemDocument) { entityDocumentProcessor.processItemDocument(datamodelFilter.filter(itemDocument)); } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { entityDocumentProcessor.processPropertyDocument(datamodelFilter.filter(propertyDocument)); } @Override public void processLexemeDocument(LexemeDocument lexemeDocument) { entityDocumentProcessor.processLexemeDocument(datamodelFilter.filter(lexemeDocument)); } } EntityIdValue.java000066400000000000000000000075731444772566300353010ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * An entity is a Value that is represented by a page in Wikibase. It is * identified by its id, corresponding to the title of that page. Typical * entities are Items (with identifiers of the form Q1234) and Properties (with * identifiers of the form P1234). *
<p>
* When considering entities from multiple sites, the (local) ID alone is not * enough to identify an entity unambiguously. In this case, the site IRI also * needs to be taken into account. *
<p>
* An alternative to using the local ID and site IRI together is to use the full * IRI. By default, this is computed by appending the local ID to the site IRI. * However, for some sites and some entity types, more elaborate computations * might be required, so this construction scheme for IRIs should not be * presumed. *
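<p>
* For example, a sketch using the {@code Datamodel} helper of this toolkit;
* the printed values assume the default construction scheme on Wikidata:
* <pre>{@code
* ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q42");
* System.out.println(id.getSiteIri()); // http://www.wikidata.org/entity/
* System.out.println(id.getIri());     // http://www.wikidata.org/entity/Q42
* }</pre>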
<p>
* The full IRI of an entity is used in export formats like RDF, but also * internally, e.g., for identifying the calendar model of time values. * * @author Markus Kroetzsch * */ public interface EntityIdValue extends IriIdentifiedValue { /** * IRI of the type of an entity that is an item. */ String ET_ITEM = "http://www.wikidata.org/ontology#Item"; /** * IRI of the type of an entity that is a property. */ String ET_PROPERTY = "http://www.wikidata.org/ontology#Property"; /** * IRI of the type of an entity that is a lexeme. */ String ET_LEXEME = "http://www.wikidata.org/ontology#Lexeme"; /** * IRI of the type of an entity that is a form. */ String ET_FORM = "http://www.wikidata.org/ontology#Form"; /** * IRI of the type of an entity that is a sense. */ String ET_SENSE = "http://www.wikidata.org/ontology#Sense"; /** * IRI of the type of an entity that is a media info. */ String ET_MEDIA_INFO = "http://www.wikidata.org/ontology#MediaInfo"; /** * IRI of the type of an unsupported entity, when no type could be * detected from the JSON representation. The IRIs for ids associated * with type information are constructed using the same format as above. */ String ET_UNSUPPORTED = "http://www.wikidata.org/ontology#Unsupported"; /** * The site IRI of "local" identifiers. These are used to mark internal ids * that are not found on any external site. Components that send data to * external services or that create data exports should omit such ids, if * possible. */ String SITE_LOCAL = "http://localhost/entity/"; /** * Returns the type of this entity. This should be an IRI that identifies an * entity type, such as {@link EntityIdValue#ET_ITEM} or * {@link EntityIdValue#ET_PROPERTY}. * * @return IRI string to identify the type of the entity */ String getEntityType(); /** * Returns the id of this entity. * * @return String id of this entity */ String getId(); /** * Returns an IRI that identifies the site that this entity comes from, * e.g., "http://www.wikidata.org/entity/" for Wikidata. * * @return the site IRI string */ String getSiteIri(); /** * Checks whether this is a placeholder ID. Placeholder IDs, for example * {@link ItemIdValue#NULL}, are often used when creating new entities. * * @return {@code true} if this is a placeholder ID, {@code false} otherwise */ boolean isPlaceholder(); } EntityRedirectDocument.java000066400000000000000000000020741444772566300371770ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Interface for entity redirects. * * @author Thomas Pellissier Tanon * */ public interface EntityRedirectDocument extends EntityDocument { /** * Returns the ID of the entity that the redirect targets.
* * @return entity id */ EntityIdValue getTargetId(); @Override EntityRedirectDocument withRevisionId(long newRevisionId); } EntityUpdate.java000066400000000000000000000027311444772566300351610ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to an entity via Wikibase API. */ public interface EntityUpdate { /** * Returns the ID of the entity that is being updated. * * @return ID of the updated entity */ EntityIdValue getEntityId(); /** * Returns the entity revision upon which this update is built. This might not be * the latest revision of the entity as currently stored in Wikibase. If the base * revision was not provided, this method returns zero. * * @return entity revision that is being updated or zero */ long getBaseRevisionId(); /** * Checks whether the update is empty. An empty update will not change the entity * in any way. * * @return {@code true} if the update is empty, {@code false} otherwise */ boolean isEmpty(); } FormDocument.java000066400000000000000000000054001444772566300351400ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.List; import java.util.Map; import java.util.Set; /** * Interface for lexeme forms. * * @author Thomas Pellissier Tanon * */ public interface FormDocument extends StatementDocument { /** * Returns the ID of the entity that the data refers to. * * @return form id */ @Override FormIdValue getEntityId(); /** * Return the human-readable representations of the form, indexed by Wikimedia language code * * @return a map from Wikimedia language code to the representations */ Map<String, MonolingualTextValue> getRepresentations(); /** * Return the IDs of the grammatical features of the form (masculine, singular...) * * @return item ids */ List<ItemIdValue> getGrammaticalFeatures(); /** * Returns a new version of this document with updated ID. * * @param newEntityId * new ID of the document * @return document with updated ID */ FormDocument withEntityId(FormIdValue newEntityId); /** * Returns a copy of this document with an updated revision id.
*/ @Override FormDocument withRevisionId(long newRevisionId); FormDocument withRepresentation(MonolingualTextValue representation); FormDocument withGrammaticalFeature(ItemIdValue grammaticalFeature); /** * Returns a new version of this document which includes the * statement provided. If the identifier of this statement matches * that of any other statement for the same property, then the * existing statement will be replaced by the new one. Otherwise, * the new statement will be added at the end of the list of statements * in this group. * * @param statement * the statement to add or update in the document */ @Override FormDocument withStatement(Statement statement); /** * Returns a new version of this document where all statements matching * any of the statement ids provided have been removed. These statements * can use different properties. * * @param statementIds * the identifiers of the statements to remove */ @Override FormDocument withoutStatementIds(Set<String> statementIds); } FormIdValue.java000066400000000000000000000042131444772566300347140ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; /** * The id of a Wikibase Lexeme Form. Objects implementing this interface always return * {@link EntityIdValue#ET_FORM} for {@link EntityIdValue#getEntityType() * getEntityType}. * * @author Thomas Pellissier Tanon * */ public interface FormIdValue extends EntityIdValue { /** * Returns the id of the lexeme of which it is a form. * * @return A lexeme id */ LexemeIdValue getLexemeId(); /** * Fixed {@link FormIdValue} that refers to a non-existing form. It can be used * as a placeholder object in situations where the entity id is irrelevant.
*/ FormIdValue NULL = new FormIdValue() { @Override public <T> T accept(ValueVisitor<T> valueVisitor) { return valueVisitor.visit(this); } @Override public String getIri() { return getSiteIri() + getId(); } @Override public String getSiteIri() { return EntityIdValue.SITE_LOCAL; } @Override public String getId() { return "L0-F0"; } @Override public String getEntityType() { return ET_FORM; } @Override public LexemeIdValue getLexemeId() { return LexemeIdValue.NULL; } @Override public boolean equals(Object other) { return Equality.equalsEntityIdValue(this, other); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean isPlaceholder() { return true; } }; } FormUpdate.java000066400000000000000000000030421444772566300346040ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.Optional; import java.util.Set; /** * Collection of changes that can be applied to a form entity. */ public interface FormUpdate extends StatementDocumentUpdate { @Override FormIdValue getEntityId(); /** * Returns changes in form representations. * * @return update of form representations, possibly empty */ TermUpdate getRepresentations(); /** * Returns new grammatical features of the form assigned in this update. If * grammatical features are not changing in this update, this method returns * {@link Optional#empty()}. If grammatical features are being removed without * replacement, this method returns an empty set. * * @return new grammatical features or {@link Optional#empty()} if grammatical * features do not change */ Optional<Set<ItemIdValue>> getGrammaticalFeatures(); } GlobeCoordinatesValue.java000066400000000000000000000122111444772566300367540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Globe coordinates specify a position on some globe (usually Earth, but * possibly also another celestial body, such as Mars). *
<p>
* Altitude is not supported in this value. *
<p>
* All numeric data in coordinates is represented by floating-point numbers. A * general problem with any underlying number format is the conversion * between fractional degrees and a degrees-minutes-seconds view, which will * always lead to some loss of arithmetic precision that one has to live with. *
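<p>
* For illustration, a sketch of creating such a value, assuming the factory
* method for globe coordinates offered by the {@code Datamodel} helper of
* this toolkit:
* <pre>{@code
* GlobeCoordinatesValue dresden = Datamodel.makeGlobeCoordinatesValue(
*         51.049259, 13.73836, // latitude and longitude in degrees
*         GlobeCoordinatesValue.PREC_ARCMINUTE,
*         GlobeCoordinatesValue.GLOBE_EARTH);
* }</pre>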
<p>
* Precision is measured in degrees, and must be a positive (non-zero) number. * * @author Markus Kroetzsch * */ public interface GlobeCoordinatesValue extends Value { /** * Precision constant for globe coordinates that are precise to ten degrees. */ double PREC_TEN_DEGREE = 10.0; /** * Precision constant for globe coordinates that are precise to the degree. */ double PREC_DEGREE = 1.0; /** * Precision constant for globe coordinates that are precise to the tenth of * a degree. */ double PREC_DECI_DEGREE = 0.1; /** * Precision constant for globe coordinates that are precise to the * arcminute. */ double PREC_ARCMINUTE = 1.0 / 60; /** * Precision constant for globe coordinates that are precise to the * hundredth of a degree. */ double PREC_CENTI_DEGREE = 0.01; /** * Precision constant for globe coordinates that are precise to the * thousandth of a degree. */ double PREC_MILLI_DEGREE = 0.001; /** * Precision constant for globe coordinates that are precise to the * arcsecond. */ double PREC_ARCSECOND = 1.0 / 3600; /** * Precision constant for globe coordinates that are precise to the * ten-thousandth of a degree. */ double PREC_HUNDRED_MICRO_DEGREE = 0.0001; /** * Precision constant for globe coordinates that are precise to the tenth of * an arcsecond. */ double PREC_DECI_ARCSECOND = 1.0 / 36000; /** * Precision constant for globe coordinates that are precise to the * hundred-thousandth of a degree. */ double PREC_TEN_MICRO_DEGREE = 0.00001; /** * Precision constant for globe coordinates that are precise to the * hundredth of an arcsecond. */ double PREC_CENTI_ARCSECOND = 1.0 / 360000; /** * Precision constant for globe coordinates that are precise to the * millionth of a degree. */ double PREC_MICRO_DEGREE = 0.000001; /** * Precision constant for globe coordinates that are precise to the * thousandth of an arcsecond. */ double PREC_MILLI_ARCSECOND = 1.0 / 3600000; /** * IRI of the Earth. Used frequently to specify the globe. */ String GLOBE_EARTH = "http://www.wikidata.org/entity/Q2"; /** * IRI of the Earth's Moon. */ String GLOBE_MOON = "http://www.wikidata.org/entity/Q405"; /** * Get the latitude of this value in degrees. For Earth, the latitude value * is generally the geographic latitude (as opposed to the geocentric * latitude etc.). For other celestial bodies, the meaning of the latitude * can vary. It is part of the semantics of the property to specify which * coordinate system should be assumed for each globe (possibly depending on * further information, such as qualifiers). * * @return latitude in degrees */ double getLatitude(); /** * Get the longitude of this value in degrees. For celestial bodies other * than Earth, the meaning of the longitude can vary. It is part of the * semantics of the property to specify which coordinate system should be * assumed for each globe (possibly depending on further information, such * as qualifiers). * * @return longitude in degrees */ double getLongitude(); /** * Get the precision of the value in degrees. This value indicates that both * latitude and longitude might be off by that precision. Obviously, since * the absolute distance of one degree may vary depending on the * coordinates, this leads to a non-uniform notion of precision. For * example, precision of one whole degree at coordinates 80N, 145E is a much * smaller distance from the spot than the same precision at 10N, 145E. * * @return precision in degrees */ double getPrecision(); /** * Get the IRI of the globe that these coordinates refer to.
In most cases * this is {@link GlobeCoordinatesValue#GLOBE_EARTH}. * * @return IRI of a globe. */ String getGlobe(); /** * Get the {@link ItemIdValue} of the globe that these coordinates refer to. * * @throws IllegalArgumentException if the globe is not a valid item IRI */ ItemIdValue getGlobeItemId(); } IriIdentifiedValue.java000066400000000000000000000027461444772566300362550ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * An IriIdentifiedValue is a Value that is identified by an IRI, which can be * used as the canonical identifier for the entity outside of the context of a * particular Wikibase installation. *
<p>
* This is not a primitive datatype of the Wikibase datamodel, but a convenience * interface to access IRIs uniformly for values that provide them. *
<p>
* This interface is intended as a general interface for all types of * {@link Value} that provide an IRI for their content. See {@link StringValue} for * the value that is used when users enter a URL (or IRI) directly in Wikibase. * * @author Markus Kroetzsch * */ public interface IriIdentifiedValue extends Value { /** * Get the IRI of this value. * * @return String with the IRI */ String getIri(); } ItemDocument.java000066400000000000000000000051151444772566300351360ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.List; import java.util.Map; import java.util.Set; /** * Interface for datasets that describe items. It extends {@link EntityDocument} * with information about site links and statements. * * @author Markus Kroetzsch * */ public interface ItemDocument extends TermedStatementDocument { /** * Return the ID of the item that the data refers to. * * @return item id */ @Override ItemIdValue getEntityId(); /** * Get a Map of site keys to {@link SiteLink} objects. * * @return map of SiteLinks */ Map<String, SiteLink> getSiteLinks(); /** * Returns a new version of this document with updated ID.
* * @param newEntityId * new ID of the document * @return document with updated ID */ ItemDocument withEntityId(ItemIdValue newEntityId); @Override ItemDocument withRevisionId(long newRevisionId); @Override ItemDocument withLabel(MonolingualTextValue newLabel); @Override ItemDocument withDescription(MonolingualTextValue newDescription); @Override ItemDocument withAliases(String language, List<MonolingualTextValue> aliases); @Override ItemDocument withStatement(Statement statement); @Override ItemDocument withoutStatementIds(Set<String> statementIds); } ItemIdValue.java000066400000000000000000000036511444772566300347140ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * The id of a Wikibase Item. Objects implementing this interface always return * {@link EntityIdValue#ET_ITEM} for {@link EntityIdValue#getEntityType() * getEntityType}. * * @author Markus Kroetzsch * */ public interface ItemIdValue extends EntityIdValue { /** * Fixed {@link ItemIdValue} that refers to a non-existing item. Can be used * as a placeholder object in situations where the entity id is irrelevant. */ ItemIdValue NULL = new ItemIdValue() { @Override public String getIri() { return getSiteIri() + getId(); } @Override public <T> T accept(ValueVisitor<T> valueVisitor) { return valueVisitor.visit(this); } @Override public String getEntityType() { return ET_ITEM; } @Override public String getId() { return "Q0"; } @Override public String getSiteIri() { return EntityIdValue.SITE_LOCAL; } @Override public boolean equals(Object other) { return Equality.equalsEntityIdValue(this, other); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean isPlaceholder() { return true; } }; } ItemUpdate.java000066400000000000000000000025171444772566300346050ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.Map; import java.util.Set; /** * Collection of changes that can be applied to an item entity.
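<p>
* For illustration, a consumer-side sketch of applying the site-link part of
* such an update to a mutable map (a hypothetical helper, not part of this
* interface):
* <pre>{@code
* void applySiteLinks(ItemUpdate update, Map<String, SiteLink> siteLinks) {
*     siteLinks.keySet().removeAll(update.getRemovedSiteLinks());
*     siteLinks.putAll(update.getModifiedSiteLinks());
* }
* }</pre>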
*/ public interface ItemUpdate extends TermedStatementDocumentUpdate { @Override ItemIdValue getEntityId(); /** * Returns site links added or modified in this update. Existing site links are * preserved if their site key is not listed here. * * @return added or modified site links indexed by site key */ Map<String, SiteLink> getModifiedSiteLinks(); /** * Returns site keys of site links removed in this update. * * @return site keys of removed site links */ Set<String> getRemovedSiteLinks(); } LabeledDocument.java000066400000000000000000000036331444772566300355730ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Map; /** * Interface for EntityDocuments that can be described by labels * in several languages. * * @author Thomas Pellissier Tanon */ public interface LabeledDocument extends EntityDocument { /** * Return a Map from Wikibase language codes to labels. * * @return the map of labels */ Map<String, MonolingualTextValue> getLabels(); /** * Returns the string label for the given language code, or null if there is * no label for this code. This is a convenience method for accessing the * data that can be obtained via {@link #getLabels()}. * * @param languageCode * a string that represents the language * @return the label string or null if it does not exist */ default String findLabel(String languageCode) { MonolingualTextValue value = this.getLabels().get(languageCode); return (value != null) ? value.getText() : null; } /** * Returns a copy of this document with an updated revision id. */ @Override LabeledDocument withRevisionId(long newRevisionId); /** * Returns a new version of this document with a new label * (which overrides any existing label for this language). */ LabeledDocument withLabel(MonolingualTextValue newLabel); } LabeledDocumentUpdate.java000066400000000000000000000017501444772566300367340ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to an entity that has labels.
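<p>
* For illustration, a consumer-side sketch of applying the label changes to a
* mutable map; it assumes that {@link TermUpdate} exposes the modified terms
* via {@code getModified()} and the removed language codes via
* {@code getRemoved()}:
* <pre>{@code
* void applyLabels(LabeledDocumentUpdate update, Map<String, MonolingualTextValue> labels) {
*     labels.keySet().removeAll(update.getLabels().getRemoved());
*     for (MonolingualTextValue label : update.getLabels().getModified().values()) {
*         labels.put(label.getLanguageCode(), label);
*     }
* }
* }</pre>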
*/ public interface LabeledDocumentUpdate extends EntityUpdate { /** * Returns changes in entity labels. * * @return update of entity labels, possibly empty */ TermUpdate getLabels(); } LabeledStatementDocument.java000066400000000000000000000026621444772566300374610ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Set; /** * This interface just joins {@link LabeledDocument} and {@link StatementDocument}. * * It is necessary to introduce this interface to resolve the conflict between * the return types of the {@code withRevisionId} method in both interfaces. * * @author Thomas Pellissier Tanon */ public interface LabeledStatementDocument extends LabeledDocument, StatementDocument { @Override LabeledStatementDocument withRevisionId(long newRevisionId); @Override LabeledStatementDocument withLabel(MonolingualTextValue newLabel); @Override LabeledStatementDocument withStatement(Statement statement); @Override LabeledStatementDocument withoutStatementIds(Set<String> statementIds); } LabeledStatementDocumentUpdate.java000066400000000000000000000016421444772566300406210ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to an entity that has labels and * statements. */ public interface LabeledStatementDocumentUpdate extends LabeledDocumentUpdate, StatementDocumentUpdate { } LexemeDocument.java000066400000000000000000000105641444772566300354630ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.List; import java.util.Map; import java.util.Set; /** * Interface for datasets that describe lexemes. * * @author Thomas Pellissier Tanon * */ public interface LexemeDocument extends StatementDocument { /** * Returns the ID of the entity that the data refers to. * * @return lexeme id */ @Override LexemeIdValue getEntityId(); /** * Return the ID of the lexical category to which the lexeme belongs * (noun, verb...) * * @return item id */ ItemIdValue getLexicalCategory(); /** * Return the ID of the language to which the lexeme belongs * (French, British English...) * * @return item id */ ItemIdValue getLanguage(); /** * Return the human-readable representations of the lexeme, indexed by Wikimedia language code * * @return a map from Wikimedia language code to the lemma */ Map<String, MonolingualTextValue> getLemmas(); /** * Return the lexeme forms * * @return the list of forms */ List<FormDocument> getForms(); /** * Return the inner form having the given id * * @throws IndexOutOfBoundsException if there is no form with this id in the document */ FormDocument getForm(FormIdValue formId); /** * Return the lexeme senses * * @return the list of senses */ List<SenseDocument> getSenses(); /** * Return the inner sense having the given id * * @throws IndexOutOfBoundsException if there is no sense with this id in the document */ SenseDocument getSense(SenseIdValue senseId); /** * Returns a new version of this document with updated ID. * * @param newEntityId * new ID of the document * @return document with updated ID */ LexemeDocument withEntityId(LexemeIdValue newEntityId); /** * Returns a copy of this document with an updated revision id. */ @Override LexemeDocument withRevisionId(long newRevisionId); LexemeDocument withLexicalCategory(ItemIdValue newLexicalCategory); LexemeDocument withLanguage(ItemIdValue newLanguage); LexemeDocument withLemma(MonolingualTextValue lemma); /** * Returns a new version of this document which includes the * statement provided. If the identifier of this statement matches * that of any other statement for the same property, then the * existing statement will be replaced by the new one. Otherwise, * the new statement will be added at the end of the list of statements * in this group. * * @param statement * the statement to add or update in the document */ @Override LexemeDocument withStatement(Statement statement); /** * Returns a new version of this document where all statements matching * any of the statement ids provided have been removed. These statements * can use different properties. * * @param statementIds * the identifiers of the statements to remove */ @Override LexemeDocument withoutStatementIds(Set<String> statementIds); /** * Creates a new {@link FormDocument} for this lexeme. * The form is not added to the {@link LexemeDocument} object; * adding it should be done with {@link LexemeDocument#withForm}. */ FormDocument createForm(List<MonolingualTextValue> representations); /** * Adds a {@link FormDocument} to this lexeme. * The form id should be prefixed with the lexeme id. */ LexemeDocument withForm(FormDocument form); /** * Creates a new {@link SenseDocument} for this lexeme.
* The sense is not added to the {@link LexemeDocument} object; * adding it should be done with {@link LexemeDocument#withSense}. */ SenseDocument createSense(List<MonolingualTextValue> glosses); /** * Adds a {@link SenseDocument} to this lexeme. * The sense id should be prefixed with the lexeme id. */ LexemeDocument withSense(SenseDocument sense); } LexemeIdValue.java000066400000000000000000000037031444772566300352330ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * The id of a Wikibase Lexeme. Objects implementing this interface always return * {@link EntityIdValue#ET_LEXEME} for {@link EntityIdValue#getEntityType() * getEntityType}. * * @author Thomas Pellissier Tanon * */ public interface LexemeIdValue extends EntityIdValue { /** * Fixed {@link LexemeIdValue} that refers to a non-existing lexeme. It can be used * as a placeholder object in situations where the entity id is irrelevant. */ LexemeIdValue NULL = new LexemeIdValue() { @Override public String getIri() { return getSiteIri() + getId(); } @Override public <T> T accept(ValueVisitor<T> valueVisitor) { return valueVisitor.visit(this); } @Override public String getEntityType() { return ET_LEXEME; } @Override public String getId() { return "L0"; } @Override public String getSiteIri() { return EntityIdValue.SITE_LOCAL; } @Override public boolean equals(Object other) { return Equality.equalsEntityIdValue(this, other); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean isPlaceholder() { return true; } }; } LexemeUpdate.java000066400000000000000000000054141444772566300351250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; /** * Collection of changes that can be applied to a lexeme entity.
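<p>
* For illustration, a small consumer-side sketch using the accessors below (a
* hypothetical helper, not part of this interface):
* <pre>{@code
* int formCountDelta(LexemeUpdate update) {
*     return update.getAddedForms().size() - update.getRemovedForms().size();
* }
* }</pre>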
*/ public interface LexemeUpdate extends StatementDocumentUpdate { @Override LexemeIdValue getEntityId(); /** * Returns the new lexeme language assigned in this update. If the language is not * changing, this method returns {@link Optional#empty()}. * * @return new lexeme language or {@link Optional#empty()} if it is not changing */ Optional<ItemIdValue> getLanguage(); /** * Returns the new lexical category assigned to the lexeme in this update. If * the lexical category is not changing, this method returns * {@link Optional#empty()}. * * @return new lexical category or {@link Optional#empty()} if it is not * changing */ Optional<ItemIdValue> getLexicalCategory(); /** * Returns changes in lemmas. * * @return update of lemmas, possibly empty */ TermUpdate getLemmas(); /** * Returns new forms added to the lexeme in this update. Existing forms are * preserved by default. * * @return list of new forms */ List<FormDocument> getAddedForms(); /** * Returns lexeme forms modified in this update. Forms not listed here are * preserved by default. * * @return modified forms indexed by ID */ Map<FormIdValue, FormUpdate> getUpdatedForms(); /** * Returns IDs of forms removed from the lexeme in this update. * * @return IDs of removed lexeme forms */ Set<FormIdValue> getRemovedForms(); /** * Returns new senses added to the lexeme in this update. Existing senses are * preserved by default. * * @return list of new senses */ List<SenseDocument> getAddedSenses(); /** * Returns lexeme senses modified in this update. Senses not listed here are * preserved by default. * * @return modified senses indexed by ID */ Map<SenseIdValue, SenseUpdate> getUpdatedSenses(); /** * Returns IDs of senses removed from the lexeme in this update. * * @return IDs of removed lexeme senses */ Set<SenseIdValue> getRemovedSenses(); } MediaInfoDocument.java000066400000000000000000000030621444772566300360720ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Set; /** * Interface for datasets that describe media entities. * * @author Thomas Pellissier Tanon * */ public interface MediaInfoDocument extends LabeledStatementDocument { /** * Returns the ID of the entity that the data refers to. * * @return media info id */ @Override MediaInfoIdValue getEntityId(); /** * Returns a new version of this document with updated ID.
* * @param newEntityId * new ID of the document * @return document with updated ID */ MediaInfoDocument withEntityId(MediaInfoIdValue newEntityId); @Override MediaInfoDocument withRevisionId(long newRevisionId); @Override MediaInfoDocument withLabel(MonolingualTextValue newLabel); @Override MediaInfoDocument withStatement(Statement statement); @Override MediaInfoDocument withoutStatementIds(Set<String> statementIds); } MediaInfoIdValue.java000066400000000000000000000037171444772566300356540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * The id of a Wikibase MediaInfo. Objects implementing this interface always return * {@link EntityIdValue#ET_MEDIA_INFO} for {@link EntityIdValue#getEntityType() * getEntityType}. * * @author Thomas Pellissier Tanon * */ public interface MediaInfoIdValue extends EntityIdValue { /** * Fixed {@link MediaInfoIdValue} that refers to a non-existing media info. Can be used * as a placeholder object in situations where the entity id is irrelevant. */ MediaInfoIdValue NULL = new MediaInfoIdValue() { @Override public String getIri() { return getSiteIri() + getId(); } @Override public <T> T accept(ValueVisitor<T> valueVisitor) { return valueVisitor.visit(this); } @Override public String getEntityType() { return ET_MEDIA_INFO; } @Override public String getId() { return "M0"; } @Override public String getSiteIri() { return EntityIdValue.SITE_LOCAL; } @Override public boolean equals(Object other) { return Equality.equalsEntityIdValue(this, other); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean isPlaceholder() { return true; } }; } MediaInfoUpdate.java000066400000000000000000000016211444772566300355360ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to a media info entity.
*/ public interface MediaInfoUpdate extends LabeledStatementDocumentUpdate { @Override MediaInfoIdValue getEntityId(); } MonolingualTextValue.java000066400000000000000000000025261444772566300366720ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A monolingual text value represents a text (string) in a certain language. * * @author Markus Kroetzsch * */ public interface MonolingualTextValue extends Value { /** * Get the text of this value. * * @return a string */ String getText(); /** * Get the language code of this value. The codes are usually based on the * codes used internally in Wikibase, which in turn are the codes used in * the Universal Language Selector extension. However, the data model as * such does not restrict the strings that might be used here. * * @return a string that represents language */ String getLanguageCode(); }NoValueSnak.java000066400000000000000000000015751444772566300347350ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A NoValueSnak represents the information that a given property has no value. * * @author Markus Kroetzsch * */ public interface NoValueSnak extends Snak { } PropertyDocument.java000066400000000000000000000040421444772566300360620ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; import java.util.List; import java.util.Set; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Interface for datasets that describe properties. It extends * {@link EntityDocument} with information about the datatype of a property. *
<p>
* Claims or Statements on properties might be supported in the future. * * @author Markus Kroetzsch * */ public interface PropertyDocument extends TermedStatementDocument { /** * Return the ID of the item that the data refers to. * * @return item id */ @Override PropertyIdValue getEntityId(); /** * Get the datatype id of the datatype defined for this property. * * @return {@link DatatypeIdValue} */ DatatypeIdValue getDatatype(); /** * Returns a new version of this document with updated ID. * * @param newEntityId * new ID of the document * @return document with updated ID */ PropertyDocument withEntityId(PropertyIdValue newEntityId); @Override PropertyDocument withRevisionId(long newRevisionId); @Override PropertyDocument withLabel(MonolingualTextValue newLabel); @Override PropertyDocument withDescription(MonolingualTextValue newDescription); @Override PropertyDocument withAliases(String language, List aliases); @Override PropertyDocument withStatement(Statement statement); @Override PropertyDocument withoutStatementIds(Set statementIds); } PropertyIdValue.java000066400000000000000000000037111444772566300356370ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * The id of a Wikibase Property. Objects implementing this interface always * return {@link EntityIdValue#ET_PROPERTY} for * {@link EntityIdValue#getEntityType() getEntityType}. * * @author Markus Kroetzsch * */ public interface PropertyIdValue extends EntityIdValue { /** * Fixed {@link PropertyIdValue} that refers to a non-existing property. Can * be used as a placeholder object in situations where the entity id is * irrelevant. */ PropertyIdValue NULL = new PropertyIdValue() { @Override public String getIri() { return getSiteIri() + getId(); } @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public String getEntityType() { return ET_PROPERTY; } @Override public String getId() { return "P0"; } @Override public String getSiteIri() { return EntityIdValue.SITE_LOCAL; } @Override public boolean equals(Object other) { return Equality.equalsEntityIdValue(this, other); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean isPlaceholder() { return true; } }; } PropertyUpdate.java000066400000000000000000000016211444772566300355260ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to property entity. */ public interface PropertyUpdate extends TermedStatementDocumentUpdate { @Override PropertyIdValue getEntityId(); } QuantityValue.java000066400000000000000000000040251444772566300353530ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.math.BigDecimal; /** * A quantity value represents a number, possibly under some unit. The number * can be of arbitrary precision. Additional upper and lower bounds are provided * to allow an interval of uncertainty to be defined. * * @author Markus Kroetzsch * */ public interface QuantityValue extends Value { /** * Returns the main numeric value of this quantity. * * @return numeric value as a decimal value of arbitrary precision */ BigDecimal getNumericValue(); /** * Returns the upper bound for the numeric value of this quantity. * * @return numeric value as a decimal value of arbitrary precision or null if not set */ BigDecimal getLowerBound(); /** * Returns the upper bound for the numeric value of this quantity. * * @return numeric value as a decimal value of arbitrary precision or null if not set */ BigDecimal getUpperBound(); /** * Returns the unit of this quantity, or the string "1" if there is no * unit. Units are typically encoded as entity IRIs. * * @return unit string (IRI or the string "1" if there is no unit) */ String getUnit(); /** * @return the unit of this quantity as an item id value, or null if there is no unit. * @throws IllegalArgumentException if the unit is not "1" (no unit) or a valid item IRI */ ItemIdValue getUnitItemId(); } Reference.java000066400000000000000000000034221444772566300344360ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Iterator; import java.util.List; /** * An interface for references in Wikidata. A reference is currently defined by * a list of ValueSnaks, encoding property-value pairs. * * @author Markus Kroetzsch * */ public interface Reference { /** * Get the list of snak groups associated with this reference. Objects of * this class are immutable, and the list should therefore not be * modifiable. * * @return list of SnakGroups */ List getSnakGroups(); /** * Returns an interator over all snaks, without considering snak groups. The * relative order of snaks is preserved. * * @return iterator of snaks */ Iterator getAllSnaks(); /** * Wikibase calculates a hash for each reference based on the content of the reference. * This hash appears in the RDF serialization of the reference. * Since the calculation is hard to reproduce, this is only available if the reference was read * from a dump that contains the hash. * * @return the hash of the reference, if available, otherwise null. */ String getHash(); } SenseDocument.java000066400000000000000000000047561444772566300353270ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.Map; import java.util.Set; /** * Interface for lexemes senses. * * @author Thomas Pellissier Tanon * */ public interface SenseDocument extends StatementDocument { /** * Returns the ID of the entity that the data refers to * * @return sense id */ @Override SenseIdValue getEntityId(); /** * Return the human readable description of the sense indexed by Wikimedia language code * * @return a map from Wikimedia language code to the representations */ Map getGlosses(); /** * Returns a new version of this document with updated ID. * * @param newEntityId * new ID of the document * @return document with updated ID */ SenseDocument withEntityId(SenseIdValue newEntityId); /** * Returns a copy of this document with an updated revision id. */ @Override SenseDocument withRevisionId(long newRevisionId); SenseDocument withGloss(MonolingualTextValue gloss); /** * Returns a new version of this document which includes the * statement provided. If the identifier of this statement matches * that of any other statement for the same property, then the * existing statement will be replaced by the new one. Otherwise, * the new statement will be added at the end of the list of statements * in this group. 
* * @param statement * the statement to add or update in the document */ @Override SenseDocument withStatement(Statement statement); /** * Returns a new version of this document where all statements matching * any of the statement ids provided have been removed. These statements * can use different properties. * * @param statementIds * the identifiers of the statements to remove */ @Override SenseDocument withoutStatementIds(Set statementIds); } SenseIdValue.java000066400000000000000000000042231444772566300350670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import org.wikidata.wdtk.datamodel.helpers.Equality; import org.wikidata.wdtk.datamodel.helpers.Hash; /** * The id of a Wikibase Lexeme Sense. Objects implementing this interface always return * {@link EntityIdValue#ET_SENSE} for {@link EntityIdValue#getEntityType() * getEntityType}. * * @author Thomas Pellissier Tanon * */ public interface SenseIdValue extends EntityIdValue { /** * Returns the id of the lexeme of which it is a form. * * @return A lexeme id */ LexemeIdValue getLexemeId(); /** * Fixed {@link SenseIdValue} that refers to a non-existing sense. It can be * used as a placeholder object in situations where the entity id is irrelevant. */ SenseIdValue NULL = new SenseIdValue() { @Override public T accept(ValueVisitor valueVisitor) { return valueVisitor.visit(this); } @Override public String getIri() { return getSiteIri() + getId(); } @Override public String getSiteIri() { return EntityIdValue.SITE_LOCAL; } @Override public String getId() { return "L0-S0"; } @Override public String getEntityType() { return ET_SENSE; } @Override public LexemeIdValue getLexemeId() { return LexemeIdValue.NULL; } @Override public boolean equals(Object other) { return Equality.equalsEntityIdValue(this, other); } @Override public int hashCode() { return Hash.hashCode(this); } @Override public boolean isPlaceholder() { return true; } }; } SenseUpdate.java000066400000000000000000000020061444772566300347550ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to sense entity. */ public interface SenseUpdate extends StatementDocumentUpdate { @Override SenseIdValue getEntityId(); /** * Returns changes in sense glosses. * * @return update of sense glosses, possibly empty */ TermUpdate getGlosses(); } SiteLink.java000066400000000000000000000030611444772566300342610ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.List; /** * A site link in Wikibase specifies a link to an article on another MediaWiki * site, and a list of "badges" that this article holds. Badges are specific * tags used on Wikimedia project sites for some articles, most prominently for * "featured articles". *
<p>
* In spite of its name, the site link does not specify a full URL that it links * to. It only provides a page title and a site key that may be used to find a * URL. To do this, the site links need to be resolved using a {@link Sites} * object. * * @author Markus Kroetzsch * */ public interface SiteLink { /** * Get the string title of the linked page. */ String getPageTitle(); /** * Get the string key of the linked site. */ String getSiteKey(); /** * Get the list of badges of the linked article. */ List getBadges(); } Sites.java000066400000000000000000000132211444772566300336250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Registry to manage the association between site keys (such as "enwiki") and * base URLs (such as "http://en.wikipedia.org/wiki/") that is needed to * interpret {@link SiteLink} objects. These associations are part of the * configuration of a MediaWiki site and therefore not fixed. *
<p>
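 * For example, a {@link SiteLink} can be resolved to a URL as follows
 * (an illustrative sketch; assumes a populated {@code sites} registry):
 * <pre>{@code
 * String url = sites.getSiteLinkUrl(siteLink);
 * }</pre>
 * <p>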
* This is not a Wikibase data object as such, but part of the general * configuration of a Wikibase site. The interface supports modification, e.g., * to insert additional associations. This can be useful to augment data * manually (even when loading most of the data from a file dump). For example, * some of Wikimedia's data exports are more frequent than their sites table * exports, so it might be useful to add some very recent sites. * * @author Markus Kroetzsch * */ public interface Sites { /** * Sets the stored information for the site of the given key to the given * values. *
<p>
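 * A call mirroring the parameter documentation below might look as
 * follows (an illustrative sketch only):
 * <pre>{@code
 * sites.setSiteInformation("enwiki", "wikipedia", "en", "mediawiki",
 *         "http://en.wikipedia.org/w/$1",
 *         "http://en.wikipedia.org/wiki/$1");
 * }</pre>
 * <p>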
* Note that the path URLs given here should be absolute. In MediaWiki, it * is common to use protocol-relative paths (starting with "//" rather than * with "http://" or "https://"). The code in this class is not prepared to * handle this yet (URL-returning methods would need to allow for a * preferred protocol to be specified). * * @param siteKey * the global site key, e.g., "enwiki" or "fawikivoyage" * @param group * the site group, e.g., "wikipedia" or "wikivoyage" * @param languageCode * the site MediaWiki language code, e.g., "en" or "fa" * @param siteType * the site type, typically "mediawiki" * @param filePath * the file path with $1 as a placeholder for the file name, * e.g., "http://en.wikipedia.org/w/$1" or * "http://fa.wikivoyage.org/w/$1" * @param pagePath * the page path with $1 as a placeholder for the page title, * e.g., "http://en.wikipedia.org/wiki/$1" or * "http://fa.wikivoyage.org/wiki/$1" */ void setSiteInformation(String siteKey, String group, String languageCode, String siteType, String filePath, String pagePath); /** * Returns the MediaWiki language code for the given site, or null if there * is no such data for this site key. *
<p>
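 * For example (illustrative), {@code sites.getLanguageCode("enwiki")}
 * would return {@code "en"} for a registry populated from Wikimedia's
 * sites table.
 * <p>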
* The language code follows the MediaWiki conventions for language codes, * which do not follow any standard. Most codes agree with those in BCP 47 , but there * are a number of exceptions. * * @param siteKey * the global site key * @return the corresponding MediaWiki language code, or null if not known */ String getLanguageCode(String siteKey); /** * Returns the group for the given site, or null if there is no such data * for this site key. The group is a string identifier used for * configuration purposes. Typical groups on Wikimedia sites include * "wikipedia", "wikisource", "wikivoyage", and "wikiquote", used for most * sites of these projects, but also singleton groups like "commons" and * "wikimania2013". * * @param siteKey * the global site key * @return the corresponding group, or null if not known */ String getGroup(String siteKey); /** * Returns the URL for the page of the given name, or null if the site is * not known. All characters in the page title will be escaped for use in * URLs. * * @param siteKey * the global site key * @param pageTitle * the title of the page, including namespace prefixes if any * @return the URL to link to this page on the given site, or null if not * known */ String getPageUrl(String siteKey, String pageTitle); /** * Returns the URL for the given site link, or null if its site key is not * known. * * @param siteLink * the SiteLink object * @return the page URL for this site link, or null if not known */ String getSiteLinkUrl(SiteLink siteLink); /** * Returns the URL for the file of the given name, or null if the site is * not known. The file name is not escaped for use in URLs, so that * one can use this method to construct URLs with parameters, e.g., when * calling the API of the site. Also note that this method does not * construct URLs for files uploaded to a MediaWiki site using the given * file name; such files are usually placed in some subdirectory. * * @param siteKey * the global site key * @param fileName * the name of the file * @return the URL to link to this page on the given site, or null if not * known */ String getFileUrl(String siteKey, String fileName); /** * Returns the type for the given site, or null if there is no such data for * this site key. For MediaWiki sites, this is "mediawiki". * * @param siteKey * the global site key * @return the corresponding type, or null if not known */ String getSiteType(String siteKey); } Snak.java000066400000000000000000000027101444772566300334330ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Snaks are the basic information structures used to describe Entities in * Wikibase. They are an integral part of each {@link Statement} (which can be * viewed as collection of Snaks about an Entity, together with a list of * references). *
<p>
* The most basic (and most common) form of Snaks are property-value pairs, but * other forms exist. * * @author Markus Kroetzsch * */ public interface Snak { /** * Get the id of the property that this snak refers to. * * @return PropertyId of this Snak */ PropertyIdValue getPropertyId(); /** * Accept a SnakVisitor and return its output. * * @param snakVisitor * the SnakVisitor * @return output of the visitor */ T accept(SnakVisitor snakVisitor); } SnakGroup.java000066400000000000000000000022761444772566300344570ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Collection; import java.util.List; /** * A snak group represents an ordered list of {@link Snak} objects that use the * same property. * * @author Markus Kroetzsch * */ public interface SnakGroup extends Collection { /** * Get the list of Snaks of this group. * * @return a list of Snaks */ List getSnaks(); /** * Get the property used by each snak in this group. * * @return a PropertyIdValue */ PropertyIdValue getProperty(); }SnakVisitor.java000066400000000000000000000030141444772566300350110ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A visitor for the various types of snaks in the datamodel. This should be * used to avoid any type casting or instanceof checks when processing snaks. * * @author Markus Kroetzsch * * @param * the return type of the visitor */ public interface SnakVisitor { /** * Visits a ValueSnak and returns a result. * * @param snak * the snak to visit * @return the result for this snak */ T visit(ValueSnak snak); /** * Visits a SomeValueSnak and returns a result. * * @param snak * the snak to visit * @return the result for this snak */ T visit(SomeValueSnak snak); /** * Visits a NoValueSnak and returns a result. 
* * @param snak * the snak to visit * @return the result for this snak */ T visit(NoValueSnak snak); } SomeValueSnak.java000066400000000000000000000017551444772566300352640ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A SomeValueSnak represents the information that a given property has some * value that is not specified further. In modelling, this is normally used if * the specific value is unknown. * * @author Markus Kroetzsch * */ public interface SomeValueSnak extends Snak { } Statement.java000066400000000000000000000104741444772566300345110ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Iterator; import java.util.List; /** * Interface for Wikibase Statements. A Statement is the main information object * entered by users in Wikidata. It refers to a {@link Claim}, on which it * provides additional information about references and ranking. * * @author Markus Kroetzsch * */ public interface Statement { /** * Get the Claim object that this statement refers to. * * @return the claim that this statement refers to */ Claim getClaim(); /** * The subject that the claim refers to, e.g., the id of "Berlin". * * @return EntityId of the subject */ EntityIdValue getSubject(); /** * Main Snak of the statement. This Snak refers directly to the subject, * e.g., the {@link ValueSnak} "Population: 3000000". * * @return the main snak */ Snak getMainSnak(); /** * Groups of auxiliary Snaks, also known as qualifiers, that provide * additional context information for this claim. For example, "as of: 2014" * might be a temporal context given for a claim that provides a population * number. The snaks are grouped by the property that they use. * * @return list of snak groups */ List<SnakGroup> getQualifiers(); /** * Returns an iterator over all qualifiers, without considering qualifier * groups. The relative order of qualifiers is preserved. 
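 * <p>
 * For example, one can loop over all qualifier snaks as follows (an
 * illustrative sketch; {@code statement} is any {@link Statement}):
 * <pre>{@code
 * Iterator<Snak> it = statement.getAllQualifiers();
 * while (it.hasNext()) {
 *     Snak qualifier = it.next();
 *     // inspect the qualifier, e.g. via qualifier.getPropertyId()
 * }
 * }</pre>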
* * @return iterator over all qualifier snaks */ Iterator<Snak> getAllQualifiers(); /** * @see StatementRank * @return the rank of the Statement */ StatementRank getRank(); /** * Get a list of references for this Statement. Each reference is * represented by a list of Snaks, which provide information about the * reference. * * @return the list of references */ List<Reference> getReferences(); /** * Return the id used to identify this statement. *
<p>
* Statement ids are used by Wikibase to allow certain interactions though * the API, especially the atomic modification of Statements (modifications * of statements can be viewed as deletions followed by insertions, but * doing this in several steps though the API is not practical). In the * current Wikibase implementation, the id is a string that begins with the * (sometimes lowercased) local ID of the subject of the statement, followed * by a dollar sign and a randomly generated UUID. Thus statements of * different subjects can never have the same id, and it is extremely * unlikely that two statements of the one subject ever have the same id. * However, it is possible that two statements with the same content differ * in their id, since the id is not based on the content. *
<p>
* Wikidata Toolkit generally requires ids to be specified but you can use * the empty string to indicate that a statement has no id. This will also * be respected when serializing data as JSON, i.e., rather than setting the * statement id to an empty string in JSON, the key will simply be omitted. * This is useful for creating new statements through the API. *
<p>
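 * A statement id might, for example, look like
 * {@code Q42$F078E5B3-F9A8-480E-B7AC-D97778CBBEF9} (a purely illustrative
 * value that merely follows the pattern described above).
 * <p>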
* Callers should not make any assumptions about the stability of statement * ids over time, or about the internal format of the ids. * * @return the statement string id */ String getStatementId(); /** * Convenience method to get the value of the statement's claim's main snak, * or null if there is none. * * @return main value of the statement, or null */ Value getValue(); /** * Returns the same statement, but with a different identifier. * This is useful when the existing identifier was empty and we need to * attribute one before creating the statement in a remote Wikibase instance. */ Statement withStatementId(String id); } StatementDocument.java000066400000000000000000000522251444772566300362100ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.util.NestedIterator; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Set; /** * Interface for EntityDocuments that can have statements. * * @author Markus Kroetzsch */ public interface StatementDocument extends EntityDocument { /** * Return the list of all StatementGroups stored for this item. The order of * StatementGroups is significant. * * @return list of StatementGroups */ List getStatementGroups(); /** * Returns an iterator that provides access to all statements, without * considering the statement groups. The order of statements is preserved. * * @return iterator over all statements */ default Iterator getAllStatements() { return new NestedIterator<>(getStatementGroups()); } /** * Returns the {@link StatementGroup} for the given property, or null if * there are no statements for this property. This is a convenience method * for accessing the data that can be obtained via * {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link StatementGroup} or null */ default StatementGroup findStatementGroup(PropertyIdValue propertyIdValue) { for (StatementGroup sg : getStatementGroups()) { if (propertyIdValue.equals(sg.getProperty())) { return sg; } } return null; } /** * Returns the {@link StatementGroup} for the given property, or null if * there are no statements for this property. Only the string id of the * property is compared, not the site id. This is useful in situations where * all data is known to come from a single site. *
<p>
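 * For example (with an illustrative property id; {@code document} is any
 * {@link StatementDocument}):
 * <pre>{@code
 * StatementGroup group = document.findStatementGroup("P31");
 * }</pre>
 * <p>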
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link StatementGroup} or null */ default StatementGroup findStatementGroup(String propertyId) { for (StatementGroup sg : getStatementGroups()) { if (propertyId.equals(sg.getProperty().getId())) { return sg; } } return null; } /** * Returns true if there is a statement for the given property. This is a * convenience method for accessing the data that can be obtained via * {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return true if a statement for this property exists */ default boolean hasStatement(PropertyIdValue propertyIdValue) { return findStatementGroup(propertyIdValue) != null; } /** * Returns true if there is a statement for the given property. Only the * string id of the property is compared, not the site id. This is useful in * situations where all data is known to come from a single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return true if a statement for this property exists */ default boolean hasStatement(String propertyId) { return findStatementGroup(propertyId) != null; } /** * Returns true if there is a statement for the given property and value. * This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @param value * the value to search * @return true if a statement for this property and value exists */ default boolean hasStatementValue(PropertyIdValue propertyIdValue, Value value) { return hasStatementValue(propertyIdValue, Collections.singleton(value)); } /** * Returns true if there is a statement for the given property and value. * Only the string id of the property is compared, not the site id. This is * useful in situations where all data is known to come from a single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @param value * the value to search * @return true if a statement for this property and value exists */ default boolean hasStatementValue(String propertyId, Value value) { return hasStatementValue(propertyId, Collections.singleton(value)); } /** * Returns true if there is a statement for the given property and one of * the given values. This is a convenience method for accessing the data * that can be obtained via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @param values * the set of values to search * @return true if a statement for this property and value exists */ default boolean hasStatementValue(PropertyIdValue propertyIdValue, Set values) { StatementGroup statementGroup = findStatementGroup(propertyIdValue); if(statementGroup == null) { return false; } for (Statement statement : statementGroup) { if (values.contains(statement.getValue())) { return true; } } return false; } /** * Returns true if there is a statement for the given property and one of * the given values. Only the string id of the property is compared, not the * site id. This is useful in situations where all data is known to come * from a single site. *
<p>
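 * For example (an illustrative sketch; {@code value} is some
 * {@link Value} to test for):
 * <pre>{@code
 * boolean present = document.hasStatementValue("P31",
 *         Collections.singleton(value));
 * }</pre>
 * <p>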
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @param values * the set of values to search * @return true if a statement for this property and value exists */ default boolean hasStatementValue(String propertyId, Set values) { StatementGroup statementGroup = findStatementGroup(propertyId); if(statementGroup == null) { return false; } for (Statement statement : statementGroup) { if (values.contains(statement.getValue())) { return true; } } return false; } /** * Returns the unique {@link Statement} for the given property, or null if * there are zero or many statements for this property. This is a * convenience method for accessing the data that can be obtained via * {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link Statement} or null */ default Statement findStatement(PropertyIdValue propertyIdValue) { StatementGroup statementGroup = findStatementGroup(propertyIdValue); return (statementGroup != null && statementGroup.size() == 1) ? statementGroup.getStatements().get(0) : null; } /** * Returns the unique {@link Statement} for the given property, or null if * there are zero or many statements for this property. Only the string id * of the property is compared, not the site id. This is useful in * situations where all data is known to come from a single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link Statement} or null */ default Statement findStatement(String propertyId) { StatementGroup statementGroup = findStatementGroup(propertyId); return (statementGroup != null && statementGroup.size() == 1) ? statementGroup.getStatements().get(0) : null; } /** * Returns the unique {@link Value} for the given property, or null if there * are zero or many values given in statements for this property. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link Value} or null */ default Value findStatementValue(PropertyIdValue propertyIdValue) { Statement statement = findStatement(propertyIdValue); return (statement != null) ? statement.getValue() : null; } /** * Returns the unique {@link Value} for the given property, or null if there * are zero or many values given in statements for this property. Only the * string id of the property is compared, not the site id. This is useful in * situations where all data is known to come from a single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link Value} or null */ default Value findStatementValue(String propertyId) { Statement statement = findStatement(propertyId); return (statement != null) ? statement.getValue() : null; } /** * Returns the unique {@link StringValue} for the given property, or null if * there are zero or many such values given in statements for this property. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link StringValue} or null */ default StringValue findStatementStringValue(PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof StringValue ? (StringValue) value : null; } /** * Returns the unique {@link StringValue} for the given property, or null if * there are zero or many such values given in statements for this property. * Only the string id of the property is compared, not the site id. This is * useful in situations where all data is known to come from a single site. *
<p>
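 * For example (with an illustrative property id for a string-valued
 * property):
 * <pre>{@code
 * StringValue isbn = document.findStatementStringValue("P212");
 * }</pre>
 * <p>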
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link StringValue} or null */ default StringValue findStatementStringValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof StringValue ? (StringValue) value : null; } /** * Returns the unique {@link QuantityValue} for the given property, or null * if there are zero or many such values given in statements for this * property. *
<p>
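 * For example (an illustrative sketch; {@code populationProperty} is a
 * {@link PropertyIdValue} for a quantity-valued property):
 * <pre>{@code
 * QuantityValue population =
 *         document.findStatementQuantityValue(populationProperty);
 * }</pre>
 * <p>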
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link QuantityValue} or null */ default QuantityValue findStatementQuantityValue(PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof QuantityValue ? (QuantityValue) value : null; } /** * Returns the unique {@link QuantityValue} for the given property, or null * if there are zero or many such values given in statements for this * property. Only the string id of the property is compared, not the site * id. This is useful in situations where all data is known to come from a * single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link QuantityValue} or null */ default QuantityValue findStatementQuantityValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof QuantityValue ? (QuantityValue) value : null; } /** * Returns the unique {@link GlobeCoordinatesValue} for the given property, * or null if there are zero or many such values given in statements for * this property. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link GlobeCoordinatesValue} or null */ default GlobeCoordinatesValue findStatementGlobeCoordinatesValue( PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof GlobeCoordinatesValue ? (GlobeCoordinatesValue) value : null; } /** * Returns the unique {@link GlobeCoordinatesValue} for the given property, * or null if there are zero or many such values given in statements for * this property. Only the string id of the property is compared, not the * site id. This is useful in situations where all data is known to come * from a single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link GlobeCoordinatesValue} or null */ default GlobeCoordinatesValue findStatementGlobeCoordinatesValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof GlobeCoordinatesValue ? (GlobeCoordinatesValue) value : null; } /** * Returns the unique {@link TimeValue} for the given property, or null if * there are zero or many such values given in statements for this property. *
<p>
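 * For example (an illustrative sketch; {@code inceptionProperty} is a
 * {@link PropertyIdValue} for a time-valued property):
 * <pre>{@code
 * TimeValue inception = document.findStatementTimeValue(inceptionProperty);
 * }</pre>
 * <p>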
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link TimeValue} or null */ default TimeValue findStatementTimeValue(PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof TimeValue ? (TimeValue) value : null; } /** * Returns the unique {@link TimeValue} for the given property, or null if * there are zero or many such values given in statements for this property. * Only the string id of the property is compared, not the site id. This is * useful in situations where all data is known to come from a single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link TimeValue} or null */ default TimeValue findStatementTimeValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof TimeValue ? (TimeValue) value : null; } /** * Returns the unique {@link MonolingualTextValue} for the given property, * or null if there are zero or many such values given in statements for * this property. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link MonolingualTextValue} or null */ default MonolingualTextValue findStatementMonolingualTextValue( PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof MonolingualTextValue ? (MonolingualTextValue) value : null; } /** * Returns the unique {@link MonolingualTextValue} for the given property, * or null if there are zero or many such values given in statements for * this property. Only the string id of the property is compared, not the * site id. This is useful in situations where all data is known to come * from a single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link MonolingualTextValue} or null */ default MonolingualTextValue findStatementMonolingualTextValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof MonolingualTextValue ? (MonolingualTextValue) value : null; } /** * Returns the unique {@link ItemIdValue} for the given property, or null if * there are zero or many such values given in statements for this property. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link ItemIdValue} or null */ default ItemIdValue findStatementItemIdValue(PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof ItemIdValue ? (ItemIdValue) value : null; } /** * Returns the unique {@link ItemIdValue} for the given property, or null if * there are zero or many such values given in statements for this property. * Only the string id of the property is compared, not the site id. This is * useful in situations where all data is known to come from a single site. *
<p>
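 * For example (with an illustrative property id for an item-valued
 * property):
 * <pre>{@code
 * ItemIdValue country = document.findStatementItemIdValue("P17");
 * }</pre>
 * <p>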
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link ItemIdValue} or null */ default ItemIdValue findStatementItemIdValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof ItemIdValue ? (ItemIdValue) value : null; } /** * Returns the unique {@link PropertyIdValue} for the given property, or * null if there are zero or many such values given in statements for this * property. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link PropertyIdValue} or null */ default PropertyIdValue findStatementPropertyIdValue(PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof PropertyIdValue ? (PropertyIdValue) value : null; } /** * Returns the unique {@link PropertyIdValue} for the given property, or * null if there are zero or many such values given in statements for this * property. Only the string id of the property is compared, not the site * id. This is useful in situations where all data is known to come from a * single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link PropertyIdValue} or null */ default PropertyIdValue findStatementPropertyIdValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof PropertyIdValue ? (PropertyIdValue) value : null; } /** * Returns the unique {@link EntityIdValue} for the given property, or null * if there are zero or many such values given in statements for this * property. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyIdValue * the property to search for * @return {@link EntityIdValue} or null */ default EntityIdValue findStatementEntityIdValue(PropertyIdValue propertyIdValue) { Value value = findStatementValue(propertyIdValue); return value instanceof EntityIdValue ? (EntityIdValue) value : null; } /** * Returns the unique {@link EntityIdValue} for the given property, or null * if there are zero or many such values given in statements for this * property. Only the string id of the property is compared, not the site * id. This is useful in situations where all data is known to come from a * single site. *
<p>
* This is a convenience method for accessing the data that can be obtained * via {@link #getStatementGroups()}. * * @param propertyId * the property to search for * @return {@link EntityIdValue} or null */ default EntityIdValue findStatementEntityIdValue(String propertyId) { Value value = findStatementValue(propertyId); return value instanceof EntityIdValue ? (EntityIdValue) value : null; } /** * Returns a copy of this document with an updated revision id. */ @Override StatementDocument withRevisionId(long newRevisionId); /** * Returns a new version of this document which includes the * statement provided. If the identifier of this statement matches * that of any other statement for the same property, then the * existing statement will be replaced by the new one. Otherwise, * the new statement will be added at the end of the list of statements * in this group. * * @param statement * the statement to add or update in the document */ StatementDocument withStatement(Statement statement); /** * Returns a new version of this document where all statements matching * any of the statement ids provided have been removed. These statements * can use different properties. * * @param statementIds * the identifiers of the statements to remove */ StatementDocument withoutStatementIds(Set statementIds); } StatementDocumentUpdate.java000066400000000000000000000020351444772566300373450ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to an entity that has statements. * * @see StatementUpdate */ public interface StatementDocumentUpdate extends EntityUpdate { /** * Returns statement changes included in this update. * * @return statement update, possibly empty */ StatementUpdate getStatements(); } StatementGroup.java000066400000000000000000000042101444772566300355150ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.util.Collection; import java.util.List; /** * A statement group represents an ordered list of {@link Statement} objects * that use the same subject and the same property in the main snak of their * {@link Claim}. * * @author Markus Kroetzsch * */ public interface StatementGroup extends Collection { /** * Get the list of Statements of this group. * * @return a list of Statements */ List getStatements(); /** * Get the best statements of this group. * These are the statements with rank {@link StatementRank#PREFERRED} * if they exists or the one with rank {@link StatementRank#NORMAL} * * @return a subset of the current StatementGroup, or null if there are no best statements */ StatementGroup getBestStatements(); /** * Get the property used in the main snak of the {@link Claim} of each * statement in this group. * * @return a PropertyIdValue */ PropertyIdValue getProperty(); /** * Get the subject used in the {@link Claim} of each statement in this * group. * * @return an EntityIdValue */ EntityIdValue getSubject(); /** * Returns a new version of this group, where the given * statement has been added. If some statement in the group * has the same non-empty statement id, then the new statement * will replace it. Otherwise the new statement is appended * at the end of the list. */ StatementGroup withStatement(Statement statement); } StatementRank.java000066400000000000000000000030551444772566300353220ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Enum for the possible ranks of Wikibase Statements. Ranks are used to compare * Statements that have the same subject and main-snak property. *
<p>
* By default, Statements are of "normal" rank. The rank "preferred" can be * given to Statements that should be preferred when using the data without more * specific selection criteria (for example, there can be many population * numbers for one city, but only the most current/accurate one should be shown * by default, hence it should be preferred). The rank "deprecated" is used for * Statements that should not normally be considered, but which are still stored * for some reason (maybe because their status is disputed or because they * record a known wrong claim of a respected source). * * @author Markus Kroetzsch * */ public enum StatementRank { PREFERRED, NORMAL, DEPRECATED } StatementUpdate.java000066400000000000000000000035331444772566300356520ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import org.wikidata.wdtk.datamodel.implementation.StatementUpdateImpl; /** * Collection of statement changes. * * @see StatementDocumentUpdate */ public interface StatementUpdate { /** * Empty update that does not alter or add any statements. */ StatementUpdate EMPTY = new StatementUpdateImpl( Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); /** * Checks whether the update is empty. Empty update will not change or remove * any statements. * * @return {@code true} if the update is empty, {@code false} otherwise */ boolean isEmpty(); /** * Returns statements added to the entity in this update. * * @return list of added statements */ List getAdded(); /** * Returns entity statements modified in this update. * * @return modified statements indexed by statement ID */ Map getReplaced(); /** * Returns IDs of statements removed from the entity in this update. * * @return list of IDs of removed statements */ Set getRemoved(); } StringValue.java000066400000000000000000000017001444772566300350000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ /** * String Values represent a single string. * * @author Markus Kroetzsch * */ public interface StringValue extends Value { /** * Get the string stored in this value. * * @return string value */ String getString(); } TermUpdate.java000066400000000000000000000033101444772566300346060ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.Collections; import java.util.Map; import java.util.Set; import org.wikidata.wdtk.datamodel.implementation.TermUpdateImpl; /** * Collection of changes made to terms (labels, descriptions, ...). */ public interface TermUpdate { /** * Empty update that does not alter or remove any terms. */ TermUpdate EMPTY = new TermUpdateImpl(Collections.emptyList(), Collections.emptyList()); /** * Checks whether the update is empty. An empty update will not change or remove * any terms. * * @return {@code true} if the update is empty, {@code false} otherwise */ boolean isEmpty(); /** * Returns terms added or modified in this update. Existing terms are preserved * if their language code is not listed here. * * @return added or modified terms indexed by language code */ Map<String, MonolingualTextValue> getModified(); /** * Returns language codes of terms removed in this update. * * @return language codes of removed terms */ Set<String> getRemoved(); } TermedDocument.java000066400000000000000000000056331444772566300354650ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.List; import java.util.Map; /** * Interface for EntityDocuments that can be described by terms in several * languages. These terms consist of labels, descriptions, and aliases. * * @author Markus Kroetzsch */ public interface TermedDocument extends LabeledDocument { /** * Return a Map from Wikibase language codes to descriptions. * * @return the map of descriptions */ Map<String, MonolingualTextValue> getDescriptions(); /** * Return a Map from Wikibase language codes to lists of alias labels for a * given language.
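* <p>
* For example, the English aliases of a hypothetical TermedDocument
* {@code doc} could be read as
* <pre>{@code
* List<MonolingualTextValue> enAliases = doc.getAliases().get("en"); // null if there are no "en" aliases
* }</pre>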
* * @return the aliases for this language */ Map<String, List<MonolingualTextValue>> getAliases(); /** * Returns the string description for the given language code, or null if * there is no description for this code. This is a convenience method for * accessing the data that can be obtained via {@link #getDescriptions()}. * * @param languageCode * a string that represents a language * @return the description string or null if it does not exist */ default String findDescription(String languageCode) { MonolingualTextValue value = this.getDescriptions().get(languageCode); return (value != null) ? value.getText() : null; } /** * Returns a copy of this document with an updated revision id. */ @Override TermedDocument withRevisionId(long newRevisionId); /** * Returns a new version of this document with a new label * (which overrides any existing label for this language). */ @Override TermedDocument withLabel(MonolingualTextValue newLabel); /** * Returns a new version of this document with a new description * (which overrides any existing description). */ TermedDocument withDescription(MonolingualTextValue newDescription); /** * Returns a new version of this document with a new list of aliases * for the given language code. Any existing aliases for this language * will be discarded. * * @param language * the language code for which the aliases should be set * @param aliases * the aliases to set for this language. The language codes they * contain should all match the supplied language. */ TermedDocument withAliases(String language, List<MonolingualTextValue> aliases); } TermedDocumentUpdate.java000066400000000000000000000025041444772566300366220ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; import java.util.Map; /** * Collection of changes that can be applied to an entity that has labels, * aliases, and descriptions. */ public interface TermedDocumentUpdate extends LabeledDocumentUpdate { /** * Returns changes in entity descriptions. * * @return update of entity descriptions, possibly empty */ TermUpdate getDescriptions(); /** * Returns changes in entity aliases. All {@link AliasUpdate} instances are * non-empty. If a language code is not in the returned map, aliases for that * language do not change. * * @return changes in aliases */ Map<String, AliasUpdate> getAliases(); } TermedStatementDocument.java000066400000000000000000000031751444772566300373510ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.List; import java.util.Set; /** * This interface just joins {@link TermedDocument} and {@link StatementDocument}. * * It is necessary to introduce this interface to resolve the conflict between * the return types of the withRevisionId method inherited from both interfaces. * @author antonin * */ public interface TermedStatementDocument extends TermedDocument, LabeledStatementDocument { @Override TermedStatementDocument withRevisionId(long newRevisionId); @Override TermedStatementDocument withLabel(MonolingualTextValue newLabel); @Override TermedStatementDocument withDescription(MonolingualTextValue newDescription); @Override TermedStatementDocument withAliases(String language, List<MonolingualTextValue> aliases); @Override TermedStatementDocument withStatement(Statement statement); @Override TermedStatementDocument withoutStatementIds(Set<String> statementIds); } TermedStatementDocumentUpdate.java000066400000000000000000000016771444772566300405170ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.interfaces; /** * Collection of changes that can be applied to an entity that has labels, * aliases, descriptions, and statements. */ public interface TermedStatementDocumentUpdate extends TermedDocumentUpdate, LabeledStatementDocumentUpdate { } TimeValue.java000066400000000000000000000247251444772566300344410ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Time values represent points and intervals in time, and additional * information about their format. Information includes a specific time point, * information about its precision, and the preferred calendar model and * timezone (for display).
Moreover, time values can describe some uncertainty * regarding the exact position in time. This is achieved by tolerance values * that specify how much {@link #getBeforeTolerance() before} or * {@link #getAfterTolerance() after} the given time point an event might have * occurred. *
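* <p>
* For illustration, the uncertainty bounds can be read directly from a value
* (a sketch; {@code t} is a hypothetical TimeValue instance):
* <pre>{@code
* byte precision = t.getPrecision();    // unit in which the tolerances are measured
* int before = t.getBeforeTolerance();  // how many units earlier the event may lie
* int after = t.getAfterTolerance();    // exclusive bound on how many units later
* }</pre>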
* <p>
* Time points cannot describe durations (which are quantities), recurring * events ("1st of May"), or time spans ( * "He reigned from 1697 to 1706, i.e., during every moment of that time span" * ). Intervals expressed by times always encode uncertainty ("He died in the * year 546 BCE, i.e., in some moment within that interval"). *
* <p>
* The main time point of the value generally refers to the proleptic Gregorian * calendar. However, if dates are imprecise (like "Jan 1512" or even "1200") * then one cannot convert this reliably and Wikidata will just keep the value * as entered. *
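* <p>
* Consumers that require Gregorian dates might therefore guard the conversion
* (sketch, using the {@link #toGregorian()} method declared below):
* <pre>{@code
* TimeValue gregorian = t.toGregorian();
* if (gregorian == null) {
*     // not enough information to convert reliably; keep the value as entered
* }
* }</pre>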
* <p>
* "Y0K issue": Neither the Gregorian nor the Julian calendar assume a year 0, * i.e., the year 1 BCE was followed by 1 CE in these calendars. See http://en.wikipedia.org/wiki/Year_zero. Wikibase internally uses the * year 0. This is the same as ISO-8601, where 1 BCE is represented as "0000". * However, note that XML Schema dates (1.0 and 2.0) do not have a year 0, so in * their case 1BCE is represented as "-1". Understanding the difference is * relevant for computing leap years, for computing temporal intervals, and for * exporting data. *
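* <p>
* A sketch of the year numbering difference described above, assuming a plain
* offset conversion when exporting to XML Schema 1.0 style dates:
* <pre>{@code
* long wikibaseYear = t.getYear();  // 1 BCE is stored as year 0, as in ISO-8601
* long xsdYear = wikibaseYear <= 0 ? wikibaseYear - 1 : wikibaseYear;  // 1 BCE becomes -1
* }</pre>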
* <p>
* Timezone information is to be given in the form of a positive or negative * offset with respect to UTC, measured in minutes. This information specifies * the timezone that the time should be displayed in when shown to a user. The * recorded time point is in UTC, so timezone can be ignored for comparing * values. See {@link #getTimezoneOffset()}. * * @author Markus Kroetzsch * */ public interface TimeValue extends Value { /** * IRI of the proleptic Gregorian calendar; often used to specify the * calendar model */ String CM_GREGORIAN_PRO = "http://www.wikidata.org/entity/Q1985727"; /** * IRI of the proleptic Julian calendar; often used to specify the calendar * model */ String CM_JULIAN_PRO = "http://www.wikidata.org/entity/Q1985786"; /** * Precision constant for dates that are precise to the second. */ byte PREC_SECOND = 14; /** * Precision constant for dates that are precise to the minute. */ byte PREC_MINUTE = 13; /** * Precision constant for dates that are precise to the hour. */ byte PREC_HOUR = 12; /** * Precision constant for dates that are precise to the day. */ byte PREC_DAY = 11; /** * Precision constant for dates that are precise to the month. */ byte PREC_MONTH = 10; /** * Precision constant for dates that are precise to the year. */ byte PREC_YEAR = 9; /** * Precision constant for dates that are precise to the decade. */ byte PREC_DECADE = 8; /** * Precision constant for dates that are precise to 100 years. */ byte PREC_100Y = 7; /** * Precision constant for dates that are precise to 1,000 years. */ byte PREC_1KY = 6; /** * Precision constant for dates that are precise to 10,000 years. */ byte PREC_10KY = 5; /** * Precision constant for dates that are precise to 100,000 years. */ byte PREC_100KY = 4; /** * Precision constant for dates that are precise to 1 million years. */ byte PREC_1MY = 3; /** * Precision constant for dates that are precise to 10 million years. */ byte PREC_10MY = 2; /** * Precision constant for dates that are precise to 100 million years. */ byte PREC_100MY = 1; /** * Precision constant for dates that are precise to 10^9 years. */ byte PREC_1GY = 0; /** * Get the year stored for this date. Years in Wikibase can be 0; see "Y0K" * issue in the interface documentation. * * @return year number */ long getYear(); /** * Get the month stored for this date. It will be a number from 1 to 12. * * @return month number */ byte getMonth(); /** * Get the day stored for this date. It will be a number from 1 to 31. * * @return day number */ byte getDay(); /** * Get the hour stored for this date. It will be a number from 0 to 23. * * @return hour number */ byte getHour(); /** * Get the minute stored for this date. It will be a number from 0 to 59. * * @return minute number */ byte getMinute(); /** * Get the seconds stored for this date. The value will be between 0 and 60 * (inclusive) to account for leap seconds. Implementations are not expected * to validate leap seconds but they should provide consistent ordering: the * time 23:59:60 is always before 00:00:00 on the next day. * * @return second number */ byte getSecond(); /** * Get the IRI of the preferred calendar model that should be used to * display this date (and that was presumably used when entering it). This * is usually {@link TimeValue#CM_GREGORIAN_PRO} or * {@link TimeValue#CM_JULIAN_PRO}. 
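* <p>
* For example, {@code TimeValue.CM_JULIAN_PRO.equals(t.getPreferredCalendarModel())}
* could be used to test whether a value is meant to be displayed in the Julian calendar.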
* * @return IRI of the preferred calendar model */ String getPreferredCalendarModel(); /** * Get the {@link ItemIdValue} of the preferred calendar model that should * be used to display this date (and that was presumably used when entering it). * * @throws IllegalArgumentException if the calendar model is not a valid item IRI */ ItemIdValue getPreferredCalendarModelItemId(); /** * Get the precision hint of this date. The return value will be in the * range of {@link TimeValue#PREC_DAY}, ..., {@link TimeValue#PREC_1GY}. * * @return precision hint for this date */ byte getPrecision(); /** * Get the offset in minutes from UTC that should be applied when displaying * this time to users. The recorded time point is always in UTC, so the * timezone can be ignored for comparing values. The offset should be * added to the given time to obtain the intended local value. For * example, an offset of +60 and a time of 10:45:00 should be displayed as * 11:45:00 to the user (ideally with some indication of the shift; time * zone abbreviations like "CET" could be used when matching the given * offset, but the offset might also have values that do not correspond to * any current or modern time zone). Therefore positive offsets are used for * timezones that are to the east of the prime meridian. * * @return minute number (positive or negative) */ int getTimezoneOffset(); /** * Get a tolerance value that specifies how much earlier in time the value * could at most be, measured as a multiple of {@link #getPrecision() * precision}. The value is a non-negative integer. *
* <p>
* For example, for the date 2007-05-12T10:45:00 with precision * {@link TimeValue#PREC_MONTH}, a before-tolerance value of 3 means that * the earliest possible time of this event could have been * 2007-02-12T10:45:00. This information about the uncertainty of time * points can be taken into account in query answering, but simplified * implementations can also ignore it and work with the given (exact) time * point instead. If not set specifically by the user, the before-tolerance * value should be 0, i.e., the given time point marks the earliest possible * time. *
* <p>
* This boundary is inclusive. For example, a date 2014-02-17T00:00:00 with * precision {@link TimeValue#PREC_DAY} and before-tolerance value 1 * specifies a time that is no earlier than 2014-02-16T00:00:00, i.e., up to * one day before the given time. * * @see TimeValue#getAfterTolerance() * * @return a non-negative integer tolerance measured in terms of precision */ int getBeforeTolerance(); /** * Get a tolerance value that specifies how much later in time the value * could at most be, measured as a multiple of {@link #getPrecision() * precision}. The value is a positive integer. *
* <p>
* For example, for the date 2007-05-12T10:45:00 with precision * {@link TimeValue#PREC_MONTH}, an after-tolerance value of 2 means that * the latest possible time of this event could have been strictly before * 2007-07-12T10:45:00. This information about the uncertainty of time * points can be taken into account in query answering, but simplified * implementations can also ignore it and work with the given (exact) time * point instead. If not set specifically by the user, the after-tolerance * value should be 1, i.e., the interval of uncertainty is exactly the * length given by precision. However, because most (if not all) other * known implementations of the data model got this detail wrong and use 0 * instead, we are also using 0 as a default value. This issue is tracked * at https://phabricator.wikimedia.org/T194869. *
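* <p>
* Given this caveat, a defensive consumer might normalize the default (a
* sketch reflecting the convention described above):
* <pre>{@code
* int after = t.getAfterTolerance() == 0 ? 1 : t.getAfterTolerance();
* }</pre>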
* <p>
* The boundary is exclusive. For example, a date 2013-02-01T00:00:00 with * precision {@link TimeValue#PREC_MONTH} and after-tolerance value 1 and * before-tolerance value of 0 specifies a time "sometime in February 2013", * but it excludes any time in March 2013. The after-tolerance must not be 0 * (which would make no sense if the bound is exclusive, and which is not * needed since precision up to a single second can be specified anyway). * * @see TimeValue#getBeforeTolerance() * * @return a non-zero, positive integer tolerance measured in terms of * precision */ int getAfterTolerance(); /** * Convert the value to the Gregorian calendar, if possible. * This conversion can fail if not enough information is available * (for example, we need at least day precision to convert from Julian to Gregorian). * * @return a TimeValue that uses the Gregorian calendar, or null if the conversion failed. */ TimeValue toGregorian(); } UnsupportedEntityIdValue.java000066400000000000000000000023071444772566300375400ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2019 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Represents an entity id value of an unsupported type. * We can still "deserialize" it by just storing its * JSON representation, so that it can be serialized * back to its original representation. * This avoids parsing failures on documents containing * these values. * * @author Antonin Delpeuch */ public interface UnsupportedEntityIdValue extends EntityIdValue { /** * The type of entity as represented in the JSON serialization. */ String getEntityTypeJsonString(); } UnsupportedValue.java000066400000000000000000000024251444772566300360670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2019 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Represents a value with an unsupported datatype. * We can still "deserialize" it by just storing its * JSON representation, so that it can be serialized * back to its original representation. * This avoids parsing failures on documents containing * these values.
* * @author Antonin Delpeuch */ public interface UnsupportedValue extends Value { /** * Returns the type string found in the JSON representation * of this value. * * @return * the value of "type" in the JSON representation of this value. */ String getTypeJsonString(); } Value.java000066400000000000000000000025061444772566300336160ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A Value is the most general kind of object in the Wikibase datamodel. It can * represent anything that can be the value of a user-defined property or of a * system property that is not represented to the user (e.g., the datatype or * list of aliases might be represented as a value, even though there is no user * property with values of this type). * * @author Markus Kroetzsch * */ public interface Value { /** * Accept a ValueVisitor and return its output. * * @param valueVisitor * the ValueVisitor * @return output of the visitor */ <T> T accept(ValueVisitor<T> valueVisitor); } ValueSnak.java000066400000000000000000000020171444772566300344300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * ValueSnaks represent property-value pairs, where the property is represented * by a {@link PropertyIdValue} and the value is represented by a {@link Value}. * * @author Markus Kroetzsch * */ public interface ValueSnak extends Snak { /** * Get the {@link Value} of this Snak */ Value getValue(); } ValueVisitor.java000066400000000000000000000055251444772566300352020ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A visitor for the various types of values in the datamodel. This should be * used to avoid any type casting or instanceof checks when processing values. *
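* <p>
* For illustration, a minimal visitor that renders any value as a plain
* string might be sketched as follows ({@code someValue} is any Value; the
* rendering choices are arbitrary):
* <pre>{@code
* ValueVisitor<String> toText = new ValueVisitor<String>() {
*     public String visit(EntityIdValue value) { return value.getId(); }
*     public String visit(GlobeCoordinatesValue value) { return value.toString(); }
*     public String visit(MonolingualTextValue value) { return value.getText(); }
*     public String visit(QuantityValue value) { return value.getNumericValue().toString(); }
*     public String visit(StringValue value) { return value.getString(); }
*     public String visit(TimeValue value) { return value.toString(); }
*     public String visit(UnsupportedValue value) { return value.getTypeJsonString(); }
* };
* String text = someValue.accept(toText);
* }</pre>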
* <p>
* The visitor does not distinguish several types of {@link EntityIdValue}, * since these are supposed to be extensible and therefore cannot be fixed in a * visitor interface. * * @author Markus Kroetzsch * * @param <T> * the return type of the visitor */ public interface ValueVisitor<T> { /** * Visits an EntityIdValue and returns a result. In practice, only specific * subtypes of EntityIdValue are used, such as {@link ItemIdValue} and * {@link PropertyIdValue}. Since the set of possible subtypes can be * extended by extensions of Wikibase, the visitor only visits the general * (abstract) supertype. Implementations will have to decide if the given * specific type is supported and what to do with it. * * @param value * the value to visit * @return the result for this value */ T visit(EntityIdValue value); /** * Visits a GlobeCoordinatesValue and returns a result. * * @param value * the value to visit * @return the result for this value */ T visit(GlobeCoordinatesValue value); /** * Visits a MonolingualTextValue and returns a result. * * @param value * the value to visit * @return the result for this value */ T visit(MonolingualTextValue value); /** * Visits a QuantityValue and returns a result. * * @param value * the value to visit * @return the result for this value */ T visit(QuantityValue value); /** * Visits a StringValue and returns a result. * * @param value * the value to visit * @return the result for this value */ T visit(StringValue value); /** * Visits a TimeValue and returns a result. * * @param value * the value to visit * @return the result for this value */ T visit(TimeValue value); /** * Visits an UnsupportedValue and returns a result. * * @param value * the value to visit * @return the result for this value */ T visit(UnsupportedValue value); } WikimediaLanguageCodes.java000066400000000000000000000577361444772566300371050ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfacespackage org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.HashMap; import java.util.Map; /** * This class helps to interpret Wikimedia language codes in terms of official * BCP 47 language * codes. Unfortunately, the two systems don't agree in all cases. This class * incorporates several exceptions, where Wikimedia uses non-standard language * codes, including but not limited to the documented exceptional * language codes. When available, the IANA-registered codes are used, but in some cases new codes are * constructed according to the standard rules.
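* <p>
* Typical usage (an illustrative sketch based on the mappings below):
* <pre>{@code
* WikimediaLanguageCodes.getLanguageCode("als");             // "gsw"
* WikimediaLanguageCodes.fixLanguageCodeIfDeprecated("no");  // "nb"
* WikimediaLanguageCodes.getLanguageCode("xyz");             // throws IllegalArgumentException
* }</pre>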
* * @author Markus Kroetzsch * */ public class WikimediaLanguageCodes { private static Map LANGUAGE_CODES = new HashMap<>(); static { LANGUAGE_CODES.put("aa", "aa"); // Afar LANGUAGE_CODES.put("ab", "ab"); // Abkhazian LANGUAGE_CODES.put("ace", "ace"); LANGUAGE_CODES.put("abe", "abe"); LANGUAGE_CODES.put("aeb-arab", "aeb-Arab"); LANGUAGE_CODES.put("ady", "ady"); LANGUAGE_CODES.put("af", "af"); LANGUAGE_CODES.put("ak", "ak"); LANGUAGE_CODES.put("aln", "aln"); LANGUAGE_CODES.put("als", "gsw"); // Swiss German (Alsatian/Alemannic) LANGUAGE_CODES.put("am", "am"); LANGUAGE_CODES.put("ami", "ami"); LANGUAGE_CODES.put("an", "an"); LANGUAGE_CODES.put("ang", "ang"); LANGUAGE_CODES.put("anp", "anp"); LANGUAGE_CODES.put("ar", "ar"); LANGUAGE_CODES.put("arc", "arc"); LANGUAGE_CODES.put("arn", "arn"); LANGUAGE_CODES.put("arq", "arq"); LANGUAGE_CODES.put("ary", "ary"); LANGUAGE_CODES.put("arz", "arz"); LANGUAGE_CODES.put("as", "as"); LANGUAGE_CODES.put("ast", "ast"); LANGUAGE_CODES.put("atj", "atj"); LANGUAGE_CODES.put("av", "av"); LANGUAGE_CODES.put("avk", "avk"); LANGUAGE_CODES.put("ay", "ay"); LANGUAGE_CODES.put("az", "az"); LANGUAGE_CODES.put("azb", "azb"); LANGUAGE_CODES.put("ba", "ba"); LANGUAGE_CODES.put("ban", "ban"); LANGUAGE_CODES.put("bar", "bar"); LANGUAGE_CODES.put("bat-smg", "sgs"); // TODO might be redundant // (Samogitian) LANGUAGE_CODES.put("bbc", "bbc"); LANGUAGE_CODES.put("bbc-latn", "bbc-Latn"); // Batak Toba, Latin script LANGUAGE_CODES.put("bcc", "bcc"); LANGUAGE_CODES.put("bcl", "bcl"); LANGUAGE_CODES.put("be", "be"); LANGUAGE_CODES.put("be-tarask", "be-tarask"); // Belarusian in // Taraskievica // orthography LANGUAGE_CODES.put("be-x-old", "be-tarask"); // TODO might be redundant LANGUAGE_CODES.put("bg", "bg"); LANGUAGE_CODES.put("bh", "bh"); LANGUAGE_CODES.put("bho", "bho"); LANGUAGE_CODES.put("bi", "bi"); LANGUAGE_CODES.put("bjn", "bjn"); LANGUAGE_CODES.put("bm", "bm"); LANGUAGE_CODES.put("bn", "bn"); LANGUAGE_CODES.put("bnn", "bnn"); LANGUAGE_CODES.put("bo", "bo"); LANGUAGE_CODES.put("bpy", "bpy"); LANGUAGE_CODES.put("bqi", "bqi"); LANGUAGE_CODES.put("br", "br"); LANGUAGE_CODES.put("brh", "brh"); LANGUAGE_CODES.put("brx", "brx"); LANGUAGE_CODES.put("bs", "bs"); LANGUAGE_CODES.put("bto", "bto"); LANGUAGE_CODES.put("bug", "bug"); LANGUAGE_CODES.put("bxr", "bxr"); LANGUAGE_CODES.put("ca", "ca"); LANGUAGE_CODES.put("cbk-zam", "cbk-x-zam"); // Chavacano de Zamboanga LANGUAGE_CODES.put("cdo", "cdo"); LANGUAGE_CODES.put("ceb", "ceb"); LANGUAGE_CODES.put("ce", "ce"); LANGUAGE_CODES.put("ch", "ch"); LANGUAGE_CODES.put("chn", "chn"); LANGUAGE_CODES.put("cho", "cho"); LANGUAGE_CODES.put("chr", "chr"); LANGUAGE_CODES.put("chy", "chy"); LANGUAGE_CODES.put("ckb", "ckb"); LANGUAGE_CODES.put("cnr", "cnr"); LANGUAGE_CODES.put("co", "co"); LANGUAGE_CODES.put("cop", "cop"); LANGUAGE_CODES.put("cps", "cps"); LANGUAGE_CODES.put("cr", "cr"); LANGUAGE_CODES.put("crh", "crh-Latn"); // TODO might be redundant LANGUAGE_CODES.put("crh-cyrl", "crh-Cyrl"); // Crimean Tatar/Crimean // Turkish; script Cyrillic LANGUAGE_CODES.put("crh-latn", "crh-Latn"); // Crimean Tatar/Crimean // Turkish; script Latin LANGUAGE_CODES.put("csb", "csb"); LANGUAGE_CODES.put("cs", "cs"); LANGUAGE_CODES.put("cu", "cu"); LANGUAGE_CODES.put("cv", "cv"); LANGUAGE_CODES.put("cy", "cy"); LANGUAGE_CODES.put("da", "da"); LANGUAGE_CODES.put("de-at", "de-AT"); // German, Austria LANGUAGE_CODES.put("de-ch", "de-CH"); // German, Switzerland LANGUAGE_CODES.put("de", "de"); // German LANGUAGE_CODES.put("de-formal", "de-x-formal"); // custom 
private subtag // for formal German LANGUAGE_CODES.put("din", "din"); LANGUAGE_CODES.put("diq", "diq"); LANGUAGE_CODES.put("dsb", "dsb"); LANGUAGE_CODES.put("dtp", "dtp"); LANGUAGE_CODES.put("dty", "dty"); LANGUAGE_CODES.put("dv", "dv"); LANGUAGE_CODES.put("dz", "dz"); LANGUAGE_CODES.put("ee", "ee"); LANGUAGE_CODES.put("egl", "egl"); LANGUAGE_CODES.put("el", "el"); LANGUAGE_CODES.put("eml", "eml"); // Emilian-Romagnol; 'eml' is now // retired and split into egl // (Emilian) and rgn (Romagnol), but // eml will remain a valid BCP 47 // language tag indefinitely (see // bugzilla:34217) LANGUAGE_CODES.put("en-ca", "en-CA"); // English; Canada LANGUAGE_CODES.put("en", "en"); // English LANGUAGE_CODES.put("en-gb", "en-GB"); // English; Great Britain LANGUAGE_CODES.put("eo", "eo"); // Esperanto LANGUAGE_CODES.put("es", "es"); LANGUAGE_CODES.put("et", "et"); LANGUAGE_CODES.put("ett", "ett"); LANGUAGE_CODES.put("eu", "eu"); LANGUAGE_CODES.put("ext", "ext"); LANGUAGE_CODES.put("eya", "eya"); LANGUAGE_CODES.put("fa", "fa"); LANGUAGE_CODES.put("ff", "ff"); LANGUAGE_CODES.put("fi", "fi"); LANGUAGE_CODES.put("fit", "fit"); // Tornedalen Finnish TODO check LANGUAGE_CODES.put("fiu-vro", "vro"); // TODO might be redundant LANGUAGE_CODES.put("fj", "fj"); LANGUAGE_CODES.put("fkv", "fkv"); LANGUAGE_CODES.put("fo", "fo"); LANGUAGE_CODES.put("fos", "fos"); LANGUAGE_CODES.put("frc", "frc"); LANGUAGE_CODES.put("fr", "fr"); LANGUAGE_CODES.put("fr-ca", "fr-CA"); LANGUAGE_CODES.put("frm", "frm"); LANGUAGE_CODES.put("fro", "fro"); LANGUAGE_CODES.put("frp", "frp"); LANGUAGE_CODES.put("frr", "frr"); LANGUAGE_CODES.put("fuf", "fuf"); LANGUAGE_CODES.put("fur", "fur"); LANGUAGE_CODES.put("fy", "fy"); LANGUAGE_CODES.put("ga", "ga"); LANGUAGE_CODES.put("gag", "gag"); LANGUAGE_CODES.put("gan", "gan"); // Gan Chinese; TODO which script? 
LANGUAGE_CODES.put("gan-hans", "gan-Hans"); // Gan Chinese; script Han // (simplified) LANGUAGE_CODES.put("gan-hant", "gan-Hant"); // Gan Chinese; script Han // (traditional) LANGUAGE_CODES.put("gd", "gd"); LANGUAGE_CODES.put("gez", "gez"); LANGUAGE_CODES.put("gl", "gl"); LANGUAGE_CODES.put("glk", "glk"); LANGUAGE_CODES.put("gn", "gn"); LANGUAGE_CODES.put("gom", "gom"); LANGUAGE_CODES.put("gor", "gor"); LANGUAGE_CODES.put("got", "got"); LANGUAGE_CODES.put("grc", "grc"); LANGUAGE_CODES.put("gsw", "gsw"); LANGUAGE_CODES.put("gu", "gu"); LANGUAGE_CODES.put("gv", "gv"); LANGUAGE_CODES.put("ha", "ha"); LANGUAGE_CODES.put("hai", "hai"); LANGUAGE_CODES.put("hak", "hak"); LANGUAGE_CODES.put("haw", "haw"); LANGUAGE_CODES.put("he", "he"); LANGUAGE_CODES.put("hi", "hi"); LANGUAGE_CODES.put("hif", "hif"); LANGUAGE_CODES.put("hif-deva", "hif-Deva"); LANGUAGE_CODES.put("hif-latn", "hif-Latn"); LANGUAGE_CODES.put("hil", "hil"); LANGUAGE_CODES.put("ho", "ho"); LANGUAGE_CODES.put("hr", "hr"); LANGUAGE_CODES.put("hrx", "hrx"); LANGUAGE_CODES.put("hsb", "hsb"); LANGUAGE_CODES.put("ht", "ht"); LANGUAGE_CODES.put("hu", "hu"); LANGUAGE_CODES.put("hy", "hy"); LANGUAGE_CODES.put("hz", "hz"); LANGUAGE_CODES.put("ia", "ia"); LANGUAGE_CODES.put("id", "id"); LANGUAGE_CODES.put("ie", "ie"); LANGUAGE_CODES.put("ig", "ig"); LANGUAGE_CODES.put("ii", "ii"); // Sichuan Yi LANGUAGE_CODES.put("ike-cans", "ike-Cans"); // Eastern Canadian // Inuktitut, Unified // Canadian Aboriginal // Syllabics script LANGUAGE_CODES.put("ike-latn", "ike-Latn"); // Eastern Canadian // Inuktitut, Latin script LANGUAGE_CODES.put("ik", "ik"); LANGUAGE_CODES.put("ilo", "ilo"); LANGUAGE_CODES.put("io", "io"); LANGUAGE_CODES.put("is", "is"); LANGUAGE_CODES.put("it", "it"); LANGUAGE_CODES.put("iu", "iu"); LANGUAGE_CODES.put("ja", "ja"); LANGUAGE_CODES.put("jam", "jam"); LANGUAGE_CODES.put("jbo", "jbo"); LANGUAGE_CODES.put("jut", "jut"); LANGUAGE_CODES.put("jv", "jv"); LANGUAGE_CODES.put("kaa", "kaa"); LANGUAGE_CODES.put("kab", "kab"); LANGUAGE_CODES.put("ka", "ka"); LANGUAGE_CODES.put("kbd", "kbd"); LANGUAGE_CODES.put("kbp", "kbp"); LANGUAGE_CODES.put("kea", "kea"); LANGUAGE_CODES.put("kg", "kg"); LANGUAGE_CODES.put("ki", "ki"); LANGUAGE_CODES.put("kj", "kj"); LANGUAGE_CODES.put("kjh", "kjh"); LANGUAGE_CODES.put("kiu", "kiu"); LANGUAGE_CODES.put("kk-arab", "kk-Arab");// Kazakh; script Arabic LANGUAGE_CODES.put("kk-cn", "kk-CN"); // Kazakh; PR China LANGUAGE_CODES.put("kk-cyrl", "kk-Cyrl"); // Kazakh; script Cyrillic; // TODO IANA has kk with // Suppress-Script: Cyrl, so // it should be the same as // kk LANGUAGE_CODES.put("kk", "kk"); // Kazakh LANGUAGE_CODES.put("kk-kz", "kk-KZ"); // Kazakh; Kazakhstan LANGUAGE_CODES.put("kk-latn", "kk-Latn"); // Kazakh; script Latin LANGUAGE_CODES.put("kk-tr", "kk-TR"); // Kazakh; Turkey LANGUAGE_CODES.put("kl", "kl"); LANGUAGE_CODES.put("km", "km"); LANGUAGE_CODES.put("kn", "kn"); LANGUAGE_CODES.put("koi", "koi"); LANGUAGE_CODES.put("ko", "ko"); LANGUAGE_CODES.put("ko-kp", "ko-KP"); // Korean; Democratic People's // Republic of Korea LANGUAGE_CODES.put("koy", "koy"); LANGUAGE_CODES.put("kr", "kr"); LANGUAGE_CODES.put("krc", "krc"); LANGUAGE_CODES.put("kri", "kri"); LANGUAGE_CODES.put("krj", "krj"); LANGUAGE_CODES.put("krl", "krl"); LANGUAGE_CODES.put("krx", "krx"); LANGUAGE_CODES.put("ksh", "mis-x-rip"); // Ripuarian (the code "ksh" // refers to Koelsch, a subset // of Ripuarian) LANGUAGE_CODES.put("ks", "ks"); LANGUAGE_CODES.put("ku-arab", "ku-Arab"); // Kurdish; script Arabic LANGUAGE_CODES.put("ku", "ku"); // 
Kurdish; TODO this is a // macrolanguage; anything more // specific? TODO all uses seem to be in // Latin -- should this be ku-Latn then? LANGUAGE_CODES.put("ku-latn", "ku-Latn"); // Kurdish; script Latin LANGUAGE_CODES.put("kv", "kv"); LANGUAGE_CODES.put("kw", "kw"); LANGUAGE_CODES.put("ky", "ky"); LANGUAGE_CODES.put("la", "la"); LANGUAGE_CODES.put("lad", "lad"); LANGUAGE_CODES.put("lag", "lag"); LANGUAGE_CODES.put("lbe", "lbe"); LANGUAGE_CODES.put("lb", "lb"); LANGUAGE_CODES.put("lez", "lez"); LANGUAGE_CODES.put("lfn", "lfn"); LANGUAGE_CODES.put("lg", "lg"); LANGUAGE_CODES.put("lij", "lij"); LANGUAGE_CODES.put("li", "li"); LANGUAGE_CODES.put("liv", "liv"); LANGUAGE_CODES.put("lkt", "lkt"); LANGUAGE_CODES.put("lld", "lld"); LANGUAGE_CODES.put("lmo", "lmo"); LANGUAGE_CODES.put("ln", "ln"); LANGUAGE_CODES.put("lo", "lo"); LANGUAGE_CODES.put("loz", "loz"); LANGUAGE_CODES.put("lrc", "lrc"); LANGUAGE_CODES.put("ltg", "ltg"); LANGUAGE_CODES.put("lt", "lt"); LANGUAGE_CODES.put("lus", "lus"); LANGUAGE_CODES.put("lv", "lv"); LANGUAGE_CODES.put("lzh", "lzh"); // Literary Chinese LANGUAGE_CODES.put("lzz", "lzz"); LANGUAGE_CODES.put("mai", "mai"); LANGUAGE_CODES.put("map-bms", "jv-x-bms"); // Basa Banyumasan has no // code; jv is a superset // (Javanese) LANGUAGE_CODES.put("mdf", "mdf"); LANGUAGE_CODES.put("mg", "mg"); LANGUAGE_CODES.put("mh", "mh"); LANGUAGE_CODES.put("mhr", "mhr"); LANGUAGE_CODES.put("mi", "mi"); LANGUAGE_CODES.put("mis", "mis"); LANGUAGE_CODES.put("min", "min"); LANGUAGE_CODES.put("mk", "mk"); LANGUAGE_CODES.put("ml", "ml"); LANGUAGE_CODES.put("mn", "mn"); LANGUAGE_CODES.put("mnc", "mnc"); LANGUAGE_CODES.put("mo", "mo"); LANGUAGE_CODES.put("moe", "moe"); LANGUAGE_CODES.put("mrj", "mrj"); LANGUAGE_CODES.put("mr", "mr"); LANGUAGE_CODES.put("ms", "ms"); LANGUAGE_CODES.put("mt", "mt"); LANGUAGE_CODES.put("mul", "mul"); LANGUAGE_CODES.put("mus", "mus"); LANGUAGE_CODES.put("mwl", "mwl"); LANGUAGE_CODES.put("my", "my"); LANGUAGE_CODES.put("myv", "myv"); LANGUAGE_CODES.put("mzn", "mzn"); LANGUAGE_CODES.put("nah", "nah"); LANGUAGE_CODES.put("na", "na"); LANGUAGE_CODES.put("nan", "nan"); LANGUAGE_CODES.put("nap", "nap"); LANGUAGE_CODES.put("nb", "nb"); LANGUAGE_CODES.put("nds", "nds"); // Low German LANGUAGE_CODES.put("nds-nl", "nds-NL"); // Low German, Netherlands; TODO // might be redundant (nds might // be the same) LANGUAGE_CODES.put("ne", "ne"); LANGUAGE_CODES.put("new", "new"); LANGUAGE_CODES.put("ng", "ng"); LANGUAGE_CODES.put("nui", "nui"); LANGUAGE_CODES.put("nl-informal", "nl-x-informal"); // custom private // subtag for // informal Dutch LANGUAGE_CODES.put("nl", "nl"); LANGUAGE_CODES.put("nn", "nn"); LANGUAGE_CODES.put("no", "no"); // TODO possibly this is "nb" (Norwegian // Bokmål); but current dumps have // different values for "nb" and "no" in // some cases LANGUAGE_CODES.put("non", "non"); LANGUAGE_CODES.put("nov", "nov"); LANGUAGE_CODES.put("niu", "niu"); LANGUAGE_CODES.put("nr", "nr"); LANGUAGE_CODES.put("nrm", "fr-x-nrm"); // Norman; no individual code; // lumped with French in ISO // 639/3 LANGUAGE_CODES.put("nso", "nso"); LANGUAGE_CODES.put("nv", "nv"); LANGUAGE_CODES.put("nxm", "nxm"); LANGUAGE_CODES.put("ny", "ny"); LANGUAGE_CODES.put("nys", "nys"); LANGUAGE_CODES.put("oc", "oc"); LANGUAGE_CODES.put("olo", "olo"); LANGUAGE_CODES.put("om", "om"); LANGUAGE_CODES.put("ood", "ood"); LANGUAGE_CODES.put("or", "or"); LANGUAGE_CODES.put("os", "os"); LANGUAGE_CODES.put("otk", "otk"); LANGUAGE_CODES.put("pag", "pag"); LANGUAGE_CODES.put("pam", "pam"); LANGUAGE_CODES.put("pa", 
"pa"); LANGUAGE_CODES.put("pap", "pap"); LANGUAGE_CODES.put("pcd", "pcd"); LANGUAGE_CODES.put("pdc", "pdc"); LANGUAGE_CODES.put("pdt", "pdt"); LANGUAGE_CODES.put("pfl", "pfl"); LANGUAGE_CODES.put("pih", "pih"); LANGUAGE_CODES.put("pi", "pi"); LANGUAGE_CODES.put("pjt", "pjt"); LANGUAGE_CODES.put("pl", "pl"); LANGUAGE_CODES.put("pms", "pms"); LANGUAGE_CODES.put("pnb", "pnb"); LANGUAGE_CODES.put("pnt", "pnt"); LANGUAGE_CODES.put("ppu", "ppu"); LANGUAGE_CODES.put("prg", "prg"); LANGUAGE_CODES.put("ps", "ps"); LANGUAGE_CODES.put("pt-br", "pt-BR"); // Portuguese, Brazil LANGUAGE_CODES.put("pt", "pt"); // Portuguese LANGUAGE_CODES.put("pwd", "pwd"); LANGUAGE_CODES.put("pyu", "pyu"); LANGUAGE_CODES.put("qu", "qu"); LANGUAGE_CODES.put("quc", "quc"); LANGUAGE_CODES.put("qug", "qug"); LANGUAGE_CODES.put("rgn", "rgn"); LANGUAGE_CODES.put("rif", "rif"); LANGUAGE_CODES.put("rm", "rm"); LANGUAGE_CODES.put("rmy", "rmy"); LANGUAGE_CODES.put("rn", "rn"); LANGUAGE_CODES.put("roa-rup", "rup"); // TODO might be redundant LANGUAGE_CODES.put("roa-tara", "it-x-tara"); // Tarantino; no language // code, ISO 639-3 lumps // it with Italian LANGUAGE_CODES.put("ro", "ro"); LANGUAGE_CODES.put("ru", "ru"); LANGUAGE_CODES.put("rue", "rue"); LANGUAGE_CODES.put("rup", "rup"); // Macedo-Romanian/Aromanian LANGUAGE_CODES.put("ruq-latn", "ruq-Latn"); LANGUAGE_CODES.put("rw", "rw"); LANGUAGE_CODES.put("rwr", "rwr"); LANGUAGE_CODES.put("sah", "sah"); LANGUAGE_CODES.put("sa", "sa"); LANGUAGE_CODES.put("sat", "sat"); LANGUAGE_CODES.put("scn", "scn"); LANGUAGE_CODES.put("sco", "sco"); LANGUAGE_CODES.put("sc", "sc"); LANGUAGE_CODES.put("sd", "sd"); LANGUAGE_CODES.put("sdc", "sdc"); LANGUAGE_CODES.put("se", "se"); LANGUAGE_CODES.put("sei", "sei"); LANGUAGE_CODES.put("sg", "sg"); LANGUAGE_CODES.put("sgs", "sgs"); LANGUAGE_CODES.put("sh", "sh"); // Serbo-Croatian; macrolanguage, not modern but a valid BCP 47 tag LANGUAGE_CODES.put("shi", "shi"); LANGUAGE_CODES.put("shi-latn", "shi-Latn"); LANGUAGE_CODES.put("shy", "shy"); LANGUAGE_CODES.put("simple", "en-x-simple"); // custom private subtag // for simple English LANGUAGE_CODES.put("si", "si"); LANGUAGE_CODES.put("sjd", "sjd"); LANGUAGE_CODES.put("sje", "sje"); LANGUAGE_CODES.put("sjm", "sjm"); LANGUAGE_CODES.put("sju", "sju"); LANGUAGE_CODES.put("sk", "sk"); LANGUAGE_CODES.put("sl", "sl"); LANGUAGE_CODES.put("sli", "sli"); LANGUAGE_CODES.put("sm", "sm"); LANGUAGE_CODES.put("sma", "sma"); LANGUAGE_CODES.put("smj", "smj"); LANGUAGE_CODES.put("smn", "smn"); LANGUAGE_CODES.put("sms", "sms"); LANGUAGE_CODES.put("sn", "sn"); LANGUAGE_CODES.put("so", "so"); LANGUAGE_CODES.put("sq", "sq"); LANGUAGE_CODES.put("sr-ec", "sr-Cyrl"); // Serbian; Cyrillic script // (might change if dialect // codes are added to IANA) LANGUAGE_CODES.put("sr-el", "sr-Latn"); // Serbian; Latin script (might // change if dialect codes are // added to IANA) LANGUAGE_CODES.put("sr", "sr"); // Serbian TODO should probably be // sr-Cyrl too? 
LANGUAGE_CODES.put("srn", "srn"); LANGUAGE_CODES.put("srq", "srq"); LANGUAGE_CODES.put("ss", "ss"); LANGUAGE_CODES.put("ssf", "ssf"); LANGUAGE_CODES.put("stq", "stq"); LANGUAGE_CODES.put("st", "st"); LANGUAGE_CODES.put("su", "su"); LANGUAGE_CODES.put("sv", "sv"); LANGUAGE_CODES.put("sw", "sw"); LANGUAGE_CODES.put("szl", "szl"); LANGUAGE_CODES.put("ta", "ta"); LANGUAGE_CODES.put("tcy", "tcy"); LANGUAGE_CODES.put("te", "te"); LANGUAGE_CODES.put("tet", "tet"); LANGUAGE_CODES.put("tg", "tg"); LANGUAGE_CODES.put("tg-latn", "tg-Latn"); // Tajik; script Latin LANGUAGE_CODES.put("tg-cyrl", "tg-Cyrl"); // Tajik; script Cyrillic LANGUAGE_CODES.put("th", "th"); LANGUAGE_CODES.put("ti", "ti"); LANGUAGE_CODES.put("tk", "tk"); LANGUAGE_CODES.put("tl", "tl"); LANGUAGE_CODES.put("tn", "tn"); LANGUAGE_CODES.put("tokipona", "mis-x-tokipona"); // Tokipona, a // constructed // language without // a code LANGUAGE_CODES.put("to", "to"); LANGUAGE_CODES.put("tpi", "tpi"); LANGUAGE_CODES.put("tr", "tr"); LANGUAGE_CODES.put("trv", "trv"); LANGUAGE_CODES.put("ts", "ts"); LANGUAGE_CODES.put("tt", "tt"); // Tatar LANGUAGE_CODES.put("tt-cyrl", "tt-Cyrl"); // Tatar; Cyrillic script LANGUAGE_CODES.put("tt-latn", "tt-Latn"); // Tatar; Latin script LANGUAGE_CODES.put("tum", "tum"); LANGUAGE_CODES.put("tw", "tw"); LANGUAGE_CODES.put("ty", "ty"); LANGUAGE_CODES.put("tyv", "tyv"); LANGUAGE_CODES.put("tzl", "tzl"); LANGUAGE_CODES.put("udm", "udm"); LANGUAGE_CODES.put("ug", "ug"); // Uyghur LANGUAGE_CODES.put("ug-arab", "ug-Arab"); // Uyghur, Arab script LANGUAGE_CODES.put("ug-latn", "ug-Latn"); // Uyghur, Latin script LANGUAGE_CODES.put("uk", "uk"); LANGUAGE_CODES.put("und", "und"); LANGUAGE_CODES.put("umu", "umu"); LANGUAGE_CODES.put("ur", "ur"); LANGUAGE_CODES.put("uun", "uun"); LANGUAGE_CODES.put("uz", "uz"); LANGUAGE_CODES.put("tru", "tru"); LANGUAGE_CODES.put("vec", "vec"); LANGUAGE_CODES.put("vep", "vep"); LANGUAGE_CODES.put("ve", "ve"); LANGUAGE_CODES.put("vi", "vi"); LANGUAGE_CODES.put("vls", "vls"); LANGUAGE_CODES.put("vmf", "vmf"); LANGUAGE_CODES.put("vo", "vo"); LANGUAGE_CODES.put("vot", "vot"); LANGUAGE_CODES.put("vro", "vro"); LANGUAGE_CODES.put("war", "war"); LANGUAGE_CODES.put("wa", "wa"); LANGUAGE_CODES.put("wo", "wo"); LANGUAGE_CODES.put("wuu", "wuu"); LANGUAGE_CODES.put("xal", "xal"); LANGUAGE_CODES.put("xh", "xh"); LANGUAGE_CODES.put("xmf", "xmf"); LANGUAGE_CODES.put("xpu", "xpu"); LANGUAGE_CODES.put("yap", "yap"); LANGUAGE_CODES.put("yi", "yi"); LANGUAGE_CODES.put("yo", "yo"); LANGUAGE_CODES.put("yue", "yue"); // Cantonese LANGUAGE_CODES.put("za", "za"); LANGUAGE_CODES.put("zea", "zea"); LANGUAGE_CODES.put("zh-classical", "lzh"); // TODO might be redundant LANGUAGE_CODES.put("zh-cn", "zh-CN"); // Chinese, PRC LANGUAGE_CODES.put("zh-hans", "zh-Hans"); // Chinese; script Han // (simplified) LANGUAGE_CODES.put("zh-hant", "zh-Hant"); // Chinese; script Han // (traditional) LANGUAGE_CODES.put("zh-hk", "zh-HK"); // Chinese, Hong Kong LANGUAGE_CODES.put("zh-min-nan", "nan"); // TODO might be redundant LANGUAGE_CODES.put("zh-mo", "zh-MO"); // Chinese, Macao LANGUAGE_CODES.put("zh-my", "zh-MY"); // Chinese, Malaysia LANGUAGE_CODES.put("zh-sg", "zh-SG"); // Chinese, Singapore LANGUAGE_CODES.put("zh-tw", "zh-TW"); // Chinese, Taiwan, Province of // China LANGUAGE_CODES.put("zh-yue", "yue"); // TODO might be redundant LANGUAGE_CODES.put("zh", "zh"); // Chinese; TODO zh is a macrolanguage; // should this be cmn? Also, is this the // same as zh-Hans or zh-Hant? 
LANGUAGE_CODES.put("zu", "zu"); // Zulu LANGUAGE_CODES.put("zun", "zun"); LANGUAGE_CODES.put("zxx", "zxx"); } static Map<String, String> DEPRECATED_LANGUAGE_CODES = new HashMap<>(); static { /* * Source: * https://www.mediawiki.org/wiki/Manual:$wgExtraLanguageCodes */ DEPRECATED_LANGUAGE_CODES.put("bh","bho"); // Bihari language family DEPRECATED_LANGUAGE_CODES.put("no","nb"); // Norwegian language family DEPRECATED_LANGUAGE_CODES.put("simple","en"); // Simple English /* * Source: * https://www.mediawiki.org/wiki/Manual:$wgDummyLanguageCodes * The ones already included above have been omitted, as well as "qqq" and "qqx". */ DEPRECATED_LANGUAGE_CODES.put("als", "gsw"); DEPRECATED_LANGUAGE_CODES.put("bat-smg", "sgs"); DEPRECATED_LANGUAGE_CODES.put("be-x-old", "be-tarask"); DEPRECATED_LANGUAGE_CODES.put("fiu-vro", "vro"); DEPRECATED_LANGUAGE_CODES.put("roa-rup", "rup"); DEPRECATED_LANGUAGE_CODES.put("zh-classical", "lzh"); DEPRECATED_LANGUAGE_CODES.put("zh-min-nan", "nan"); DEPRECATED_LANGUAGE_CODES.put("zh-yue", "yue"); } /** * Get a BCP 47 * language code for the given Wikimedia language code. * * @param wikimediaLanguageCode * the language code as used by Wikimedia * @return the BCP 47 language code * @throws IllegalArgumentException * if the given Wikimedia language code is not known. In * particular, the method will not assume that unknown codes * agree with BCP 47 by default (since they have no reason to do * this). */ public static String getLanguageCode(String wikimediaLanguageCode) { if (LANGUAGE_CODES.containsKey(wikimediaLanguageCode)) { return LANGUAGE_CODES.get(wikimediaLanguageCode); } else { throw new IllegalArgumentException("Unknown Wikimedia language \"" + wikimediaLanguageCode + "\"."); } } /** * Translate a Wikimedia language code to its preferred value * if this code is deprecated, or return it untouched if the string * is not a known deprecated Wikimedia language code. * * @param wikimediaLanguageCode * the language code as used by Wikimedia * @return * the preferred language code corresponding to the original language code */ public static String fixLanguageCodeIfDeprecated(String wikimediaLanguageCode) { return DEPRECATED_LANGUAGE_CODES.getOrDefault(wikimediaLanguageCode, wikimediaLanguageCode); } } package-info.java000066400000000000000000000014761444772566300350710ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/** * Interfaces for representing Wikibase data and related factories. * * @author Markus Kroetzsch */ package org.wikidata.wdtk.datamodel.interfaces; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/000077500000000000000000000000001444772566300214535ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/000077500000000000000000000000001444772566300223745ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/000077500000000000000000000000001444772566300231635ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/000077500000000000000000000000001444772566300247605ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300257315ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/000077500000000000000000000000001444772566300276635ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/000077500000000000000000000000001444772566300313255ustar00rootroot00000000000000AliasUpdateBuilderTest.java000066400000000000000000000225031444772566300364560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.*; import java.util.Arrays; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; public class AliasUpdateBuilderTest { static final MonolingualTextValue WALK = Datamodel.makeMonolingualTextValue("walk", "en"); static final MonolingualTextValue STROLL = Datamodel.makeMonolingualTextValue("stroll", "en"); static final MonolingualTextValue TRAVEL = Datamodel.makeMonolingualTextValue("travel", "en"); static final MonolingualTextValue WANDER = Datamodel.makeMonolingualTextValue("wander", "en"); static final MonolingualTextValue GEHEN = Datamodel.makeMonolingualTextValue("gehen", "de"); @Test public void testCreate() { AliasUpdate update = AliasUpdateBuilder.create().build(); assertFalse(update.getRecreated().isPresent()); assertThat(update.getAdded(), is(empty())); assertThat(update.getRemoved(), is(empty())); } @Test public void testForTerms() { assertThrows(NullPointerException.class, () -> AliasUpdateBuilder.forAliases(null)); assertThrows(NullPointerException.class, () -> AliasUpdateBuilder.forAliases(Arrays.asList(WALK, null))); assertThrows(IllegalArgumentException.class, () -> AliasUpdateBuilder.forAliases(Arrays.asList(WALK, WALK))); assertThrows(IllegalArgumentException.class, () -> AliasUpdateBuilder.forAliases(Arrays.asList(WALK, GEHEN))); AliasUpdate update = AliasUpdateBuilder.forAliases(Arrays.asList(WALK, STROLL)).build(); assertFalse(update.getRecreated().isPresent()); assertThat(update.getAdded(), is(empty())); assertThat(update.getRemoved(), is(empty())); } @Test public void testBlindAddition() { AliasUpdateBuilder builder = AliasUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.add(null)); builder.add(WALK); // simple case assertThrows(IllegalArgumentException.class, () -> builder.add(GEHEN)); builder.remove(TRAVEL); builder.remove(WANDER); builder.add(STROLL); builder.add(STROLL); // add twice builder.add(TRAVEL); // previously removed AliasUpdate update = builder.build(); assertFalse(update.getRecreated().isPresent()); assertThat(update.getRemoved(), containsInAnyOrder(WANDER)); assertThat(update.getAdded(), contains(WALK, STROLL)); } @Test public void testRecreatedAddition() { AliasUpdateBuilder builder = AliasUpdateBuilder.create(); builder.recreate(Arrays.asList(WALK, WANDER, TRAVEL)); assertThrows(IllegalArgumentException.class, () -> builder.add(GEHEN)); builder.add(STROLL); // simple case builder.add(WANDER); // duplicate AliasUpdate update = builder.build(); assertThat(update.getRemoved(), is(empty())); assertThat(update.getAdded(), is(empty())); assertThat(update.getRecreated().get(), contains(WALK, WANDER, TRAVEL, STROLL)); } @Test public void testBaseAddition() { AliasUpdateBuilder builder = AliasUpdateBuilder.forAliases(Arrays.asList(WALK, TRAVEL)); assertThrows(IllegalArgumentException.class, () -> builder.add(GEHEN)); builder.add(STROLL); // simple case builder.add(WALK); // duplicate AliasUpdate update = builder.build(); assertThat(update.getRemoved(), is(empty())); assertThat(update.getAdded(), contains(STROLL)); assertFalse(update.getRecreated().isPresent()); } @Test public void testRecreatedBaseAddition() { 
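// Scenario sketch: the builder starts from base aliases [WALK, TRAVEL, STROLL];
// recreate() replaces them wholesale, add() then grows the recreated list, and
// once the recreated list matches the base again the recreation cancels out,
// after which further add() calls are recorded as plain additions.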
AliasUpdateBuilder builder = AliasUpdateBuilder.forAliases(Arrays.asList(WALK, TRAVEL, STROLL)); builder.recreate(Arrays.asList(WALK)); builder.add(TRAVEL); // add to recreated assertThat(builder.build().getRecreated().get(), contains(WALK, TRAVEL)); builder.add(STROLL); // cancel recreation assertTrue(builder.build().isEmpty()); builder.add(WALK); // duplicate builder.add(WANDER); // add to base AliasUpdate update = builder.build(); assertThat(update.getRemoved(), is(empty())); assertThat(update.getAdded(), contains(WANDER)); assertFalse(update.getRecreated().isPresent()); } @Test public void testBlindRemoval() { AliasUpdateBuilder builder = AliasUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.remove(null)); builder.remove(WALK); // simple case assertThrows(IllegalArgumentException.class, () -> builder.remove(GEHEN)); builder.add(TRAVEL); builder.add(WANDER); builder.remove(STROLL); builder.remove(STROLL); // remove twice builder.remove(TRAVEL); // previously added AliasUpdate update = builder.build(); assertFalse(update.getRecreated().isPresent()); assertThat(update.getAdded(), contains(WANDER)); assertThat(update.getRemoved(), containsInAnyOrder(WALK, STROLL)); } @Test public void testRecreatedRemoval() { AliasUpdateBuilder builder = AliasUpdateBuilder.create(); builder.recreate(Arrays.asList(WALK, WANDER, TRAVEL)); assertThrows(IllegalArgumentException.class, () -> builder.remove(GEHEN)); builder.remove(WANDER); // simple case builder.remove(STROLL); // not present AliasUpdate update = builder.build(); assertThat(update.getRemoved(), is(empty())); assertThat(update.getAdded(), is(empty())); assertThat(update.getRecreated().get(), contains(WALK, TRAVEL)); } @Test public void testBaseRemoval() { AliasUpdateBuilder builder = AliasUpdateBuilder.forAliases(Arrays.asList(WALK, TRAVEL, WANDER)); assertThrows(IllegalArgumentException.class, () -> builder.remove(GEHEN)); builder.remove(TRAVEL); // simple case builder.remove(STROLL); // not found AliasUpdate update = builder.build(); assertThat(update.getAdded(), is(empty())); assertThat(update.getRemoved(), contains(TRAVEL)); assertFalse(update.getRecreated().isPresent()); } @Test public void testRecreatedBaseRemoval() { AliasUpdateBuilder builder = AliasUpdateBuilder.forAliases(Arrays.asList(WALK)); builder.recreate(Arrays.asList(WALK, TRAVEL, STROLL)); builder.remove(TRAVEL); // remove from recreated assertThat(builder.build().getRecreated().get(), contains(WALK, STROLL)); builder.remove(STROLL); // cancel recreation assertTrue(builder.build().isEmpty()); builder.remove(WANDER); // not found builder.remove(WALK); // remove from base AliasUpdate update = builder.build(); assertThat(update.getAdded(), is(empty())); assertThat(update.getRemoved(), contains(WALK)); assertFalse(update.getRecreated().isPresent()); } @Test public void testBlindRecreation() { AliasUpdateBuilder builder = AliasUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.recreate(null)); assertThrows(NullPointerException.class, () -> builder.recreate(Arrays.asList(WALK, null))); assertThrows(IllegalArgumentException.class, () -> builder.recreate(Arrays.asList(WALK, WALK))); assertThrows(IllegalArgumentException.class, () -> builder.recreate(Arrays.asList(WALK, GEHEN))); builder.add(WANDER); builder.remove(WALK); builder.recreate(Arrays.asList(WALK, STROLL)); AliasUpdate update = builder.build(); assertThat(update.getRecreated().get(), contains(WALK, STROLL)); assertThat(update.getRemoved(), is(empty())); 
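		// As these assertions document, recreate() supersedes the add() and
		// remove() edits recorded earlier on the same builder: the built
		// update carries only the recreated alias list. A minimal standalone
		// sketch of the same behaviour, using the constants of this class:
		//
		//   AliasUpdate u = AliasUpdateBuilder.create()
		//       .add(WANDER)                            // discarded by recreate()
		//       .remove(WALK)                           // discarded by recreate()
		//       .recreate(Arrays.asList(WALK, STROLL))
		//       .build();
		//   // u.getAdded() and u.getRemoved() are empty;
		//   // u.getRecreated() holds [WALK, STROLL].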
assertThat(update.getAdded(), is(empty())); } @Test public void testBaseRecreation() { AliasUpdateBuilder builder = AliasUpdateBuilder.forAliases(Arrays.asList(STROLL, TRAVEL, WALK)); builder.add(WANDER); builder.remove(WALK); builder.recreate(Arrays.asList(WALK, STROLL)); AliasUpdate update = builder.build(); assertThat(update.getRecreated().get(), contains(WALK, STROLL)); assertThat(update.getRemoved(), is(empty())); assertThat(update.getAdded(), is(empty())); builder.recreate(Arrays.asList(STROLL, TRAVEL, WALK)); assertTrue(builder.build().isEmpty()); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> AliasUpdateBuilder.create().append(null)); AliasUpdate update = AliasUpdateBuilder.create() .add(WALK) // prior addition .remove(STROLL) // prior removal .append(AliasUpdateBuilder.create() .add(TRAVEL) // another addition .remove(WANDER) // another removal .build()) .build(); assertFalse(update.getRecreated().isPresent()); assertThat(update.getAdded(), contains(WALK, TRAVEL)); assertThat(update.getRemoved(), containsInAnyOrder(STROLL, WANDER)); update = AliasUpdateBuilder.create() .add(WALK) // any prior change .append(AliasUpdateBuilder.create() .recreate(Arrays.asList(WALK, STROLL)) .build()) .build(); assertThat(update.getRecreated().get(), contains(WALK, STROLL)); assertThat(update.getRemoved(), is(empty())); assertThat(update.getAdded(), is(empty())); } } DatamodelConverterTest.java000066400000000000000000000360771444772566300365500ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.junit.Assert.*; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.implementation.GlobeCoordinatesValueImpl; import org.wikidata.wdtk.datamodel.implementation.ItemIdValueImpl; import org.wikidata.wdtk.datamodel.implementation.MonolingualTextValueImpl; import org.wikidata.wdtk.datamodel.implementation.PropertyIdValueImpl; import org.wikidata.wdtk.datamodel.implementation.QuantityValueImpl; import org.wikidata.wdtk.datamodel.implementation.SnakGroupImpl; import org.wikidata.wdtk.datamodel.implementation.StatementGroupImpl; import org.wikidata.wdtk.datamodel.implementation.StatementImpl; import org.wikidata.wdtk.datamodel.implementation.StringValueImpl; import org.wikidata.wdtk.datamodel.implementation.TimeValueImpl; import org.wikidata.wdtk.datamodel.implementation.ValueSnakImpl; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor; /** * Test for special aspects of {@link DatamodelConverter}. Regular operation is * tested elsewhere already. 
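 * <p>
 * All tests below follow one pattern: construct a converter with
 * {@code new DatamodelConverter(new DataObjectFactoryImpl())}, deep-copy a
 * document or value with {@code copy()}, and compare the result to an
 * expected object using {@code assertEquals}.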
 *
 * @author Markus Kroetzsch
 *
 */
public class DatamodelConverterTest {

	static class BrokenItemIdValue implements ItemIdValue {

		@Override
		public String getEntityType() {
			return ItemIdValue.ET_ITEM;
		}

		@Override
		public String getId() {
			return null; // illegal; should cause errors elsewhere
		}

		@Override
		public String getSiteIri() {
			return Datamodel.SITE_WIKIDATA;
		}

		@Override
		public String getIri() {
			return null;
		}

		@Override
		public <T> T accept(ValueVisitor<T> valueVisitor) {
			return valueVisitor.visit(this);
		}

		@Override
		public boolean isPlaceholder() {
			return false;
		}
	}

	private Statement getBrokenStatement() {
		Snak brokenSnak = Datamodel.makeValueSnak(
				getTestPropertyIdValue(5),
				new BrokenItemIdValue());
		return Datamodel.makeStatement(
				getTestItemIdValue(2), brokenSnak,
				Collections.emptyList(), Collections.emptyList(),
				StatementRank.NORMAL, "id");
	}

	/**
	 * Tests that statement groups that contain a single statement which cannot
	 * be copied are removed.
	 */
	@Test
	public void testSingleBrokenStatement() {
		StatementGroup sg1 = Datamodel.makeStatementGroup(Collections
				.singletonList(getBrokenStatement()));
		StatementGroup sg2 = getTestStatementGroup(2, 5, 1,
				EntityIdValue.ET_ITEM);

		List<StatementGroup> brokenSgs = new ArrayList<>();
		brokenSgs.add(sg1);
		brokenSgs.add(sg2);
		List<StatementGroup> fixedSgs = new ArrayList<>();
		fixedSgs.add(sg2);

		ItemDocument brokenId = Datamodel.makeItemDocument(
				getTestItemIdValue(2),
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), brokenSgs, Collections.emptyMap());
		ItemDocument fixedId = Datamodel.makeItemDocument(
				getTestItemIdValue(2),
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), fixedSgs, Collections.emptyMap());

		DatamodelConverter dmc = new DatamodelConverter(
				new DataObjectFactoryImpl());

		assertEquals(fixedId, dmc.copy(brokenId));
	}

	/**
	 * Tests that statement groups that contain several statements, one of which
	 * cannot be copied, are reduced to the working statements.
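	 * <p>
	 * The broken statement is produced by {@code getBrokenStatement()} above:
	 * its main snak value is a {@link BrokenItemIdValue} whose
	 * {@code getId()} illegally returns {@code null}, so that one statement
	 * cannot be copied, while the two well-formed statements of the same
	 * group can.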
*/ @Test public void testBrokenStatement() { List brokenSg1Statements = new ArrayList<>(); brokenSg1Statements.add(getTestStatement(2, 5, 1, EntityIdValue.ET_ITEM)); brokenSg1Statements.add(getBrokenStatement()); brokenSg1Statements.add(getTestStatement(2, 5, 2, EntityIdValue.ET_ITEM)); StatementGroup brokenSg1 = Datamodel .makeStatementGroup(brokenSg1Statements); List fixedSg1Statements = new ArrayList<>(); fixedSg1Statements.add(getTestStatement(2, 5, 1, EntityIdValue.ET_ITEM)); fixedSg1Statements.add(getTestStatement(2, 5, 2, EntityIdValue.ET_ITEM)); StatementGroup fixedSg1 = Datamodel .makeStatementGroup(fixedSg1Statements); StatementGroup sg2 = getTestStatementGroup(2, 5, 1, EntityIdValue.ET_ITEM); List brokenSgs = new ArrayList<>(); brokenSgs.add(brokenSg1); brokenSgs.add(sg2); List fixedSgs = new ArrayList<>(); fixedSgs.add(fixedSg1); fixedSgs.add(sg2); ItemDocument brokenId = Datamodel.makeItemDocument( getTestItemIdValue(2), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), brokenSgs, Collections.emptyMap()); ItemDocument fixedId = Datamodel.makeItemDocument( getTestItemIdValue(2), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), fixedSgs, Collections.emptyMap()); DatamodelConverter dmc = new DatamodelConverter( new DataObjectFactoryImpl()); assertEquals(fixedId, dmc.copy(brokenId)); } @Test public void testGenerationFromOtherItemDocument() { ItemDocument item = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "label")), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "desc")), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "alias")), Collections.emptyList(), Collections.singletonMap("enwiki", Datamodel.makeSiteLink("foo", "enwiki", Collections.emptyList())) ); DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); assertEquals(item, converter.copy(item)); } @Test public void testCopyMediaInfoIdValue() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); MediaInfoIdValue mediaInfo = getTestMediaInfoIdValue(34); assertEquals(mediaInfo, converter.copy(mediaInfo)); } @Test public void testCopyLexemeIdValue() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); LexemeIdValue lexeme = getTestLexemeIdValue(45); assertEquals(lexeme, converter.copy(lexeme)); } @Test public void testCopyFormIdValue() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); FormIdValue form = getTestFormIdValue(56); assertEquals(form, converter.copy(form)); } @Test public void testCopySenseIdValue() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); SenseIdValue sense = getTestSenseIdValue(56); assertEquals(sense, converter.copy(sense)); } @Test public void testMediaInfoDocument() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); MediaInfoDocument document = Datamodel.makeMediaInfoDocument( getTestMediaInfoIdValue(78), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "label")), Collections.emptyList()); assertEquals(document, converter.copy(document)); } @Test public void testLexemeDocument() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); LexemeDocument document = Datamodel.makeLexemeDocument( getTestLexemeIdValue(90), getTestItemIdValue(38), getTestItemIdValue(39), 
Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "lemma"))); assertEquals(document, converter.copy(document)); } @Test public void testFormDocument() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); FormDocument document = Datamodel.makeFormDocument( getTestFormIdValue(92), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertEquals(document, converter.copy(document)); } @Test public void testSenseDocument() { DatamodelConverter converter = new DatamodelConverter(new DataObjectFactoryImpl()); SenseDocument document = Datamodel.makeSenseDocument( getTestSenseIdValue(738), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "gloss")), Collections.emptyList()); assertEquals(document, converter.copy(document)); } public enum ValueType { STRING, ITEM, GLOBE_COORDINATES, TIME, QUANTITY, MONOLINGUAL_TEXT; protected static ValueType fromInt(int seed) { switch (seed % 6) { case 0: return STRING; case 1: return ITEM; case 2: return GLOBE_COORDINATES; case 3: return TIME; case 4: return QUANTITY; default: case 5: return MONOLINGUAL_TEXT; } } } private ItemIdValue getTestItemIdValue(int seed) { return new ItemIdValueImpl("Q4" + seed, "foo:"); } private PropertyIdValue getTestPropertyIdValue(int seed) { return new PropertyIdValueImpl("P4" + seed, "foo:"); } private MediaInfoIdValue getTestMediaInfoIdValue(int seed) { return Datamodel.makeMediaInfoIdValue("M4" + seed, "foo:"); } private LexemeIdValue getTestLexemeIdValue(int seed) { return Datamodel.makeLexemeIdValue("L4" + seed, "foo:"); } private FormIdValue getTestFormIdValue(int seed) { return Datamodel.makeFormIdValue("L4" + seed + "-F1", "foo:"); } private SenseIdValue getTestSenseIdValue(int seed) { return Datamodel.makeSenseIdValue("L4" + seed + "-S1", "foo:"); } private EntityIdValue getTestEntityIdValue(int seed, String entityType) { switch (entityType) { case EntityIdValue.ET_ITEM: return getTestItemIdValue(seed); case EntityIdValue.ET_PROPERTY: return getTestPropertyIdValue(seed); default: throw new IllegalArgumentException("Unsupported entity type " + entityType); } } private TimeValue getTestTimeValue(int seed) { return new TimeValueImpl(2007 + seed, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_DAY, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); } private GlobeCoordinatesValue getTestGlobeCoordinatesValue(int seed) { return new GlobeCoordinatesValueImpl((10 + seed) * GlobeCoordinatesValue.PREC_DEGREE, (1905 + seed) * GlobeCoordinatesValue.PREC_DECI_DEGREE, GlobeCoordinatesValue.PREC_DECI_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); } private StringValue getTestStringValue(int seed) { return new StringValueImpl("foo" + seed); } private MonolingualTextValue getTestMonolingualTextValue(int seed, String language) { return new MonolingualTextValueImpl("foo" + seed, language); } private QuantityValue getTestQuantityValue(int seed) { BigDecimal nv = new BigDecimal(seed + ".123456789012345678901234567890123456789"); BigDecimal lb = new BigDecimal(seed + ".123456789012345678901234567890123456788"); BigDecimal ub = new BigDecimal(seed + ".123456789012345678901234567890123456790"); return new QuantityValueImpl(nv, lb, ub, Datamodel.makeWikidataItemIdValue("Q11573")); } private Value getTestValue(ValueType valueType, int seed) { switch (valueType) { case GLOBE_COORDINATES: return getTestGlobeCoordinatesValue(seed); case ITEM: return getTestItemIdValue(seed); case MONOLINGUAL_TEXT: return getTestMonolingualTextValue(seed, "de"); case 
QUANTITY: return getTestQuantityValue(seed); case STRING: return getTestStringValue(seed); case TIME: return getTestTimeValue(seed); default: throw new RuntimeException("Unsupported value type."); } } private ValueSnak getTestValueSnak(ValueType valueType, int pseed, int vseed) { PropertyIdValue property = getTestPropertyIdValue(pseed); Value value = getTestValue(valueType, vseed); return new ValueSnakImpl(property, value); } private SnakGroup getTestValueSnakGroup(ValueType valueType, int pseed, int size) { List snaks = new ArrayList<>(size); for (int i = 0; i < size; i++) { snaks.add(getTestValueSnak(valueType, pseed, i)); } return new SnakGroupImpl(snaks); } private List getTestValueSnakGroups(int seed, int size) { List snakGroups = new ArrayList<>(size); for (int i = 0; i < size; i++) { SnakGroup group = getTestValueSnakGroup(ValueType.fromInt(i + seed), i + seed, i + 1); snakGroups.add(group); } return snakGroups; } private Statement getTestStatement(int subjectSeed, int seed, int size, String entityType) { List qualifiers = getTestValueSnakGroups(seed * 100, size); return new StatementImpl("", StatementRank.NORMAL, getTestValueSnak(ValueType.fromInt(seed), seed, seed), qualifiers, null, getTestEntityIdValue(subjectSeed, entityType)); } private StatementGroup getTestStatementGroup(int subjectSeed, int seed, int size, String entityType) { List statements = new ArrayList<>(size); for (int i = 0; i < size; i++) { statements.add(getTestStatement(subjectSeed, seed, i, entityType)); } return new StatementGroupImpl(statements); } } DatamodelFilterTest.java000066400000000000000000000455001444772566300360150ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import org.junit.Test; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.*; import static org.junit.Assert.assertEquals; public class DatamodelFilterTest { @Test public void testEmptyLanguageFilterForItem() { DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setLanguageFilter(Collections.emptySet()); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); ItemDocument itemDocument = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Arrays.asList( Datamodel.makeMonolingualTextValue("Label de", "de"), Datamodel.makeMonolingualTextValue("Label en", "en"), Datamodel.makeMonolingualTextValue("Label he", "he") ), Arrays.asList( Datamodel.makeMonolingualTextValue("Desc en", "en"), Datamodel.makeMonolingualTextValue("Desc he", "he") ), Arrays.asList( Datamodel.makeMonolingualTextValue("Alias en", "en"), Datamodel.makeMonolingualTextValue("Alias de1", "de"), Datamodel.makeMonolingualTextValue("Alias de2", "de") ), Collections.emptyList(), Collections.emptyMap() ); ItemDocument itemDocumentFiltered = Datamodel.makeItemDocument(Datamodel.makeWikidataItemIdValue("Q42"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap() ); assertEquals(itemDocumentFiltered, filter.filter(itemDocument)); } @Test public void testLanguageFilterForItem() { Set languageFilter = new HashSet<>(); languageFilter.add("de"); languageFilter.add("he"); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setLanguageFilter(languageFilter); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); ItemDocument itemDocument = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Arrays.asList( Datamodel.makeMonolingualTextValue("Label de", "de"), Datamodel.makeMonolingualTextValue("Label en", "en"), Datamodel.makeMonolingualTextValue("Label he", "he") ), Arrays.asList( Datamodel.makeMonolingualTextValue("Desc en", "en"), Datamodel.makeMonolingualTextValue("Desc he", "he") ), Arrays.asList( Datamodel.makeMonolingualTextValue("Alias en", "en"), Datamodel.makeMonolingualTextValue("Alias de1", "de"), Datamodel.makeMonolingualTextValue("Alias de2", "de") ), Collections.emptyList(), Collections.emptyMap() ); ItemDocument itemDocumentFiltered = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Arrays.asList( Datamodel.makeMonolingualTextValue("Label de", "de"), Datamodel.makeMonolingualTextValue("Label he", "he") ), Collections.singletonList( Datamodel.makeMonolingualTextValue("Desc he", "he") ), Arrays.asList( Datamodel.makeMonolingualTextValue("Alias de1", "de"), Datamodel.makeMonolingualTextValue("Alias de2", "de") ), Collections.emptyList(), Collections.emptyMap() ); assertEquals(itemDocumentFiltered, filter.filter(itemDocument)); } @Test public void testLanguageFilterForProperty() { Set languageFilter = new HashSet<>(); languageFilter.add("de"); languageFilter.add("he"); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setLanguageFilter(languageFilter); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); PropertyDocument propertyDocument = Datamodel.makePropertyDocument( Datamodel.makeWikidataPropertyIdValue("P42"), Arrays.asList( Datamodel.makeMonolingualTextValue("Label de", "de"), 
					Datamodel.makeMonolingualTextValue("Label en", "en"),
					Datamodel.makeMonolingualTextValue("Label he", "he")
				),
				Arrays.asList(
					Datamodel.makeMonolingualTextValue("Desc en", "en"),
					Datamodel.makeMonolingualTextValue("Desc he", "he")
				),
				Arrays.asList(
					Datamodel.makeMonolingualTextValue("Alias en", "en"),
					Datamodel.makeMonolingualTextValue("Alias de1", "de"),
					Datamodel.makeMonolingualTextValue("Alias de2", "de")
				),
				Collections.emptyList(),
				Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_STRING)
		);

		PropertyDocument propertyDocumentFiltered = Datamodel.makePropertyDocument(
				Datamodel.makeWikidataPropertyIdValue("P42"),
				Arrays.asList(
					Datamodel.makeMonolingualTextValue("Label de", "de"),
					Datamodel.makeMonolingualTextValue("Label he", "he")
				),
				Collections.singletonList(
					Datamodel.makeMonolingualTextValue("Desc he", "he")
				),
				Arrays.asList(
					Datamodel.makeMonolingualTextValue("Alias de1", "de"),
					Datamodel.makeMonolingualTextValue("Alias de2", "de")
				),
				Collections.emptyList(),
				Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_STRING)
		);

		assertEquals(propertyDocumentFiltered, filter.filter(propertyDocument));
	}

	@Test
	public void testLanguageFilterForMediaInfo() {
		Set<String> languageFilter = new HashSet<>();
		languageFilter.add("de");
		languageFilter.add("he");
		DocumentDataFilter documentDataFilter = new DocumentDataFilter();
		documentDataFilter.setLanguageFilter(languageFilter);
		DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter);

		MediaInfoDocument mediaInfoDocument = Datamodel.makeMediaInfoDocument(
				Datamodel.makeWikimediaCommonsMediaInfoIdValue("M42"),
				Arrays.asList(
					Datamodel.makeMonolingualTextValue("Label de", "de"),
					Datamodel.makeMonolingualTextValue("Label en", "en"),
					Datamodel.makeMonolingualTextValue("Label he", "he")
				),
				Collections.emptyList()
		);

		MediaInfoDocument mediaInfoDocumentFiltered = Datamodel.makeMediaInfoDocument(
				Datamodel.makeWikimediaCommonsMediaInfoIdValue("M42"),
				Arrays.asList(
					Datamodel.makeMonolingualTextValue("Label de", "de"),
					Datamodel.makeMonolingualTextValue("Label he", "he")
				),
				Collections.emptyList()
		);

		assertEquals(mediaInfoDocumentFiltered, filter.filter(mediaInfoDocument));
	}

	/**
	 * Creates a statement group using the given property. The subject of the
	 * statement group is the given {@code subjectIdValue}.
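	 * <p>
	 * For example, with property {@code P1} and subject {@code Q42} the helper
	 * yields a one-statement group whose statement has a some-value main snak
	 * for {@code P1}, no qualifiers or references, normal rank, and the
	 * statement id {@code "statement-id-P1"}.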
* * @param propertyIdValue the property to use for the main snak of the claim of the * statements in this statement group * @return the new statement group */ private StatementGroup makeTestStatementGroup( PropertyIdValue propertyIdValue, EntityIdValue subjectIdValue) { Statement statement = Datamodel.makeStatement( subjectIdValue, Datamodel.makeSomeValueSnak(propertyIdValue), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, "statement-id-" + propertyIdValue.getId()); return Datamodel.makeStatementGroup(Collections.singletonList(statement)); } @Test public void testEmptyPropertyFilterForItem() { ItemIdValue s = Datamodel.makeWikidataItemIdValue("Q42"); PropertyIdValue p1 = Datamodel.makeWikidataPropertyIdValue("P1"); PropertyIdValue p2 = Datamodel.makeWikidataPropertyIdValue("P2"); PropertyIdValue p3 = Datamodel.makeWikidataPropertyIdValue("P3"); PropertyIdValue p4 = Datamodel.makeWikidataPropertyIdValue("P4"); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setPropertyFilter(Collections.emptySet()); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); ItemDocument itemDocument = Datamodel.makeItemDocument( s, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p2, s), makeTestStatementGroup(p3, s), makeTestStatementGroup(p4, s) ), Collections.emptyMap() ); ItemDocument itemDocumentFiltered = Datamodel.makeItemDocument(s, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap() ); assertEquals(itemDocumentFiltered, filter.filter(itemDocument)); } @Test public void testPropertyFilterForItem() { ItemIdValue s = Datamodel.makeWikidataItemIdValue("Q42"); PropertyIdValue p1 = Datamodel.makeWikidataPropertyIdValue("P1"); PropertyIdValue p2 = Datamodel.makeWikidataPropertyIdValue("P2"); PropertyIdValue p3 = Datamodel.makeWikidataPropertyIdValue("P3"); PropertyIdValue p4 = Datamodel.makeWikidataPropertyIdValue("P4"); Set propertyFilter = new HashSet<>(); propertyFilter.add(p1); propertyFilter.add(p3); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setPropertyFilter(propertyFilter); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); ItemDocument itemDocument = Datamodel.makeItemDocument( s, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p2, s), makeTestStatementGroup(p3, s), makeTestStatementGroup(p4, s) ), Collections.emptyMap() ); ItemDocument itemDocumentFiltered = Datamodel.makeItemDocument( s, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p3, s) ), Collections.emptyMap() ); assertEquals(itemDocumentFiltered, filter.filter(itemDocument)); } @Test public void testPropertyFilterForProperty() { PropertyIdValue s = Datamodel.makeWikidataPropertyIdValue("P42"); PropertyIdValue p1 = Datamodel.makeWikidataPropertyIdValue("P1"); PropertyIdValue p2 = Datamodel.makeWikidataPropertyIdValue("P2"); PropertyIdValue p3 = Datamodel.makeWikidataPropertyIdValue("P3"); PropertyIdValue p4 = Datamodel.makeWikidataPropertyIdValue("P4"); Set propertyFilter = new HashSet<>(); propertyFilter.add(p1); propertyFilter.add(p3); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); 
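		// Same configuration pattern as in the other tests here: create an
		// empty DocumentDataFilter, set exactly one of its filters (in this
		// case the property filter {P1, P3}), and wrap it in a DatamodelFilter
		// backed by a DataObjectFactoryImpl; filtering then keeps only the
		// statement groups whose property is in the set.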
documentDataFilter.setPropertyFilter(propertyFilter); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); PropertyDocument propertyDocument = Datamodel.makePropertyDocument( s, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p2, s), makeTestStatementGroup(p3, s), makeTestStatementGroup(p4, s) ), Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_STRING) ); PropertyDocument propertyDocumentFiltered = Datamodel.makePropertyDocument( s, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p3, s) ), Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_STRING) ); assertEquals(propertyDocumentFiltered, filter.filter(propertyDocument)); } @Test public void testPropertyFilterForLexeme() { LexemeIdValue l = Datamodel.makeWikidataLexemeIdValue("L42"); FormIdValue f = Datamodel.makeWikidataFormIdValue("L42-F1"); SenseIdValue s = Datamodel.makeWikidataSenseIdValue("L42-S1"); PropertyIdValue p1 = Datamodel.makeWikidataPropertyIdValue("P1"); PropertyIdValue p2 = Datamodel.makeWikidataPropertyIdValue("P2"); PropertyIdValue p3 = Datamodel.makeWikidataPropertyIdValue("P3"); PropertyIdValue p4 = Datamodel.makeWikidataPropertyIdValue("P4"); Set propertyFilter = new HashSet<>(); propertyFilter.add(p1); propertyFilter.add(p3); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setPropertyFilter(propertyFilter); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); LexemeDocument lexemeDocument = Datamodel.makeLexemeDocument( l, Datamodel.makeWikidataItemIdValue("Q1"), Datamodel.makeWikidataItemIdValue("Q1"), Collections.singletonList(Datamodel.makeMonolingualTextValue("foo", "en")), Arrays.asList( makeTestStatementGroup(p1, l), makeTestStatementGroup(p2, l), makeTestStatementGroup(p3, l), makeTestStatementGroup(p4, l) ), Collections.singletonList(Datamodel.makeFormDocument( f, Collections.singletonList(Datamodel.makeMonolingualTextValue("foo", "en")), Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, f), makeTestStatementGroup(p2, f), makeTestStatementGroup(p3, f) ) )), Collections.singletonList(Datamodel.makeSenseDocument( s, Collections.singletonList(Datamodel.makeMonolingualTextValue("foo", "en")), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p2, s), makeTestStatementGroup(p3, s) ) )) ); LexemeDocument lexemeDocumentFiltered = Datamodel.makeLexemeDocument( l, Datamodel.makeWikidataItemIdValue("Q1"), Datamodel.makeWikidataItemIdValue("Q1"), Collections.singletonList(Datamodel.makeMonolingualTextValue("foo", "en")), Arrays.asList( makeTestStatementGroup(p1, l), makeTestStatementGroup(p3, l) ), Collections.singletonList(Datamodel.makeFormDocument( f, Collections.singletonList(Datamodel.makeMonolingualTextValue("foo", "en")), Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, f), makeTestStatementGroup(p3, f) ) )), Collections.singletonList(Datamodel.makeSenseDocument( s, Collections.singletonList(Datamodel.makeMonolingualTextValue("foo", "en")), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p3, s) ) )) ); assertEquals(lexemeDocumentFiltered, filter.filter(lexemeDocument)); } @Test public void testPropertyFilterForMediaInfo() { MediaInfoIdValue s = Datamodel.makeWikimediaCommonsMediaInfoIdValue("M42"); PropertyIdValue p1 = 
Datamodel.makeWikidataPropertyIdValue("P1"); PropertyIdValue p2 = Datamodel.makeWikidataPropertyIdValue("P2"); PropertyIdValue p3 = Datamodel.makeWikidataPropertyIdValue("P3"); PropertyIdValue p4 = Datamodel.makeWikidataPropertyIdValue("P4"); Set propertyFilter = new HashSet<>(); propertyFilter.add(p1); propertyFilter.add(p3); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setPropertyFilter(propertyFilter); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); MediaInfoDocument mediaInfoDocument = Datamodel.makeMediaInfoDocument( s, Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p2, s), makeTestStatementGroup(p3, s), makeTestStatementGroup(p4, s) ) ); MediaInfoDocument mediaInfoDocumentFiltered = Datamodel.makeMediaInfoDocument( s, Collections.emptyList(), Arrays.asList( makeTestStatementGroup(p1, s), makeTestStatementGroup(p3, s) ) ); assertEquals(mediaInfoDocumentFiltered, filter.filter(mediaInfoDocument)); } @Test public void testEmptySiteLinkFilterForItem() { SiteLink s1 = Datamodel.makeSiteLink("Title 1", "site1", Collections.emptyList()); SiteLink s2 = Datamodel.makeSiteLink("Title 2", "site2", Collections.emptyList()); SiteLink s3 = Datamodel.makeSiteLink("Title 3", "site3", Collections.emptyList()); SiteLink s4 = Datamodel.makeSiteLink("Title 4", "site4", Collections.emptyList()); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setSiteLinkFilter(Collections.emptySet()); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); Map siteLinks = new HashMap<>(); siteLinks.put(s1.getSiteKey(), s1); siteLinks.put(s2.getSiteKey(), s2); siteLinks.put(s3.getSiteKey(), s3); siteLinks.put(s4.getSiteKey(), s4); ItemDocument itemDocument = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), siteLinks ); ItemDocument itemDocumentFiltered = Datamodel.makeItemDocument(Datamodel.makeWikidataItemIdValue("Q42"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap() ); assertEquals(itemDocumentFiltered, filter.filter(itemDocument)); } @Test public void testSiteLinkFilterForItem() { SiteLink s1 = Datamodel.makeSiteLink("Title 1", "site1", Collections.emptyList()); SiteLink s2 = Datamodel.makeSiteLink("Title 2", "site2", Collections.emptyList()); SiteLink s3 = Datamodel.makeSiteLink("Title 3", "site3", Collections.emptyList()); SiteLink s4 = Datamodel.makeSiteLink("Title 4", "site4", Collections.emptyList()); Set siteLinkFilter = new HashSet<>(); siteLinkFilter.add("site2"); siteLinkFilter.add("site4"); DocumentDataFilter documentDataFilter = new DocumentDataFilter(); documentDataFilter.setSiteLinkFilter(siteLinkFilter); DatamodelFilter filter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); Map siteLinks = new HashMap<>(); siteLinks.put(s1.getSiteKey(), s1); siteLinks.put(s2.getSiteKey(), s2); siteLinks.put(s3.getSiteKey(), s3); siteLinks.put(s4.getSiteKey(), s4); Map siteLinksFiltered = new HashMap<>(); siteLinksFiltered.put(s2.getSiteKey(), s2); siteLinksFiltered.put(s4.getSiteKey(), s4); ItemDocument itemDocument = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), siteLinks ); 
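		// Expected outcome: the site link filter {"site2", "site4"} keeps s2
		// and s4 and drops s1 and s3, so the filtered document constructed
		// next carries only those two site links and is otherwise identical.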
ItemDocument itemDocumentFiltered = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), siteLinksFiltered ); assertEquals(itemDocumentFiltered, filter.filter(itemDocument)); } } DatamodelTest.java000066400000000000000000000377111444772566300346540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.junit.Assert.assertEquals; import java.math.BigDecimal; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; public class DatamodelTest { private final DataObjectFactory factory = new DataObjectFactoryImpl(); @Test public final void testGetItemId() { ItemIdValue o1 = Datamodel.makeItemIdValue("Q42", "foo"); ItemIdValue o2 = factory.getItemIdValue("Q42", "foo"); assertEquals(o1, o2); } @Test public final void testGetWikidataItemId() { ItemIdValue o1 = 
Datamodel.makeWikidataItemIdValue("Q42"); ItemIdValue o2 = factory.getItemIdValue("Q42", "http://www.wikidata.org/entity/"); assertEquals(o1, o2); } @Test public final void testGetLexemeId() { LexemeIdValue o1 = Datamodel.makeLexemeIdValue("L42", "foo"); LexemeIdValue o2 = factory.getLexemeIdValue("L42", "foo"); assertEquals(o1, o2); } @Test public final void testGetWikidataLexemeId() { LexemeIdValue o1 = Datamodel.makeWikidataLexemeIdValue("L42"); LexemeIdValue o2 = factory.getLexemeIdValue("L42", "http://www.wikidata.org/entity/"); assertEquals(o1, o2); } @Test public final void testGetPropertyId() { PropertyIdValue o1 = Datamodel.makePropertyIdValue("P42", "foo"); PropertyIdValue o2 = factory.getPropertyIdValue("P42", "foo"); assertEquals(o1, o2); } @Test public final void testGetWikidataPropertyId() { PropertyIdValue o1 = Datamodel.makeWikidataPropertyIdValue("P42"); PropertyIdValue o2 = factory.getPropertyIdValue("P42", "http://www.wikidata.org/entity/"); assertEquals(o1, o2); } @Test public final void testGetFormId() { FormIdValue o1 = Datamodel.makeFormIdValue("L42-F1", "foo"); FormIdValue o2 = factory.getFormIdValue("L42-F1", "foo"); assertEquals(o1, o2); } @Test public final void testGetWikidataFormId() { FormIdValue o1 = Datamodel.makeWikidataFormIdValue("L42-F1"); FormIdValue o2 = factory.getFormIdValue("L42-F1", "http://www.wikidata.org/entity/"); assertEquals(o1, o2); } @Test public final void testGetSenseId() { SenseIdValue o1 = Datamodel.makeSenseIdValue("L42-S1", "foo"); SenseIdValue o2 = factory.getSenseIdValue("L42-S1", "foo"); assertEquals(o1, o2); } @Test public final void testGetWikidataSenseId() { SenseIdValue o1 = Datamodel.makeWikidataSenseIdValue("L42-S1"); SenseIdValue o2 = factory.getSenseIdValue("L42-S1", "http://www.wikidata.org/entity/"); assertEquals(o1, o2); } @Test public final void testGetMediaInfoId() { MediaInfoIdValue o1 = Datamodel.makeMediaInfoIdValue("M42", "foo"); MediaInfoIdValue o2 = factory.getMediaInfoIdValue("M42", "foo"); assertEquals(o1, o2); } @Test public final void testGetWikimediaCommonsMediaInfoId() { MediaInfoIdValue o1 = Datamodel.makeWikimediaCommonsMediaInfoIdValue("M42"); MediaInfoIdValue o2 = factory.getMediaInfoIdValue("M42", "http://commons.wikimedia.org/entity/"); assertEquals(o1, o2); } @Test public final void testGetDatatypeId() { DatatypeIdValue o1 = Datamodel .makeDatatypeIdValueFromJsonString(DatatypeIdValue.JSON_DT_TIME); DatatypeIdValue o2 = factory .getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_TIME); assertEquals(o1, o2); } @Test public final void testGetTimeValue() { TimeValue o1 = Datamodel.makeTimeValue(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_DAY, 0, 0, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue o2 = factory.getTimeValue(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_DAY, 0, 0, 60, TimeValue.CM_GREGORIAN_PRO); assertEquals(o1, o2); } @Test public final void testGetTimeValueTime() { TimeValue o1 = Datamodel .makeTimeValue(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue o2 = factory.getTimeValue(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 0, 60, TimeValue.CM_GREGORIAN_PRO); assertEquals(o1, o2); } @Test public final void testGetTimeValueDate() { TimeValue o1 = Datamodel.makeTimeValue(2007, (byte) 5, (byte) 12, TimeValue.CM_GREGORIAN_PRO); TimeValue o2 = factory.getTimeValue(2007, (byte) 5, (byte) 12, (byte) 0, (byte) 0, (byte) 0, TimeValue.PREC_DAY, 0, 0, 0, 
TimeValue.CM_GREGORIAN_PRO); assertEquals(o1, o2); } @Test public final void testGetGlobeCoordinatesValue() { GlobeCoordinatesValue o1 = Datamodel.makeGlobeCoordinatesValue(90.0, 190.5, GlobeCoordinatesValue.PREC_DECI_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); GlobeCoordinatesValue o2 = factory.getGlobeCoordinatesValue(90.0, 190.5, GlobeCoordinatesValue.PREC_DECI_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); assertEquals(o1, o2); } @Test public final void testGetStringValue() { StringValue o1 = Datamodel.makeStringValue("foo"); StringValue o2 = factory.getStringValue("foo"); assertEquals(o1, o2); } @Test public final void testGetMonolingualTextValue() { MonolingualTextValue o1 = Datamodel.makeMonolingualTextValue("foo", "en"); MonolingualTextValue o2 = factory.getMonolingualTextValue("foo", "en"); assertEquals(o1, o2); } @Test public final void testGetQuantityValueItemIdValue() { BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); BigDecimal lb = new BigDecimal( "0.123456789012345678901234567890123456788"); BigDecimal ub = new BigDecimal( "0.123456789012345678901234567890123456790"); ItemIdValue unit = factory.getItemIdValue("Q1", "http://www.wikidata.org/entity/"); QuantityValue o1 = Datamodel.makeQuantityValue(nv, lb, ub, unit); QuantityValue o2 = factory.getQuantityValue(nv, lb, ub, unit); assertEquals(o1, o2); } @Test public final void testGetQuantityValueNoUnit() { BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); BigDecimal lb = new BigDecimal( "0.123456789012345678901234567890123456788"); BigDecimal ub = new BigDecimal( "0.123456789012345678901234567890123456790"); QuantityValue o1 = Datamodel.makeQuantityValue(nv, lb, ub); QuantityValue o2 = factory.getQuantityValue(nv, lb, ub); assertEquals(o1, o2); } @Test public final void testGetQuantityValueNoBoundsItemIdValue() { BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); ItemIdValue unit = factory.getItemIdValue("Q1", "http://www.wikidata.org/entity/"); QuantityValue o1 = Datamodel.makeQuantityValue(nv, unit); QuantityValue o2 = factory.getQuantityValue(nv, unit); assertEquals(o1, o2); } @Test public final void testGetQuantityValueNoBoundsAndUnits() { BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); QuantityValue o1 = Datamodel.makeQuantityValue(nv); QuantityValue o2 = factory.getQuantityValue(nv); assertEquals(o1, o2); } @Test public final void testGetLongQuantityValue() { BigDecimal nv = new BigDecimal("1234567890123456789"); BigDecimal lb = new BigDecimal("1234567890123456788"); BigDecimal ub = new BigDecimal("1234567890123456790"); QuantityValue o1 = Datamodel.makeQuantityValue(1234567890123456789L, 1234567890123456788L, 1234567890123456790L); QuantityValue o2 = factory.getQuantityValue(nv, lb, ub); assertEquals(o1, o2); } @Test public final void testGetValueSnak() { ValueSnak o1 = Datamodel.makeValueSnak( factory.getPropertyIdValue("P42", "foo"), factory.getStringValue("foo")); ValueSnak o2 = factory.getValueSnak( factory.getPropertyIdValue("P42", "foo"), factory.getStringValue("foo")); assertEquals(o1, o2); } @Test public final void testGetSomeValueSnak() { SomeValueSnak o1 = Datamodel.makeSomeValueSnak(factory .getPropertyIdValue("P42", "foo")); SomeValueSnak o2 = factory.getSomeValueSnak(factory.getPropertyIdValue( "P42", "foo")); assertEquals(o1, o2); } @Test public final void testGetNoValueSnak() { NoValueSnak o1 = Datamodel.makeNoValueSnak(factory.getPropertyIdValue( "P42", "foo")); NoValueSnak o2 = 
factory.getNoValueSnak(factory.getPropertyIdValue( "P42", "foo")); assertEquals(o1, o2); } @Test public final void testGetSnakGroup() { Snak s = factory.getNoValueSnak(factory .getPropertyIdValue("P42", "foo")); SnakGroup o1 = Datamodel.makeSnakGroup(Collections .singletonList(s)); SnakGroup o2 = factory .getSnakGroup(Collections.singletonList(s)); assertEquals(o1, o2); } @Test public final void testGetClaim() { Claim o1 = Datamodel .makeClaim(factory.getItemIdValue("Q42", "foo"), factory .getNoValueSnak(factory .getPropertyIdValue("P42", "foo")), Collections .emptyList()); Claim o2 = factory .getClaim(factory.getItemIdValue("Q42", "foo"), factory .getNoValueSnak(factory .getPropertyIdValue("P42", "foo")), Collections .emptyList()); assertEquals(o1, o2); } @Test public final void testGetReference() { Reference r1 = Datamodel.makeReference(Collections .emptyList()); Reference r2 = factory .getReference(Collections.emptyList()); assertEquals(r1, r2); } @Test public final void testGetStatement() { Statement o1 = Datamodel.makeStatement( factory.getItemIdValue("Q42", "foo"), factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, "MyId"); Statement o2 = factory.getStatement( factory.getItemIdValue("Q42", "foo"), factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, "MyId"); assertEquals(o1, o2); } @Test public final void testGetStatementGroup() { Statement s = Datamodel.makeStatement( factory.getItemIdValue("Q42", "foo"), factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, "MyId"); StatementGroup o1 = Datamodel.makeStatementGroup(Collections.singletonList(s)); StatementGroup o2 = factory.getStatementGroup(Collections.singletonList(s)); assertEquals(o1, o2); } @Test public final void testGetSiteLink() { SiteLink o1 = Datamodel.makeSiteLink("SOLID", "enwiki", Collections.emptyList()); SiteLink o2 = factory.getSiteLink("SOLID", "enwiki", Collections.emptyList()); SiteLink o3 = Datamodel.makeSiteLink("SOLID", "enwiki"); assertEquals(o1, o2); assertEquals(o3, o2); } @Test public final void testGetPropertyDocument() { PropertyDocument o1 = Datamodel.makePropertyDocument( factory.getPropertyIdValue("P42", "foo"), factory.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_TIME)); PropertyDocument o2 = factory.getPropertyDocument( factory.getPropertyIdValue("P42", "foo"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), factory.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_TIME), 0); assertEquals(o1, o2); } @Test public final void testGetItemDocument() { ItemDocument o1 = Datamodel.makeItemDocument( factory.getItemIdValue("Q42", "foo")); ItemDocument o2 = factory.getItemDocument( factory.getItemIdValue("Q42", "foo"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), 0); assertEquals(o1, o2); } @Test public final void testGetLexemeDocument() { LexemeDocument o1 = Datamodel.makeLexemeDocument( factory.getLexemeIdValue("L42", "foo"), factory.getItemIdValue("Q1", "foo"), factory.getItemIdValue("Q2", "foo"), Collections.singletonList(factory.getMonolingualTextValue("foo", "en")), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); LexemeDocument o2 = factory.getLexemeDocument( 
factory.getLexemeIdValue("L42", "foo"), factory.getItemIdValue("Q1", "foo"), factory.getItemIdValue("Q2", "foo"), Collections.singletonList(factory.getMonolingualTextValue("foo", "en")), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); assertEquals(o1, o2); } @Test public final void testGetFormDocument() { FormDocument o1 = Datamodel.makeFormDocument( factory.getFormIdValue("L42-F1", "foo"), Collections.singletonList(factory.getMonolingualTextValue("en", "foo")), Collections.singletonList(factory.getItemIdValue("Q1", "foo")), Collections.emptyList()); FormDocument o2 = factory.getFormDocument( factory.getFormIdValue("L42-F1", "foo"), Collections.singletonList(factory.getMonolingualTextValue("en", "foo")), Collections.singletonList(factory.getItemIdValue("Q1", "foo")), Collections.emptyList(), 0); assertEquals(o1, o2); } @Test public final void testGetSenseDocument() { SenseDocument o1 = Datamodel.makeSenseDocument( factory.getSenseIdValue("L42-S1", "foo"), Collections.singletonList(factory.getMonolingualTextValue("en", "foo")), Collections.emptyList()); SenseDocument o2 = factory.getSenseDocument( factory.getSenseIdValue("L42-S1", "foo"), Collections.singletonList(factory.getMonolingualTextValue("en", "foo")), Collections.emptyList(), 0); assertEquals(o1, o2); } @Test public final void testGetMediaInfoDocument() { MediaInfoDocument o1 = Datamodel.makeMediaInfoDocument( factory.getMediaInfoIdValue("M42", "foo"), Collections.emptyList(), Collections.emptyList()); MediaInfoDocument o2 = factory.getMediaInfoDocument( factory.getMediaInfoIdValue("M42", "foo"), Collections.emptyList(), Collections.emptyList(), 0); assertEquals(o1, o2); } } DatatypeConvertersTest.java000066400000000000000000000025411444772566300366010ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.assertEquals; import java.math.BigDecimal; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; public class DatatypeConvertersTest { @Test public void testFormatTimeISO8601() { TimeValue time = Datamodel.makeTimeValue(306, (byte) 11, (byte) 3, (byte) 13, (byte) 7, (byte) 6, TimeValue.PREC_SECOND, 0, 0, 0, TimeValue.CM_GREGORIAN_PRO); assertEquals(DataFormatter.formatTimeISO8601(time), "+00000000306-11-03T13:07:06Z"); } @Test public void testBigDecimals() { BigDecimal test = new BigDecimal(3638); assertEquals(DataFormatter.formatBigDecimal(test), "+3638"); } } EntityUpdateBuilderTest.java000066400000000000000000000133341444772566300367030ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.*; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; public class EntityUpdateBuilderTest { static final ItemIdValue Q1 = Datamodel.makeWikidataItemIdValue("Q1"); static final PropertyIdValue P1 = Datamodel.makeWikidataPropertyIdValue("P1"); static final MediaInfoIdValue M1 = Datamodel.makeWikimediaCommonsMediaInfoIdValue("M1"); static final LexemeIdValue L1 = Datamodel.makeWikidataLexemeIdValue("L1"); static final FormIdValue F1 = Datamodel.makeWikidataFormIdValue("L1-F1"); static final SenseIdValue S1 = Datamodel.makeWikidataSenseIdValue("L1-S1"); static final ItemDocument ITEM = Datamodel.makeItemDocument(Q1).withRevisionId(123); static final PropertyDocument PROPERTY = Datamodel.makePropertyDocument( P1, Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_ITEM)); static final MediaInfoDocument MEDIA = Datamodel.makeMediaInfoDocument(M1); static final LexemeDocument LEXEME = Datamodel.makeLexemeDocument(L1, Q1, Q1, Collections.emptyList()); static final FormDocument FORM = Datamodel.makeFormDocument(F1, Collections.emptyList(), 
Collections.emptyList(), Collections.emptyList()); static final SenseDocument SENSE = Datamodel.makeSenseDocument(S1, Collections.emptyList(), Collections.emptyList()); @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> EntityUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> EntityUpdateBuilder.forEntityId(ItemIdValue.NULL)); assertThat(EntityUpdateBuilder.forEntityId(Q1), is(instanceOf(ItemUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forEntityId(P1), is(instanceOf(PropertyUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forEntityId(M1), is(instanceOf(MediaInfoUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forEntityId(L1), is(instanceOf(LexemeUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forEntityId(F1), is(instanceOf(FormUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forEntityId(S1), is(instanceOf(SenseUpdateBuilder.class))); EntityUpdateBuilder builder = EntityUpdateBuilder.forEntityId(Q1); assertEquals(Q1, builder.getEntityId()); assertNull(builder.getBaseRevision()); assertEquals(0, builder.getBaseRevisionId()); } @Test public void testForBaseRevisionId() { EntityUpdateBuilder builder = EntityUpdateBuilder.forBaseRevisionId(Q1, 123); assertEquals(Q1, builder.getEntityId()); assertNull(builder.getBaseRevision()); assertEquals(123, builder.getBaseRevisionId()); assertEquals(123, EntityUpdateBuilder.forBaseRevisionId(Q1, 123).getBaseRevisionId()); assertEquals(123, EntityUpdateBuilder.forBaseRevisionId(P1, 123).getBaseRevisionId()); assertEquals(123, EntityUpdateBuilder.forBaseRevisionId(M1, 123).getBaseRevisionId()); assertEquals(123, EntityUpdateBuilder.forBaseRevisionId(L1, 123).getBaseRevisionId()); assertEquals(123, EntityUpdateBuilder.forBaseRevisionId(F1, 123).getBaseRevisionId()); assertEquals(123, EntityUpdateBuilder.forBaseRevisionId(S1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> EntityUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> EntityUpdateBuilder.forBaseRevision(Datamodel.makeItemDocument(ItemIdValue.NULL))); assertThat(EntityUpdateBuilder.forBaseRevision(ITEM), is(instanceOf(ItemUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forBaseRevision(PROPERTY), is(instanceOf(PropertyUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forBaseRevision(MEDIA), is(instanceOf(MediaInfoUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forBaseRevision(LEXEME), is(instanceOf(LexemeUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forBaseRevision(FORM), is(instanceOf(FormUpdateBuilder.class))); assertThat(EntityUpdateBuilder.forBaseRevision(SENSE), is(instanceOf(SenseUpdateBuilder.class))); EntityUpdateBuilder builder = EntityUpdateBuilder.forBaseRevision(ITEM); assertEquals(Q1, builder.getEntityId()); assertSame(ITEM, builder.getBaseRevision()); assertEquals(123, builder.getBaseRevisionId()); } } FormUpdateBuilderTest.java000066400000000000000000000145761444772566300363430ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.*; import java.util.Arrays; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; public class FormUpdateBuilderTest { private static final FormIdValue F1 = EntityUpdateBuilderTest.F1; private static final FormDocument FORM = EntityUpdateBuilderTest.FORM; private static final Statement F1_DESCRIBES_SOMETHING = StatementBuilder .forSubjectAndProperty(F1, Datamodel.makeWikidataPropertyIdValue("P1")) .withValue(Datamodel.makeStringValue("something")) .build(); private static final Statement F1_EVOKES_FEELING = StatementBuilder .forSubjectAndProperty(F1, Datamodel.makeWikidataPropertyIdValue("P2")) .withValue(Datamodel.makeStringValue("feeling")) .build(); private static final MonolingualTextValue EN = TermUpdateBuilderTest.EN; private static final MonolingualTextValue SK = TermUpdateBuilderTest.SK; private static final ItemIdValue Q1 = EntityUpdateBuilderTest.Q1; private static final ItemIdValue Q2 = Datamodel.makeWikidataItemIdValue("Q2"); private static final ItemIdValue Q3 = Datamodel.makeWikidataItemIdValue("Q3"); @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> FormUpdateBuilder.forEntityId(FormIdValue.NULL)); FormUpdateBuilder.forEntityId(F1); } @Test public void testForBaseRevisionId() { assertEquals(123, FormUpdateBuilder.forBaseRevisionId(F1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> FormUpdateBuilder.forBaseRevision(FORM.withEntityId(FormIdValue.NULL))); FormUpdateBuilder.forBaseRevision(FORM); } @Test public void testStatementUpdate() { FormUpdate update = FormUpdateBuilder.forEntityId(F1) .updateStatements(StatementUpdateBuilder.create().add(F1_DESCRIBES_SOMETHING).build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(F1_DESCRIBES_SOMETHING)); } @Test public void testBlindRepresentationUpdate() { assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forEntityId(F1).updateRepresentations(null)); FormUpdate update = FormUpdateBuilder.forEntityId(F1) .updateRepresentations(TermUpdateBuilder.create().remove("en").build()) .updateRepresentations(TermUpdateBuilder.create().remove("sk").build()) .build(); assertThat(update.getRepresentations().getRemoved(), containsInAnyOrder("en", "sk")); } @Test public void testBaseRepresentationUpdate() { FormUpdate 
update = FormUpdateBuilder .forBaseRevision(FORM .withRepresentation(EN) .withRepresentation(SK)) .updateRepresentations(TermUpdateBuilder.create() .put(SK) // ignored .remove("en") // checked .build()) .build(); assertThat(update.getRepresentations().getModified(), is(anEmptyMap())); assertThat(update.getRepresentations().getRemoved(), containsInAnyOrder("en")); } @Test public void testBlindFeatureChange() { FormUpdateBuilder builder = FormUpdateBuilder.forEntityId(F1); assertThrows(NullPointerException.class, () -> builder.setGrammaticalFeatures(null)); assertThrows(NullPointerException.class, () -> builder.setGrammaticalFeatures(Arrays.asList(Q1, null))); assertThrows(IllegalArgumentException.class, () -> builder.setGrammaticalFeatures(Arrays.asList(ItemIdValue.NULL))); assertThrows(IllegalArgumentException.class, () -> builder.setGrammaticalFeatures(Arrays.asList(Q1, Q1))); assertFalse(builder.build().getGrammaticalFeatures().isPresent()); FormUpdate update = builder.setGrammaticalFeatures(Arrays.asList(Q1, Q2)).build(); assertThat(update.getGrammaticalFeatures().get(), containsInAnyOrder(Q1, Q2)); } @Test public void testBaseFeatureChange() { FormDocument base = FORM .withGrammaticalFeature(Q1) .withGrammaticalFeature(Q2); assertFalse(FormUpdateBuilder.forBaseRevision(base).build().getGrammaticalFeatures().isPresent()); assertFalse(FormUpdateBuilder.forBaseRevision(base) .setGrammaticalFeatures(Arrays.asList(Q1, Q2)) .build() .getGrammaticalFeatures().isPresent()); FormUpdate update = FormUpdateBuilder.forBaseRevision(base) .setGrammaticalFeatures(Arrays.asList(Q2, Q3)) .build(); assertThat(update.getGrammaticalFeatures().get(), containsInAnyOrder(Q2, Q3)); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> FormUpdateBuilder.forEntityId(F1).append(null)); FormUpdate update = FormUpdateBuilder.forEntityId(F1) .updateStatements(StatementUpdateBuilder.create().add(F1_DESCRIBES_SOMETHING).build()) .updateRepresentations(TermUpdateBuilder.create().remove("en").build()) .append(FormUpdateBuilder.forEntityId(F1) .updateStatements(StatementUpdateBuilder.create().add(F1_EVOKES_FEELING).build()) .updateRepresentations(TermUpdateBuilder.create().remove("sk").build()) .build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(F1_DESCRIBES_SOMETHING, F1_EVOKES_FEELING)); assertThat(update.getRepresentations().getRemoved(), containsInAnyOrder("en", "sk")); } } ItemDocumentBuilderTest.java000066400000000000000000000117421444772566300366620ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.assertEquals; import java.util.Arrays; import java.util.Collections; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; public class ItemDocumentBuilderTest { ItemIdValue i; StatementGroup sg; Statement s2; Statement s1; @Before public void setUp() { i = ItemIdValue.NULL; s1 = StatementBuilder.forSubjectAndProperty(i, Datamodel.makeWikidataPropertyIdValue("P1")).build(); s2 = StatementBuilder .forSubjectAndProperty(i, Datamodel.makeWikidataPropertyIdValue("P1")) .withValue(i).build(); sg = Datamodel.makeStatementGroup(Arrays.asList(s1, s2)); } @Test public void testEmptyItemDocumentBuild() { ItemDocument id1 = Datamodel.makeItemDocument(ItemIdValue.NULL, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), 0); ItemDocument id2 = ItemDocumentBuilder.forItemId(ItemIdValue.NULL) .build(); assertEquals(id1, id2); } @Test public void testComplexItemDocumentBuild() { MonolingualTextValue mtv = Datamodel.makeMonolingualTextValue("Test", "de"); SiteLink sl = Datamodel.makeSiteLink("Test", "frwiki", Collections.singletonList(Datamodel.makeWikidataItemIdValue("Q42"))); ItemDocument id1 = Datamodel.makeItemDocument(i, Collections.singletonList(mtv), Collections.singletonList(mtv), Collections.singletonList(mtv), Collections.singletonList(sg), Collections.singletonMap("frwiki", sl), 1234); ItemDocument id2 = ItemDocumentBuilder.forItemId(i) .withLabel("Test", "de").withDescription("Test", "de") .withAlias("Test", "de") .withSiteLink("Test", "frwiki", Datamodel.makeWikidataItemIdValue("Q42")).withStatement(s1) .withStatement(s2).withRevisionId(1234).build(); assertEquals(id1, id2); } @Test public void testModifyingBuild() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("canneberge", "fr"); MonolingualTextValue alias1 = Datamodel.makeMonolingualTextValue("grande airelle rouge d’Amérique du Nord", "fr"); MonolingualTextValue alias2 = Datamodel.makeMonolingualTextValue("atoca", "fr"); SiteLink sl = Datamodel.makeSiteLink("Canneberge", "frwiki", Collections.singletonList(Datamodel.makeWikidataItemIdValue("Q42"))); ItemDocument initial = Datamodel.makeItemDocument(i, Collections.singletonList(label), Collections.emptyList(), Arrays.asList(alias1, alias2), Collections.singletonList(sg), Collections.singletonMap("frwiki", sl), 1234); ItemDocument copy = ItemDocumentBuilder.fromItemDocument(initial).build(); assertEquals(copy, initial); MonolingualTextValue alias3 = Datamodel.makeMonolingualTextValue("cranberry", "fr"); ItemDocument withAlias = ItemDocumentBuilder.fromItemDocument(initial).withAlias(alias3).build(); assertEquals(withAlias.getAliases().get("fr"), Arrays.asList(alias1, alias2, alias3)); } @Test public void testChangeOfSubjectId() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("pleutre", "fr"); ItemDocument initial = Datamodel.makeItemDocument(i, Collections.singletonList(label), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(sg), Collections.emptyMap(), 4567); ItemDocument copy = 
ItemDocumentBuilder.fromItemDocument(initial).withEntityId(ItemIdValue.NULL).build(); assertEquals(ItemIdValue.NULL, copy.getEntityId()); assertEquals("pleutre", copy.findLabel("fr")); } @Test(expected = IllegalArgumentException.class) public void testInvalidChangeOfSubjectId() { ItemDocumentBuilder.forItemId(ItemIdValue.NULL).withRevisionId(1234).withEntityId(PropertyIdValue.NULL); } @Test(expected = IllegalStateException.class) public void testDoubleBuild() { ItemDocumentBuilder b = ItemDocumentBuilder.forItemId(ItemIdValue.NULL); b.build(); b.build(); } } ItemUpdateBuilderTest.java000066400000000000000000000167131444772566300363310ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.*; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; public class ItemUpdateBuilderTest { private static final ItemIdValue Q1 = EntityUpdateBuilderTest.Q1; private static final ItemDocument ITEM = EntityUpdateBuilderTest.ITEM; private static final Statement JOHN_HAS_BROWN_HAIR = StatementUpdateBuilderTest.JOHN_HAS_BROWN_HAIR; private static final SiteLink EN = Datamodel.makeSiteLink("hello", "enwiki"); private static final SiteLink EN2 = Datamodel.makeSiteLink("hi", "enwiki"); private static final SiteLink SK = Datamodel.makeSiteLink("ahoj", "skwiki"); private static final SiteLink CS = Datamodel.makeSiteLink("nazdar", "cswiki"); private static final SiteLink DE = Datamodel.makeSiteLink("Hallo", "dewiki"); private static final SiteLink DE2 = Datamodel.makeSiteLink("Guten Tag", "dewiki"); private static final SiteLink FR = Datamodel.makeSiteLink("Bonjour", "frwiki"); @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> ItemUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> ItemUpdateBuilder.forEntityId(PropertyIdValue.NULL)); ItemUpdateBuilder.forEntityId(Q1); } @Test public void testForBaseRevisionId() { assertEquals(123, ItemUpdateBuilder.forBaseRevisionId(Q1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> ItemUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> 
ItemUpdateBuilder.forBaseRevision(Datamodel.makeItemDocument(ItemIdValue.NULL))); ItemUpdateBuilder.forBaseRevision(ITEM); } @Test public void testStatementUpdate() { ItemUpdate update = ItemUpdateBuilder.forEntityId(Q1) .updateStatements(StatementUpdateBuilder.create().add(JOHN_HAS_BROWN_HAIR).build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(JOHN_HAS_BROWN_HAIR)); } @Test public void testLabelUpdate() { ItemUpdate update = ItemUpdateBuilder.forEntityId(Q1) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .build(); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("en")); } @Test public void testDescriptionUpdate() { ItemUpdate update = ItemUpdateBuilder.forEntityId(Q1) .updateDescriptions(TermUpdateBuilder.create().remove("en").build()) .build(); assertThat(update.getDescriptions().getRemoved(), containsInAnyOrder("en")); } @Test public void testAliasUpdate() { ItemUpdate update = ItemUpdateBuilder.forEntityId(Q1) .updateAliases("sk", AliasUpdateBuilder.create().add(TermUpdateBuilderTest.SK).build()) .build(); assertThat(update.getAliases().keySet(), containsInAnyOrder("sk")); } @Test public void testBlindSiteLinkAssignment() { ItemUpdateBuilder builder = ItemUpdateBuilder.forEntityId(Q1); assertThrows(NullPointerException.class, () -> builder.putSiteLink(null)); builder.removeSiteLink("skwiki"); builder.removeSiteLink("dewiki"); builder.putSiteLink(EN); // simple case builder.putSiteLink(SK); // previously removed ItemUpdate update = builder.build(); assertThat(update.getRemovedSiteLinks(), containsInAnyOrder("dewiki")); assertThat(update.getModifiedSiteLinks().keySet(), containsInAnyOrder("skwiki", "enwiki")); assertEquals(EN, update.getModifiedSiteLinks().get("enwiki")); assertEquals(SK, update.getModifiedSiteLinks().get("skwiki")); } @Test public void testBlindSiteLinkRemoval() { ItemUpdateBuilder builder = ItemUpdateBuilder.forEntityId(Q1); assertThrows(NullPointerException.class, () -> builder.removeSiteLink(null)); assertThrows(IllegalArgumentException.class, () -> builder.removeSiteLink(" ")); builder.putSiteLink(EN); builder.putSiteLink(SK); builder.removeSiteLink("dewiki"); // simple case builder.removeSiteLink("skwiki"); // previously assigned ItemUpdate update = builder.build(); assertThat(update.getRemovedSiteLinks(), containsInAnyOrder("skwiki", "dewiki")); assertThat(update.getModifiedSiteLinks().keySet(), containsInAnyOrder("enwiki")); } @Test public void testBaseSiteLinkAssignment() { ItemUpdateBuilder builder = ItemUpdateBuilder.forBaseRevision(ItemDocumentBuilder.fromItemDocument(ITEM) .withSiteLink(SK) .withSiteLink(EN) .withSiteLink(DE) .withSiteLink(CS) .build()); builder.removeSiteLink("skwiki"); builder.removeSiteLink("dewiki"); builder.putSiteLink(FR); // new language key builder.putSiteLink(EN2); // new value builder.putSiteLink(CS); // same value builder.putSiteLink(SK); // same value for previously removed builder.putSiteLink(DE2); // new value for previously removed ItemUpdate update = builder.build(); assertThat(update.getRemovedSiteLinks(), is(empty())); assertThat(update.getModifiedSiteLinks().keySet(), containsInAnyOrder("enwiki", "dewiki", "frwiki")); assertEquals(FR, update.getModifiedSiteLinks().get("frwiki")); assertEquals(EN2, update.getModifiedSiteLinks().get("enwiki")); assertEquals(DE2, update.getModifiedSiteLinks().get("dewiki")); } @Test public void testBaseSiteLinkRemoval() { ItemUpdateBuilder builder = ItemUpdateBuilder.forBaseRevision(ItemDocumentBuilder.fromItemDocument(ITEM) 
.withSiteLink(EN) .withSiteLink(SK) .withSiteLink(CS) .build()); builder.putSiteLink(EN2); builder.putSiteLink(DE); builder.removeSiteLink("skwiki"); // simple case builder.removeSiteLink("frwiki"); // not found builder.removeSiteLink("enwiki"); // previously modified builder.removeSiteLink("dewiki"); // previously added ItemUpdate update = builder.build(); assertThat(update.getModifiedSiteLinks(), anEmptyMap()); assertThat(update.getRemovedSiteLinks(), containsInAnyOrder("enwiki", "skwiki")); } @Test public void testMerge() { ItemUpdate update = ItemUpdateBuilder.forEntityId(Q1) .updateDescriptions(TermUpdateBuilder.create().remove("en").build()) .removeSiteLink("enwiki") .append(ItemUpdateBuilder.forEntityId(Q1) .updateDescriptions(TermUpdateBuilder.create().remove("sk").build()) .removeSiteLink("skwiki") .build()) .build(); assertThat(update.getDescriptions().getRemoved(), containsInAnyOrder("sk", "en")); assertThat(update.getRemovedSiteLinks(), containsInAnyOrder("skwiki", "enwiki")); } } JsonDeserializerTest.java000066400000000000000000000060151444772566300362270ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2020 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.Collections; import org.apache.commons.io.IOUtils; import org.junit.Assert; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; public class JsonDeserializerTest { public JsonDeserializer SUT = new JsonDeserializer(Datamodel.SITE_WIKIDATA); public JsonDeserializer SUTcommons = new JsonDeserializer(Datamodel.SITE_WIKIMEDIA_COMMONS); protected String loadJson(String filename) throws IOException { InputStream stream = JsonDeserializerTest.class.getClassLoader() .getResourceAsStream("JsonDeserializer/"+filename); return IOUtils.toString(stream, StandardCharsets.UTF_8); } @Test public void testLoadItemDocument() throws IOException { ItemDocument doc = SUT.deserializeItemDocument(loadJson("item.json")); Assert.assertEquals(doc.getEntityId(), Datamodel.makeWikidataItemIdValue("Q34987")); } @Test public void testLoadPropertyDocument() throws IOException { PropertyDocument doc = SUT.deserializePropertyDocument(loadJson("property.json")); Assert.assertEquals(doc.getEntityId(), Datamodel.makeWikidataPropertyIdValue("P3467")); } @Test public void testLoadLexemeDocument() throws IOException { LexemeDocument doc = SUT.deserializeLexemeDocument(loadJson("lexeme.json")); Assert.assertEquals(doc.getEntityId(), Datamodel.makeWikidataLexemeIdValue("L3872")); Assert.assertEquals(doc.getForm(Datamodel.makeWikidataFormIdValue("L3872-F2")).getStatementGroups(), Collections.emptyList()); } @Test public void testLoadMediaInfoDocument() throws IOException { MediaInfoDocument doc = SUTcommons.deserializeMediaInfoDocument(loadJson("mediainfo.json")); Assert.assertEquals(doc.getEntityId(), Datamodel.makeWikimediaCommonsMediaInfoIdValue("M74698470")); } @Test public void testDeserializeEntityDocument() throws IOException { EntityDocument doc = SUT.deserializeEntityDocument(loadJson("property.json")); Assert.assertEquals(doc.getEntityId(), Datamodel.makeWikidataPropertyIdValue("P3467")); } } JsonSerializerTest.java000066400000000000000000000161711444772566300357220ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.junit.Assert.*; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import org.junit.Test; import org.wikidata.wdtk.datamodel.implementation.EntityDocumentImpl; import org.wikidata.wdtk.datamodel.implementation.JsonComparator; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.MappingIterator; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; public class JsonSerializerTest { @Test public void testSerializer() throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); JsonSerializer serializer = new JsonSerializer(out); ItemIdValue qid1 = Datamodel.makeWikidataItemIdValue("Q1"); ItemDocument id1 = Datamodel.makeItemDocument( qid1, Collections.singletonList(Datamodel.makeMonolingualTextValue("Label1", "lang1")), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(Datamodel.makeStatementGroup(Collections.singletonList( Datamodel.makeStatement(qid1, Datamodel.makeNoValueSnak(Datamodel.makeWikidataPropertyIdValue("P42")), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, "MyId" )))), Collections.emptyMap(), 1234); ItemDocument id2 = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q2"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), 12); PropertyDocument pd1 = Datamodel.makePropertyDocument( Datamodel.makeWikidataPropertyIdValue("P1"), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(Datamodel.makeMonolingualTextValue("Alias1", "lang1")), Collections.emptyList(), Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_COMMONS_MEDIA), 3456); serializer.open(); serializer.processItemDocument(id1); serializer.processItemDocument(id2); serializer.processPropertyDocument(pd1); serializer.close(); List<EntityDocument> inputDocuments = Arrays.asList(id1, id2, pd1); List<EntityDocument> outputDocuments = new ArrayList<>(); ObjectMapper mapper = new DatamodelMapper("http://www.wikidata.org/entity/"); ObjectReader documentReader = mapper.readerFor(EntityDocumentImpl.class); MappingIterator<EntityDocument> documentIterator = documentReader.readValues(out.toString()); while (documentIterator.hasNextValue()) { outputDocuments.add(documentIterator.nextValue()); } documentIterator.close(); assertEquals(inputDocuments, outputDocuments); } @Test public void testItemDocumentToJson() throws JsonProcessingException { ItemDocument id = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap()); String json =
"{\"type\":\"item\",\"id\":\"Q42\",\"labels\":{},\"descriptions\":{},\"aliases\":{},\"claims\":{},\"sitelinks\":{}}"; JsonComparator.compareJsonStrings(json, JsonSerializer.getJsonString(id)); } @Test public void testPropertyDocumentToJson() throws JsonProcessingException { PropertyDocument pd = Datamodel.makePropertyDocument( Datamodel.makeWikidataPropertyIdValue("P1"), Collections. emptyList(), Collections. emptyList(), Collections. emptyList(), Collections. emptyList(), Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_ITEM)); String json = "{\"id\":\"P1\",\"aliases\":{},\"labels\":{},\"descriptions\":{},\"claims\":{},\"type\":\"property\", \"datatype\":\"wikibase-item\"}"; JsonComparator.compareJsonStrings(json, JsonSerializer.getJsonString(pd)); } @Test public void testStatementToJson() throws JsonProcessingException { Statement s = Datamodel.makeStatement(ItemIdValue.NULL, Datamodel.makeNoValueSnak(Datamodel.makeWikidataPropertyIdValue("P1")), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, "MyId"); String json = "{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P1\",\"snaktype\":\"novalue\"},\"type\":\"statement\"}"; JsonComparator.compareJsonStrings(json, JsonSerializer.getJsonString(s)); } @Test public void testJacksonObjectToJsonError() { ItemDocument obj = new ItemDocument() { @Override public List getStatementGroups() { return null; } @Override public long getRevisionId() { return 0; } @Override public Map getLabels() { return null; } @Override public Map getDescriptions() { return null; } @Override public Map> getAliases() { return null; } @Override public ItemDocument withEntityId(ItemIdValue newEntityId) { return null; } @Override public ItemDocument withoutStatementIds(Set statementIds) { return null; } @Override public ItemDocument withStatement(Statement statement) { return null; } @Override public ItemDocument withRevisionId(long newRevisionId) { return null; } @Override public ItemDocument withLabel(MonolingualTextValue newLabel) { return null; } @Override public ItemDocument withDescription(MonolingualTextValue newDescription) { return null; } @Override public ItemDocument withAliases(String language, List aliases) { return null; } @Override public Map getSiteLinks() { return null; } @Override public ItemIdValue getEntityId() { return null; } }; assertThrows(JsonProcessingException.class, () -> JsonSerializer.getJsonString(obj)); } } LabeledDocumentUpdateBuilderTest.java000066400000000000000000000145131444772566300404560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LabeledDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; public class LabeledDocumentUpdateBuilderTest { private static final ItemIdValue Q1 = EntityUpdateBuilderTest.Q1; private static final PropertyIdValue P1 = EntityUpdateBuilderTest.P1; private static final MediaInfoIdValue M1 = EntityUpdateBuilderTest.M1; private static final LexemeIdValue L1 = EntityUpdateBuilderTest.L1; private static final ItemDocument ITEM = EntityUpdateBuilderTest.ITEM; private static final PropertyDocument PROPERTY = EntityUpdateBuilderTest.PROPERTY; private static final MediaInfoDocument MEDIA = EntityUpdateBuilderTest.MEDIA; private static final Statement JOHN_HAS_BROWN_HAIR = StatementUpdateBuilderTest.JOHN_HAS_BROWN_HAIR; private static final Statement JOHN_HAS_BLUE_EYES = StatementUpdateBuilderTest.JOHN_HAS_BLUE_EYES; private static final MonolingualTextValue EN = TermUpdateBuilderTest.EN; private static final MonolingualTextValue SK = TermUpdateBuilderTest.SK; @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> LabeledDocumentUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> LabeledDocumentUpdateBuilder.forEntityId(ItemIdValue.NULL)); assertThrows(IllegalArgumentException.class, () -> LabeledDocumentUpdateBuilder.forEntityId(L1)); assertThat(LabeledDocumentUpdateBuilder.forEntityId(Q1), is(instanceOf(ItemUpdateBuilder.class))); assertThat(LabeledDocumentUpdateBuilder.forEntityId(P1), is(instanceOf(PropertyUpdateBuilder.class))); assertThat(LabeledDocumentUpdateBuilder.forEntityId(M1), is(instanceOf(MediaInfoUpdateBuilder.class))); } @Test public void testForBaseRevisionId() { assertEquals(123, LabeledDocumentUpdateBuilder.forBaseRevisionId(Q1, 123).getBaseRevisionId()); assertEquals(123, LabeledDocumentUpdateBuilder.forBaseRevisionId(P1, 123).getBaseRevisionId()); assertEquals(123, LabeledDocumentUpdateBuilder.forBaseRevisionId(M1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> LabeledDocumentUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> LabeledDocumentUpdateBuilder.forBaseRevision(Datamodel.makeItemDocument(ItemIdValue.NULL))); assertThat(LabeledDocumentUpdateBuilder.forBaseRevision(ITEM), is(instanceOf(ItemUpdateBuilder.class))); assertThat(LabeledDocumentUpdateBuilder.forBaseRevision(PROPERTY), is(instanceOf(PropertyUpdateBuilder.class))); 
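// Note (added commentary, not original test logic): forBaseRevision dispatches on the
// concrete document type, so each supported document kind yields its specialized builder.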
assertThat(LabeledDocumentUpdateBuilder.forBaseRevision(MEDIA), is(instanceOf(MediaInfoUpdateBuilder.class))); } @Test public void testStatementUpdate() { LabeledStatementDocumentUpdate update = LabeledDocumentUpdateBuilder.forEntityId(Q1) .updateStatements(StatementUpdateBuilder.create().add(JOHN_HAS_BROWN_HAIR).build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(JOHN_HAS_BROWN_HAIR)); } @Test public void testBlindLabelUpdate() { assertThrows(NullPointerException.class, () -> LabeledDocumentUpdateBuilder.forEntityId(Q1).updateLabels(null)); LabeledDocumentUpdate update = LabeledDocumentUpdateBuilder.forEntityId(Q1) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .updateLabels(TermUpdateBuilder.create().remove("sk").build()) .build(); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("en", "sk")); } @Test public void testBaseLabelUpdate() { LabeledDocumentUpdate update = LabeledDocumentUpdateBuilder .forBaseRevision(ITEM .withLabel(EN) .withLabel(SK)) .updateLabels(TermUpdateBuilder.create() .put(SK) // ignored .remove("en") // checked .build()) .build(); assertThat(update.getLabels().getModified(), is(anEmptyMap())); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("en")); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> LabeledDocumentUpdateBuilder.forEntityId(Q1).append(null)); LabeledDocumentUpdateBuilder builder = LabeledDocumentUpdateBuilder.forEntityId(Q1) .updateStatements(StatementUpdateBuilder.create().add(JOHN_HAS_BROWN_HAIR).build()) .updateLabels(TermUpdateBuilder.create().remove("en").build()); builder.append(LabeledDocumentUpdateBuilder.forEntityId(Q1) .updateStatements(StatementUpdateBuilder.create().add(JOHN_HAS_BLUE_EYES).build()) .updateLabels(TermUpdateBuilder.create().remove("sk").build()) .build()); LabeledStatementDocumentUpdate update = builder.build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(JOHN_HAS_BROWN_HAIR, JOHN_HAS_BLUE_EYES)); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("en", "sk")); } } LexemeUpdateBuilderTest.java000066400000000000000000000465471444772566300366620ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThrows; import java.util.Arrays; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.Statement; public class LexemeUpdateBuilderTest { private static final LexemeIdValue L1 = EntityUpdateBuilderTest.L1; private static final Statement L1_DESCRIBES_SOMETHING = StatementBuilder .forSubjectAndProperty(L1, Datamodel.makeWikidataPropertyIdValue("P1")) .withValue(Datamodel.makeStringValue("something")) .build(); private static final Statement L1_EVOKES_FEELING = StatementBuilder .forSubjectAndProperty(L1, Datamodel.makeWikidataPropertyIdValue("P2")) .withValue(Datamodel.makeStringValue("feeling")) .build(); private static final MonolingualTextValue EN = TermUpdateBuilderTest.EN; private static final MonolingualTextValue SK = TermUpdateBuilderTest.SK; private static final ItemIdValue Q1 = EntityUpdateBuilderTest.Q1; private static final ItemIdValue Q2 = Datamodel.makeWikidataItemIdValue("Q2"); private static final ItemIdValue Q3 = Datamodel.makeWikidataItemIdValue("Q3"); private static final LexemeDocument LEXEME = Datamodel.makeLexemeDocument(L1, Q1, Q2, Arrays.asList(EN)); private static final PropertyIdValue INSTANCE_OF = Datamodel.makeWikidataPropertyIdValue("P31"); private static final ItemIdValue OBSOLETE = Datamodel.makeWikidataItemIdValue("Q11"); private static final ItemIdValue RARE = Datamodel.makeWikidataItemIdValue("Q12"); @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forEntityId(LexemeIdValue.NULL)); LexemeUpdateBuilder.forEntityId(L1); } @Test public void testForBaseRevisionId() { assertEquals(123, LexemeUpdateBuilder.forBaseRevisionId(L1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forBaseRevision(LEXEME.withEntityId(LexemeIdValue.NULL))); LexemeUpdateBuilder.forBaseRevision(LEXEME); } @Test public void testStatementUpdate() { LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .updateStatements(StatementUpdateBuilder.create().add(L1_DESCRIBES_SOMETHING).build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(L1_DESCRIBES_SOMETHING)); } 
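// Illustrative usage sketch (commentary only, not an original test; it reuses constants
// and builder calls exercised elsewhere in this class): a client would typically chain
// several changes to one lexeme and build a single update, e.g.
//
//   LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1)
//       .setLanguage(Q3)
//       .updateLemmas(TermUpdateBuilder.create().put(EN).build())
//       .updateStatements(StatementUpdateBuilder.create().add(L1_DESCRIBES_SOMETHING).build())
//       .build();
//
// The tests below verify each of these operations and their validation rules in isolation.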
@Test public void testLanguageChange() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).setLanguage(null)); assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forEntityId(L1).setLanguage(ItemIdValue.NULL)); assertEquals(Q3, LexemeUpdateBuilder.forEntityId(L1).setLanguage(Q3).build().getLanguage().get()); // different value assertEquals(Q3, LexemeUpdateBuilder.forBaseRevision(LEXEME).setLanguage(Q3).build().getLanguage().get()); // same value assertFalse(LexemeUpdateBuilder.forBaseRevision(LEXEME).setLanguage(Q2).build().getLanguage().isPresent()); // restore previous value assertFalse(LexemeUpdateBuilder.forBaseRevision(LEXEME) .setLanguage(Q3) .setLanguage(Q2) .build() .getLanguage().isPresent()); } @Test public void testLexicalCategoryChange() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).setLexicalCategory(null)); assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forEntityId(L1).setLexicalCategory(ItemIdValue.NULL)); assertEquals(Q3, LexemeUpdateBuilder.forEntityId(L1).setLexicalCategory(Q3).build().getLexicalCategory().get()); // different value assertEquals(Q3, LexemeUpdateBuilder.forBaseRevision(LEXEME) .setLexicalCategory(Q3) .build() .getLexicalCategory().get()); // same value assertFalse(LexemeUpdateBuilder.forBaseRevision(LEXEME) .setLexicalCategory(Q1) .build() .getLexicalCategory().isPresent()); // restore previous value assertFalse(LexemeUpdateBuilder.forBaseRevision(LEXEME) .setLexicalCategory(Q3) .setLexicalCategory(Q1) .build() .getLexicalCategory().isPresent()); } @Test public void testBlindLemmaUpdate() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).updateLemmas(null)); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .updateLemmas(TermUpdateBuilder.create().remove("en").build()) .updateLemmas(TermUpdateBuilder.create().remove("sk").build()) .build(); assertThat(update.getLemmas().getRemoved(), containsInAnyOrder("en", "sk")); } @Test public void testBaseLemmaUpdate() { LexemeUpdate update = LexemeUpdateBuilder .forBaseRevision(LEXEME .withLemma(EN) .withLemma(SK)) .updateLemmas(TermUpdateBuilder.create() .put(SK) // ignored .remove("en") // checked .build()) .build(); assertThat(update.getLemmas().getModified(), is(anEmptyMap())); assertThat(update.getLemmas().getRemoved(), containsInAnyOrder("en")); } private static FormDocument form(String representation) { return Datamodel.makeFormDocument( FormIdValue.NULL, Arrays.asList(Datamodel.makeMonolingualTextValue(representation, "en")), Collections.emptyList(), Collections.emptyList()); } private static FormIdValue formId(int id) { return Datamodel.makeWikidataFormIdValue("L1-F" + id); } private static FormDocument form(int id, String representation) { return form(representation).withEntityId(formId(id)); } private static FormUpdate formUpdate(int id, String representation) { return FormUpdateBuilder.forEntityId(formId(id)) .updateRepresentations(TermUpdateBuilder.create() .put(Datamodel.makeMonolingualTextValue(representation, "en")) .build()) .build(); } private static FormUpdate withBase(LexemeDocument base, FormUpdate update) { return FormUpdateBuilder.forBaseRevision(base.getForm(update.getEntityId())).append(update).build(); } private static FormUpdate formUpdate(int id, ItemIdValue... 
classes) { FormIdValue formId = formId(id); StatementUpdateBuilder statements = StatementUpdateBuilder.create(); for (ItemIdValue clazz : classes) { statements.add(StatementBuilder.forSubjectAndProperty(formId, INSTANCE_OF) .withValue(clazz) .build()); } return FormUpdateBuilder.forEntityId(formId) .updateStatements(statements.build()) .build(); } @Test public void testFormAddition() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).addForm(null)); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .addForm(form("swim")) // simple case .addForm(form("swim")) // duplicates allowed .addForm(form(2, "swimming")) // strip ID .addForm(form("swam").withRevisionId(123)) // strip revision ID .build(); assertEquals(Arrays.asList(form("swim"), form("swim"), form("swimming"), form("swam")), update.getAddedForms()); } @Test public void testBlindFormUpdate() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).updateForm(null)); // cannot update removed form assertThrows(IllegalStateException.class, () -> LexemeUpdateBuilder.forEntityId(L1) .removeForm(formId(1)) .updateForm(formUpdate(1, RARE))); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .updateForm(formUpdate(1, OBSOLETE)) // simple case .updateForm(formUpdate(2, RARE)) .updateForm(formUpdate(2, OBSOLETE)) // merge updates .updateForm(formUpdate(3)) // empty update .build(); assertThat(update.getRemovedForms(), is(empty())); assertThat(update.getUpdatedForms().keySet(), containsInAnyOrder(formId(1), formId(2))); assertEquals(formUpdate(1, OBSOLETE), update.getUpdatedForms().get(formId(1))); assertEquals(formUpdate(2, RARE, OBSOLETE), update.getUpdatedForms().get(formId(2))); // synchronize revision IDs assertEquals(123, LexemeUpdateBuilder.forBaseRevisionId(L1, 123) .updateForm(formUpdate(1, OBSOLETE)) .build() .getUpdatedForms() .get(formId(1)) .getBaseRevisionId()); } @Test public void testBaseFormUpdate() { assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forBaseRevision(LEXEME).updateForm(formUpdate(99, RARE))); LexemeDocument base = LEXEME .withForm(form(1, "swim")) .withForm(form(2, "swims")) .withForm(form(3, "swimming")) .withForm(form(4, "swam")); LexemeUpdate update = LexemeUpdateBuilder.forBaseRevision(base) .updateForm(formUpdate(1, "swims")) // simple case .updateForm(formUpdate(2, "swims")) // replace with the same .updateForm(formUpdate(3, "swam")) .updateForm(formUpdate(3, "swimming")) // revert previous update .updateForm(formUpdate(4, RARE)) .updateForm(formUpdate(4, OBSOLETE)) // merge updates .build(); assertThat(update.getRemovedForms(), is(empty())); assertThat(update.getUpdatedForms().keySet(), containsInAnyOrder(formId(1), formId(4))); assertEquals(withBase(base, formUpdate(1, "swims")), update.getUpdatedForms().get(formId(1))); assertEquals(withBase(base, formUpdate(4, RARE, OBSOLETE)), update.getUpdatedForms().get(formId(4))); } @Test public void testBlindFormRemoval() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).removeForm(null)); assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forEntityId(L1).removeForm(FormIdValue.NULL)); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .updateForm(formUpdate(2, RARE)) .removeForm(formId(1)) // simple case .removeForm(formId(2)) // previously updated .removeForm(formId(3)) .removeForm(formId(3)) // duplicate removal allowed .build(); assertThat(update.getRemovedForms(), containsInAnyOrder(formId(1), 
formId(2), formId(3))); assertThat(update.getUpdatedForms(), is(anEmptyMap())); } @Test public void testBaseFormRemoval() { assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forBaseRevision(LEXEME).removeForm(formId(1))); LexemeUpdate update = LexemeUpdateBuilder .forBaseRevision(LEXEME .withForm(form(1, "swim")) .withForm(form(2, "swims"))) .updateForm(formUpdate(2, RARE)) .removeForm(formId(1)) // simple case .removeForm(formId(2)) // previously updated .build(); assertThat(update.getRemovedForms(), containsInAnyOrder(formId(1), formId(2))); assertThat(update.getUpdatedForms(), is(anEmptyMap())); } private static SenseDocument sense(String gloss) { return Datamodel.makeSenseDocument( SenseIdValue.NULL, Arrays.asList(Datamodel.makeMonolingualTextValue(gloss, "en")), Collections.emptyList()); } private static SenseIdValue senseId(int id) { return Datamodel.makeWikidataSenseIdValue("L1-S" + id); } private static SenseDocument sense(int id, String gloss) { return sense(gloss).withEntityId(senseId(id)); } private static SenseUpdate senseUpdate(int id, String gloss) { return SenseUpdateBuilder.forEntityId(senseId(id)) .updateGlosses(TermUpdateBuilder.create() .put(Datamodel.makeMonolingualTextValue(gloss, "en")) .build()) .build(); } private static SenseUpdate withBase(LexemeDocument base, SenseUpdate update) { return SenseUpdateBuilder.forBaseRevision(base.getSense(update.getEntityId())).append(update).build(); } private static SenseUpdate senseUpdate(int id, ItemIdValue... classes) { SenseIdValue senseId = senseId(id); StatementUpdateBuilder statements = StatementUpdateBuilder.create(); for (ItemIdValue clazz : classes) { statements.add(StatementBuilder.forSubjectAndProperty(senseId, INSTANCE_OF) .withValue(clazz) .build()); } return SenseUpdateBuilder.forEntityId(senseId) .updateStatements(statements.build()) .build(); } @Test public void testSenseAddition() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).addSense(null)); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .addSense(sense("move")) // simple case .addSense(sense("move")) // duplicates allowed .addSense(sense(2, "immerse")) // strip ID .addSense(sense("float").withRevisionId(123)) // strip revision ID .build(); assertEquals( Arrays.asList(sense("move"), sense("move"), sense("immerse"), sense("float")), update.getAddedSenses()); } @Test public void testBlindSenseUpdate() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).updateSense(null)); // cannot update removed sense assertThrows(IllegalStateException.class, () -> LexemeUpdateBuilder.forEntityId(L1) .removeSense(senseId(1)) .updateSense(senseUpdate(1, RARE))); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .updateSense(senseUpdate(1, OBSOLETE)) // simple case .updateSense(senseUpdate(2, RARE)) .updateSense(senseUpdate(2, OBSOLETE)) // merge updates .updateSense(senseUpdate(3)) // empty update .build(); assertThat(update.getRemovedSenses(), is(empty())); assertThat(update.getUpdatedSenses().keySet(), containsInAnyOrder(senseId(1), senseId(2))); assertEquals(senseUpdate(1, OBSOLETE), update.getUpdatedSenses().get(senseId(1))); assertEquals(senseUpdate(2, RARE, OBSOLETE), update.getUpdatedSenses().get(senseId(2))); // synchronize revision IDs assertEquals(123, LexemeUpdateBuilder.forBaseRevisionId(L1, 123) .updateSense(senseUpdate(1, OBSOLETE)) .build() .getUpdatedSenses() .get(senseId(1)) .getBaseRevisionId()); } @Test public void testBaseSenseUpdate() {
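// Base-revision semantics (added commentary): when built against a base document, updates
// that would leave a sense identical to its base revision are dropped, and repeated updates
// to the same sense are merged, mirroring the form behaviour checked in testBaseFormUpdate.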
assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forBaseRevision(LEXEME).updateSense(senseUpdate(99, RARE))); LexemeDocument base = LEXEME .withSense(sense(1, "move")) .withSense(sense(2, "immerse")) .withSense(sense(3, "traverse")) .withSense(sense(4, "float")); LexemeUpdate update = LexemeUpdateBuilder.forBaseRevision(base) .updateSense(senseUpdate(1, "move in water")) // simple case .updateSense(senseUpdate(2, "immerse")) // replace with the same .updateSense(senseUpdate(3, "traversal")) .updateSense(senseUpdate(3, "traverse")) // revert previous update .updateSense(senseUpdate(4, RARE)) .updateSense(senseUpdate(4, OBSOLETE)) // merge updates .build(); assertThat(update.getRemovedSenses(), is(empty())); assertThat(update.getUpdatedSenses().keySet(), containsInAnyOrder(senseId(1), senseId(4))); assertEquals(withBase(base, senseUpdate(1, "move in water")), update.getUpdatedSenses().get(senseId(1))); assertEquals(withBase(base, senseUpdate(4, RARE, OBSOLETE)), update.getUpdatedSenses().get(senseId(4))); } @Test public void testBlindSenseRemoval() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).removeSense(null)); assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forEntityId(L1).removeSense(SenseIdValue.NULL)); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .updateSense(senseUpdate(2, RARE)) .removeSense(senseId(1)) // simple case .removeSense(senseId(2)) // previously updated .removeSense(senseId(3)) .removeSense(senseId(3)) // duplicate removal allowed .build(); assertThat(update.getRemovedSenses(), containsInAnyOrder(senseId(1), senseId(2), senseId(3))); assertThat(update.getUpdatedSenses(), is(anEmptyMap())); } @Test public void testBaseSenseRemoval() { assertThrows(IllegalArgumentException.class, () -> LexemeUpdateBuilder.forBaseRevision(LEXEME).removeSense(senseId(1))); LexemeUpdate update = LexemeUpdateBuilder .forBaseRevision(LEXEME .withSense(sense(1, "move")) .withSense(sense(2, "float"))) .updateSense(senseUpdate(2, RARE)) .removeSense(senseId(1)) // simple case .removeSense(senseId(2)) // previously updated .build(); assertThat(update.getRemovedSenses(), containsInAnyOrder(senseId(1), senseId(2))); assertThat(update.getUpdatedSenses(), is(anEmptyMap())); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> LexemeUpdateBuilder.forEntityId(L1).append(null)); LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1) .updateStatements(StatementUpdateBuilder.create().add(L1_DESCRIBES_SOMETHING).build()) .updateLemmas(TermUpdateBuilder.create().remove("en").build()) .addForm(form("swim")) .updateForm(formUpdate(2, RARE)) .removeForm(formId(3)) .addSense(sense("move")) .updateSense(senseUpdate(2, RARE)) .removeSense(senseId(3)) .append(LexemeUpdateBuilder.forEntityId(L1) .updateStatements(StatementUpdateBuilder.create().add(L1_EVOKES_FEELING).build()) .updateLemmas(TermUpdateBuilder.create().remove("sk").build()) .addForm(form("swims")) .updateForm(formUpdate(2, OBSOLETE)) .removeForm(formId(4)) .addSense(sense("float")) .updateSense(senseUpdate(2, OBSOLETE)) .removeSense(senseId(4)) .build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(L1_DESCRIBES_SOMETHING, L1_EVOKES_FEELING)); assertThat(update.getLemmas().getRemoved(), containsInAnyOrder("en", "sk")); assertEquals(Arrays.asList(form("swim"), form("swims")), update.getAddedForms()); assertThat(update.getUpdatedForms().keySet(), containsInAnyOrder(formId(2))); 
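// Added commentary: appended updates that target the same form ID collapse into one merged
// FormUpdate, so form F2 ends up carrying both the RARE and the OBSOLETE statement: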
assertEquals(formUpdate(2, RARE, OBSOLETE), update.getUpdatedForms().get(formId(2))); assertThat(update.getRemovedForms(), containsInAnyOrder(formId(3), formId(4))); assertEquals(Arrays.asList(sense("move"), sense("float")), update.getAddedSenses()); assertThat(update.getUpdatedSenses().keySet(), containsInAnyOrder(senseId(2))); assertEquals(senseUpdate(2, RARE, OBSOLETE), update.getUpdatedSenses().get(senseId(2))); assertThat(update.getRemovedSenses(), containsInAnyOrder(senseId(3), senseId(4))); } } MediaInfoUpdateBuilderTest.java000066400000000000000000000064371444772566300372700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoUpdate; import org.wikidata.wdtk.datamodel.interfaces.Statement; public class MediaInfoUpdateBuilderTest { private static final MediaInfoIdValue M1 = EntityUpdateBuilderTest.M1; private static final MediaInfoDocument MEDIA = EntityUpdateBuilderTest.MEDIA; private static final Statement M1_DESCRIBES_SOMETHING = StatementBuilder .forSubjectAndProperty(M1, Datamodel.makeWikidataPropertyIdValue("P1")) .withValue(Datamodel.makeStringValue("something")) .build(); @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> MediaInfoUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> MediaInfoUpdateBuilder.forEntityId(MediaInfoIdValue.NULL)); MediaInfoUpdateBuilder.forEntityId(M1); } @Test public void testForBaseRevisionId() { assertEquals(123, MediaInfoUpdateBuilder.forBaseRevisionId(M1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> MediaInfoUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> MediaInfoUpdateBuilder.forBaseRevision(Datamodel.makeMediaInfoDocument(MediaInfoIdValue.NULL))); MediaInfoUpdateBuilder.forBaseRevision(MEDIA); } @Test public void testStatementUpdate() { MediaInfoUpdate update = MediaInfoUpdateBuilder.forEntityId(M1) .updateStatements(StatementUpdateBuilder.create().add(M1_DESCRIBES_SOMETHING).build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(M1_DESCRIBES_SOMETHING)); } @Test public void testLabelUpdate() { MediaInfoUpdate update = MediaInfoUpdateBuilder.forEntityId(M1) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .build(); assertThat(update.getLabels().getRemoved(), 
containsInAnyOrder("en")); } @Test public void testMerge() { MediaInfoUpdate update = MediaInfoUpdateBuilder.forEntityId(M1) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .apply(MediaInfoUpdateBuilder.forEntityId(M1) .updateLabels(TermUpdateBuilder.create().remove("sk").build()) .build()) .build(); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("sk", "en")); } } PropertyDocumentBuilderTest.java000066400000000000000000000054351444772566300376120ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; public class PropertyDocumentBuilderTest { @Test public void testSimplePropertyDocumentBuild() { MonolingualTextValue mtv = Datamodel.makeMonolingualTextValue("Test", "de"); PropertyDocument pd1 = Datamodel.makePropertyDocument( PropertyIdValue.NULL, Collections.singletonList(mtv), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_ITEM)); PropertyDocument pd2 = PropertyDocumentBuilder .forPropertyIdAndDatatype(PropertyIdValue.NULL, DatatypeIdValue.DT_ITEM).withLabel(mtv).build(); assertEquals(pd1, pd2); } @Test public void testModifyingBuild() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("color", "en"); PropertyDocument initial = Datamodel.makePropertyDocument(PropertyIdValue.NULL, Collections.singletonList(label), Collections.emptyList(), Collections.emptyList(), Collections. 
emptyList(), Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_QUANTITY), 1234); PropertyDocument copy = PropertyDocumentBuilder.fromPropertyDocument(initial).build(); assertEquals(copy, initial); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("tone", "en"); PropertyDocument withAlias = PropertyDocumentBuilder.fromPropertyDocument(initial).withAlias(alias).build(); assertEquals(withAlias.getAliases().get("en"), Collections.singletonList(alias)); } @Test(expected = IllegalArgumentException.class) public void testInvalidSubjectId() { PropertyDocumentBuilder.forPropertyIdAndDatatype(PropertyIdValue.NULL, DatatypeIdValue.DT_EXTERNAL_ID).withEntityId(ItemIdValue.NULL); } } PropertyUpdateBuilderTest.java000066400000000000000000000077351444772566300372630ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyUpdate; import org.wikidata.wdtk.datamodel.interfaces.Statement; public class PropertyUpdateBuilderTest { private static final PropertyIdValue P1 = EntityUpdateBuilderTest.P1; private static final PropertyDocument PROPERTY = EntityUpdateBuilderTest.PROPERTY; private static final Statement P1_DESCRIBES_SOMETHING = StatementBuilder .forSubjectAndProperty(P1, Datamodel.makeWikidataPropertyIdValue("P2")) .withValue(Datamodel.makeStringValue("something")) .build(); @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> PropertyUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> PropertyUpdateBuilder.forEntityId(PropertyIdValue.NULL)); PropertyUpdateBuilder.forEntityId(P1); } @Test public void testForBaseRevisionId() { assertEquals(123, PropertyUpdateBuilder.forBaseRevisionId(P1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> PropertyUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> PropertyUpdateBuilder.forBaseRevision( Datamodel.makePropertyDocument( PropertyIdValue.NULL, Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_ITEM)))); PropertyUpdateBuilder.forBaseRevision(PROPERTY); } @Test public void testStatementUpdate() { PropertyUpdate update = PropertyUpdateBuilder.forEntityId(P1) .updateStatements(StatementUpdateBuilder.create().add(P1_DESCRIBES_SOMETHING).build()) .build(); 
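// The statement added through updateStatements() should surface again in the
// aggregated StatementUpdate that getStatements() exposes on the built update.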
assertThat(update.getStatements().getAdded(), containsInAnyOrder(P1_DESCRIBES_SOMETHING)); } @Test public void testLabelUpdate() { PropertyUpdate update = PropertyUpdateBuilder.forEntityId(P1) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .build(); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("en")); } @Test public void testDescriptionUpdate() { PropertyUpdate update = PropertyUpdateBuilder.forEntityId(P1) .updateDescriptions(TermUpdateBuilder.create().remove("en").build()) .build(); assertThat(update.getDescriptions().getRemoved(), containsInAnyOrder("en")); } @Test public void testAliasUpdate() { PropertyUpdate update = PropertyUpdateBuilder.forEntityId(P1) .updateAliases("sk", AliasUpdateBuilder.create().add(TermUpdateBuilderTest.SK).build()) .build(); assertThat(update.getAliases().keySet(), containsInAnyOrder("sk")); } @Test public void testMerge() { PropertyUpdate update = PropertyUpdateBuilder.forEntityId(P1) .updateDescriptions(TermUpdateBuilder.create().remove("en").build()) .append(PropertyUpdateBuilder.forEntityId(P1) .updateDescriptions(TermUpdateBuilder.create().remove("sk").build()) .build()) .build(); assertThat(update.getDescriptions().getRemoved(), containsInAnyOrder("sk", "en")); } } ReferenceBuilderTest.java000066400000000000000000000036051444772566300361620ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.assertEquals; import java.util.Arrays; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; public class ReferenceBuilderTest { @Test public void testEmptyReference() { Reference r1 = Datamodel.makeReference(Collections .emptyList()); Reference r2 = ReferenceBuilder.newInstance().build(); assertEquals(r1, r2); } @Test public void testComplexReference() { ItemIdValue i = ItemIdValue.NULL; PropertyIdValue p = PropertyIdValue.NULL; Snak q1 = Datamodel.makeSomeValueSnak(p); Snak q2 = Datamodel.makeNoValueSnak(p); Snak q3 = Datamodel.makeValueSnak(p, i); SnakGroup sg = Datamodel.makeSnakGroup(Arrays.asList(q1, q2, q3)); Reference r1 = Datamodel.makeReference(Collections.singletonList(sg)); Reference r2 = ReferenceBuilder.newInstance().withSomeValue(p) .withNoValue(p).withPropertyValue(p, i).build(); assertEquals(r1, r2); } } SenseUpdateBuilderTest.java000066400000000000000000000112721444772566300365030ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.Statement; public class SenseUpdateBuilderTest { private static final SenseIdValue S1 = EntityUpdateBuilderTest.S1; private static final SenseDocument SENSE = EntityUpdateBuilderTest.SENSE; private static final Statement S1_DESCRIBES_SOMETHING = StatementBuilder .forSubjectAndProperty(S1, Datamodel.makeWikidataPropertyIdValue("P1")) .withValue(Datamodel.makeStringValue("something")) .build(); private static final Statement S1_EVOKES_FEELING = StatementBuilder .forSubjectAndProperty(S1, Datamodel.makeWikidataPropertyIdValue("P2")) .withValue(Datamodel.makeStringValue("feeling")) .build(); private static final MonolingualTextValue EN = TermUpdateBuilderTest.EN; private static final MonolingualTextValue SK = TermUpdateBuilderTest.SK; @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> SenseUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> SenseUpdateBuilder.forEntityId(SenseIdValue.NULL)); SenseUpdateBuilder.forEntityId(S1); } @Test public void testForBaseRevisionId() { assertEquals(123, SenseUpdateBuilder.forBaseRevisionId(S1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> SenseUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> SenseUpdateBuilder.forBaseRevision(SENSE.withEntityId(SenseIdValue.NULL))); SenseUpdateBuilder.forBaseRevision(SENSE); } @Test public void testStatementUpdate() { SenseUpdate update = SenseUpdateBuilder.forEntityId(S1) .updateStatements(StatementUpdateBuilder.create().add(S1_DESCRIBES_SOMETHING).build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(S1_DESCRIBES_SOMETHING)); } @Test public void testBlindGlossUpdate() { assertThrows(NullPointerException.class, () -> SenseUpdateBuilder.forEntityId(S1).updateGlosses(null)); SenseUpdate update = SenseUpdateBuilder.forEntityId(S1) .updateGlosses(TermUpdateBuilder.create().remove("en").build()) .updateGlosses(TermUpdateBuilder.create().remove("sk").build()) .build(); assertThat(update.getGlosses().getRemoved(), containsInAnyOrder("en", "sk")); } @Test public void testBaseGlossUpdate() { SenseUpdate update = SenseUpdateBuilder .forBaseRevision(SENSE .withGloss(EN) .withGloss(SK)) .updateGlosses(TermUpdateBuilder.create() .put(SK) // ignored .remove("en") // checked .build()) .build(); assertThat(update.getGlosses().getModified(), is(anEmptyMap())); assertThat(update.getGlosses().getRemoved(), containsInAnyOrder("en")); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> SenseUpdateBuilder.forEntityId(S1).append(null)); SenseUpdate update = SenseUpdateBuilder.forEntityId(S1) .updateStatements(StatementUpdateBuilder.create().add(S1_DESCRIBES_SOMETHING).build()) .updateGlosses(TermUpdateBuilder.create().remove("en").build()) 
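// append() below replays the second update's changes into this builder, so
// both statement additions and both gloss removals end up in the final update.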
.append(SenseUpdateBuilder.forEntityId(S1) .updateStatements(StatementUpdateBuilder.create().add(S1_EVOKES_FEELING).build()) .updateGlosses(TermUpdateBuilder.create().remove("sk").build()) .build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(S1_DESCRIBES_SOMETHING, S1_EVOKES_FEELING)); assertThat(update.getGlosses().getRemoved(), containsInAnyOrder("en", "sk")); } } StatementBuilderTest.java000066400000000000000000000120611444772566300362240ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import java.util.Arrays; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; public class StatementBuilderTest { @Test public void testEmptyStatement() { ItemIdValue i = ItemIdValue.NULL; PropertyIdValue p = PropertyIdValue.NULL; Statement stmt1 = Datamodel.makeStatement( i, Datamodel.makeSomeValueSnak(p), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, ""); Statement stmt2 = StatementBuilder.forSubjectAndProperty(i, p).build(); assertEquals(stmt1, stmt2); } @Test public void testComplexStatement() { ItemIdValue i = ItemIdValue.NULL; PropertyIdValue p = PropertyIdValue.NULL; Snak q1 = Datamodel.makeSomeValueSnak(p); Snak q2 = Datamodel.makeNoValueSnak(p); Snak q3 = Datamodel.makeValueSnak(p, i); SnakGroup sg = Datamodel.makeSnakGroup(Arrays.asList(q1, q2, q3)); Reference r = Datamodel.makeReference(Collections.singletonList(sg)); Statement stmt1 = Datamodel.makeStatement(i, Datamodel.makeValueSnak(p, i), Collections.singletonList(sg), Collections.singletonList(r), StatementRank.PREFERRED, "id"); Statement stmt2 = StatementBuilder.forSubjectAndProperty(i, p) .withRank(StatementRank.PREFERRED).withValue(i) .withQualifierSomeValue(p).withQualifierNoValue(p) .withQualifierValue(p, i).withId("id").withReference(r).build(); assertEquals(stmt1, stmt2); } @Test public void testQualifierList() { ItemIdValue i = ItemIdValue.NULL; PropertyIdValue p = PropertyIdValue.NULL; Snak q1 = Datamodel.makeSomeValueSnak(p); Snak q2 = Datamodel.makeNoValueSnak(p); Snak q3 = Datamodel.makeValueSnak(p, i); SnakGroup sg = Datamodel.makeSnakGroup(Arrays.asList(q1, q2, q3)); Reference r = Datamodel.makeReference(Collections.singletonList(sg)); Statement stmt1 = Datamodel.makeStatement(i, Datamodel.makeValueSnak(p, i), Collections.singletonList(sg), Collections.singletonList(r), 
StatementRank.PREFERRED, "id"); Statement stmt2 = StatementBuilder.forSubjectAndProperty(i, p) .withRank(StatementRank.PREFERRED).withValue(i) .withQualifiers(stmt1.getQualifiers()).withId("id") .withReference(r).build(); assertEquals(stmt1, stmt2); } @Test public void testReferenceList() { ItemIdValue i = ItemIdValue.NULL; PropertyIdValue p = PropertyIdValue.NULL; Reference r1 = ReferenceBuilder.newInstance().withSomeValue(p).build(); Reference r2 = ReferenceBuilder.newInstance().withPropertyValue(p, i) .build(); Snak q1 = Datamodel.makeSomeValueSnak(p); Snak q2 = Datamodel.makeNoValueSnak(p); Snak q3 = Datamodel.makeValueSnak(p, i); SnakGroup sg = Datamodel.makeSnakGroup(Arrays.asList(q1, q2, q3)); Statement stmt1 = Datamodel.makeStatement(i, Datamodel.makeValueSnak(p, i), Collections.singletonList(sg), Arrays.asList(r1, r2), StatementRank.PREFERRED, "id"); Statement stmt2 = StatementBuilder.forSubjectAndProperty(i, p) .withRank(StatementRank.PREFERRED).withValue(i) .withQualifierSomeValue(p).withQualifierNoValue(p) .withQualifierValue(p, i).withId("id") .withReferences(Arrays.asList(r1, r2)).build(); assertEquals(stmt1, stmt2); } @Test public void testNoValueStatement() { ItemIdValue i = ItemIdValue.NULL; PropertyIdValue p = PropertyIdValue.NULL; Statement stmt1 = Datamodel.makeStatement( i, Datamodel.makeNoValueSnak(p), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, ""); Statement stmt2 = StatementBuilder.forSubjectAndProperty(i, p) .withNoValue().build(); assertEquals(stmt1, stmt2); } @Test public void testSomeValueStatement() { ItemIdValue i = ItemIdValue.NULL; PropertyIdValue p = PropertyIdValue.NULL; Statement stmt1 = Datamodel.makeStatement(i, Datamodel.makeSomeValueSnak(p), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, ""); Statement stmt2 = StatementBuilder.forSubjectAndProperty(i, p) .withSomeValue().build(); assertEquals(stmt1, stmt2); } } StatementDocumentTest.java000066400000000000000000000044621444772566300364220ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helperspackage org.wikidata.wdtk.datamodel.helpers; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import static org.junit.Assert.*; public class StatementDocumentTest { private static ItemIdValue Q1 = Datamodel.makeWikidataItemIdValue("Q1"); private static ItemIdValue Q2 = Datamodel.makeWikidataItemIdValue("Q2"); private static PropertyIdValue P1 = Datamodel.makeWikidataPropertyIdValue("P1"); private static PropertyIdValue P2 = Datamodel.makeWikidataPropertyIdValue("P2"); private static PropertyIdValue P3 = Datamodel.makeWikidataPropertyIdValue("P3"); @Test public void testFindStatement() { Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q2).build(); Statement s3 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withValue(Q1).build(); ItemDocument id = ItemDocumentBuilder.forItemId(Q1).withStatement(s1) .withStatement(s2).withStatement(s3).build(); assertTrue(id.hasStatement(P1)); assertTrue(id.hasStatement("P1")); assertNull(id.findStatement(P1)); assertNull(id.findStatement("P1")); assertTrue(id.hasStatement(P2)); assertTrue(id.hasStatement("P2")); assertEquals(s3, id.findStatement(P2)); assertEquals(s3, id.findStatement("P2")); assertFalse(id.hasStatement(P3)); assertFalse(id.hasStatement("P3")); assertNull(id.findStatement(P3)); assertNull(id.findStatement("P3")); } } StatementDocumentUpdateBuilderTest.java000066400000000000000000000175411444772566300410760ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementDocumentUpdate; public class StatementDocumentUpdateBuilderTest { private static final ItemIdValue Q1 = EntityUpdateBuilderTest.Q1; private static final PropertyIdValue P1 = EntityUpdateBuilderTest.P1; private static final MediaInfoIdValue M1 = EntityUpdateBuilderTest.M1; private static final LexemeIdValue L1 = EntityUpdateBuilderTest.L1; private static final FormIdValue F1 = EntityUpdateBuilderTest.F1; private static final SenseIdValue S1 = EntityUpdateBuilderTest.S1; private static final ItemDocument ITEM = EntityUpdateBuilderTest.ITEM; private static final PropertyDocument PROPERTY = EntityUpdateBuilderTest.PROPERTY; private static final MediaInfoDocument MEDIA = EntityUpdateBuilderTest.MEDIA; private static final LexemeDocument LEXEME = EntityUpdateBuilderTest.LEXEME; private static final FormDocument FORM = EntityUpdateBuilderTest.FORM; private static final SenseDocument SENSE = EntityUpdateBuilderTest.SENSE; private static final EntityIdValue JOHN = StatementUpdateBuilderTest.JOHN; private static final EntityIdValue RITA = StatementUpdateBuilderTest.RITA; private static final Statement JOHN_ALREADY_HAS_BROWN_HAIR = StatementUpdateBuilderTest.JOHN_ALREADY_HAS_BROWN_HAIR; private static final Statement JOHN_ALREADY_HAS_BLUE_EYES = StatementUpdateBuilderTest.JOHN_ALREADY_HAS_BLUE_EYES; @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> StatementDocumentUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> StatementDocumentUpdateBuilder.forEntityId(ItemIdValue.NULL)); assertThat(StatementDocumentUpdateBuilder.forEntityId(Q1), is(instanceOf(ItemUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forEntityId(P1), is(instanceOf(PropertyUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forEntityId(M1), is(instanceOf(MediaInfoUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forEntityId(L1), is(instanceOf(LexemeUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forEntityId(F1), is(instanceOf(FormUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forEntityId(S1), is(instanceOf(SenseUpdateBuilder.class))); } @Test public void testForBaseRevisionId() { 
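// The base revision id should be preserved for every supported entity kind;
// each builder below is expected to report the same revision id (123) back.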
assertEquals(123, StatementDocumentUpdateBuilder.forBaseRevisionId(Q1, 123).getBaseRevisionId()); assertEquals(123, StatementDocumentUpdateBuilder.forBaseRevisionId(P1, 123).getBaseRevisionId()); assertEquals(123, StatementDocumentUpdateBuilder.forBaseRevisionId(M1, 123).getBaseRevisionId()); assertEquals(123, StatementDocumentUpdateBuilder.forBaseRevisionId(L1, 123).getBaseRevisionId()); assertEquals(123, StatementDocumentUpdateBuilder.forBaseRevisionId(F1, 123).getBaseRevisionId()); assertEquals(123, StatementDocumentUpdateBuilder.forBaseRevisionId(S1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> StatementDocumentUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> StatementDocumentUpdateBuilder.forBaseRevision(Datamodel.makeItemDocument(ItemIdValue.NULL))); assertThat(StatementDocumentUpdateBuilder.forBaseRevision(ITEM), is(instanceOf(ItemUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forBaseRevision(PROPERTY), is(instanceOf(PropertyUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forBaseRevision(MEDIA), is(instanceOf(MediaInfoUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forBaseRevision(LEXEME), is(instanceOf(LexemeUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forBaseRevision(FORM), is(instanceOf(FormUpdateBuilder.class))); assertThat(StatementDocumentUpdateBuilder.forBaseRevision(SENSE), is(instanceOf(SenseUpdateBuilder.class))); } @Test public void testBlindStatementUpdate() { assertThrows(NullPointerException.class, () -> StatementDocumentUpdateBuilder.forEntityId(Q1).updateStatements(null)); assertThrows(IllegalArgumentException.class, () -> StatementDocumentUpdateBuilder.forEntityId(RITA) .updateStatements(StatementUpdateBuilder.create().replace(JOHN_ALREADY_HAS_BROWN_HAIR).build())); StatementDocumentUpdate update = StatementDocumentUpdateBuilder.forEntityId(JOHN) .updateStatements(StatementUpdateBuilder.create().replace(JOHN_ALREADY_HAS_BROWN_HAIR).build()) .updateStatements(StatementUpdateBuilder.create().replace(JOHN_ALREADY_HAS_BLUE_EYES).build()) .build(); assertThat(update.getStatements().getReplaced().values(), containsInAnyOrder(JOHN_ALREADY_HAS_BROWN_HAIR, JOHN_ALREADY_HAS_BLUE_EYES)); } @Test public void testBaseStatementUpdate() { StatementDocumentUpdate update = StatementDocumentUpdateBuilder .forBaseRevision(ITEM .withStatement(JOHN_ALREADY_HAS_BROWN_HAIR) .withStatement(JOHN_ALREADY_HAS_BLUE_EYES)) .updateStatements(StatementUpdateBuilder.create() .replace(JOHN_ALREADY_HAS_BROWN_HAIR) // ignored .remove(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId()) // checked .build()) .build(); assertThat(update.getStatements().getReplaced(), is(anEmptyMap())); assertThat(update.getStatements().getRemoved(), containsInAnyOrder(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId())); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> StatementDocumentUpdateBuilder.forEntityId(Q1).append(null)); StatementDocumentUpdateBuilder builder = StatementDocumentUpdateBuilder.forEntityId(JOHN) .updateStatements(StatementUpdateBuilder.create().replace(JOHN_ALREADY_HAS_BROWN_HAIR).build()); builder.append(StatementDocumentUpdateBuilder.forEntityId(JOHN) .updateStatements(StatementUpdateBuilder.create().replace(JOHN_ALREADY_HAS_BLUE_EYES).build()) .build()); StatementDocumentUpdate update = builder.build(); assertThat(update.getStatements().getReplaced().values(), 
containsInAnyOrder(JOHN_ALREADY_HAS_BROWN_HAIR, JOHN_ALREADY_HAS_BLUE_EYES)); } } StatementUpdateBuilderTest.java000066400000000000000000000334001444772566300373670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import java.util.Arrays; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.StringValue; public class StatementUpdateBuilderTest { static final EntityIdValue JOHN = Datamodel.makeWikidataItemIdValue("Q1"); static final EntityIdValue RITA = Datamodel.makeWikidataItemIdValue("Q2"); static final PropertyIdValue HAIR = Datamodel.makeWikidataPropertyIdValue("P1"); static final PropertyIdValue EYES = Datamodel.makeWikidataPropertyIdValue("P2"); static final PropertyIdValue SHIRT = Datamodel.makeWikidataPropertyIdValue("P3"); static final PropertyIdValue TROUSERS = Datamodel.makeWikidataPropertyIdValue("P4"); static final StringValue BROWN = Datamodel.makeStringValue("brown"); static final StringValue SILVER = Datamodel.makeStringValue("silver"); static final StringValue BLUE = Datamodel.makeStringValue("blue"); static final Statement NOBODY_HAS_BROWN_HAIR = StatementBuilder .forSubjectAndProperty(ItemIdValue.NULL, HAIR) .withValue(BROWN) .build(); static final Statement NOBODY_ALREADY_HAS_BROWN_HAIR = NOBODY_HAS_BROWN_HAIR.withStatementId("ID1"); static final Statement JOHN_HAS_BROWN_HAIR = StatementBuilder .forSubjectAndProperty(JOHN, HAIR) .withValue(BROWN) .build(); static final Statement JOHN_ALREADY_HAS_BROWN_HAIR = JOHN_HAS_BROWN_HAIR.withStatementId("ID2"); static final Statement RITA_HAS_BROWN_HAIR = StatementBuilder .forSubjectAndProperty(RITA, HAIR) .withValue(BROWN) .build(); static final Statement RITA_ALREADY_HAS_BROWN_HAIR = RITA_HAS_BROWN_HAIR.withStatementId("ID3"); static final Statement JOHN_HAS_BROWN_EYES = StatementBuilder .forSubjectAndProperty(JOHN, EYES) .withValue(BROWN) .build(); static final Statement JOHN_ALREADY_HAS_BROWN_EYES = JOHN_HAS_BROWN_EYES.withStatementId("ID4"); static final Statement JOHN_HAS_SILVER_HAIR = StatementBuilder .forSubjectAndProperty(JOHN, HAIR) 
.withValue(SILVER) .build(); static final Statement JOHN_ALREADY_HAS_SILVER_HAIR = JOHN_HAS_SILVER_HAIR.withStatementId("ID5"); static final Statement JOHN_HAS_BLUE_SHIRT = StatementBuilder .forSubjectAndProperty(JOHN, SHIRT) .withValue(BLUE) .build(); static final Statement JOHN_ALREADY_HAS_BLUE_SHIRT = JOHN_HAS_BLUE_SHIRT.withStatementId("ID6"); static final Statement JOHN_HAS_BROWN_TROUSERS = StatementBuilder .forSubjectAndProperty(JOHN, TROUSERS) .withValue(BROWN) .build(); static final Statement JOHN_ALREADY_HAS_BROWN_TROUSERS = JOHN_HAS_BROWN_TROUSERS.withStatementId("ID7"); static final Statement JOHN_HAS_BLUE_TROUSERS = StatementBuilder .forSubjectAndProperty(JOHN, TROUSERS) .withValue(BLUE) .build(); static final Statement JOHN_HAS_BLUE_EYES = StatementBuilder .forSubjectAndProperty(JOHN, EYES) .withValue(BLUE) .build(); static final Statement JOHN_ALREADY_HAS_BLUE_EYES = JOHN_HAS_BLUE_EYES.withStatementId("ID8"); @Test public void testCreate() { StatementUpdate update = StatementUpdateBuilder.create().build(); assertThat(update.getAdded(), is(empty())); assertThat(update.getReplaced(), is(anEmptyMap())); assertThat(update.getRemoved(), is(empty())); } @Test public void testCreateWithSubject() { StatementUpdateBuilder builder = StatementUpdateBuilder.create(JOHN); assertThrows(IllegalArgumentException.class, () -> builder.add(RITA_HAS_BROWN_HAIR)); assertThrows(IllegalArgumentException.class, () -> builder.replace(RITA_ALREADY_HAS_BROWN_HAIR)); builder.add(JOHN_HAS_BLUE_EYES); } @Test public void testForStatements() { assertThrows(NullPointerException.class, () -> StatementUpdateBuilder.forStatements(null)); assertThrows(NullPointerException.class, () -> StatementUpdateBuilder.forStatements(Arrays.asList(JOHN_ALREADY_HAS_BROWN_HAIR, null))); // no statement subject assertThrows(IllegalArgumentException.class, () -> StatementUpdateBuilder.forStatements(Arrays.asList(NOBODY_ALREADY_HAS_BROWN_HAIR))); // no statement ID assertThrows(IllegalArgumentException.class, () -> StatementUpdateBuilder.forStatements(Arrays.asList(JOHN_HAS_BROWN_HAIR))); // duplicate statement ID assertThrows(IllegalArgumentException.class, () -> StatementUpdateBuilder .forStatements(Arrays.asList(JOHN_ALREADY_HAS_BROWN_HAIR, JOHN_ALREADY_HAS_BROWN_HAIR))); // inconsistent statement subject assertThrows(IllegalArgumentException.class, () -> StatementUpdateBuilder .forStatements(Arrays.asList(JOHN_ALREADY_HAS_BROWN_HAIR, RITA_ALREADY_HAS_BROWN_HAIR))); // no base statements StatementUpdateBuilder.forStatements(Collections.emptyList()); StatementUpdate update = StatementUpdateBuilder .forStatements(Arrays.asList(JOHN_ALREADY_HAS_BROWN_HAIR, JOHN_ALREADY_HAS_BROWN_EYES)) .build(); assertThat(update.getAdded(), is(empty())); assertThat(update.getReplaced(), is(anEmptyMap())); assertThat(update.getRemoved(), is(empty())); } @Test public void testForStatementsWithSubject() { assertThrows(IllegalArgumentException.class, () -> StatementUpdateBuilder.forStatements(JOHN, Arrays.asList(RITA_ALREADY_HAS_BROWN_HAIR))); StatementUpdateBuilder builder = StatementUpdateBuilder.forStatements(JOHN, Collections.emptyList()); assertThrows(IllegalArgumentException.class, () -> builder.add(RITA_HAS_BROWN_HAIR)); } @Test public void testForStatementGroups() { assertThrows(NullPointerException.class, () -> StatementUpdateBuilder.forStatementGroups(null)); StatementGroup johnAlreadyHasBrownAndSilverHair = Datamodel.makeStatementGroup( Arrays.asList(JOHN_ALREADY_HAS_BROWN_HAIR, JOHN_ALREADY_HAS_SILVER_HAIR)); 
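// The group bundles the two existing hair statements of JOHN; it is used both
// for the null-element check below and for the no-change round trip afterwards.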
assertThrows(IllegalArgumentException.class, () -> StatementUpdateBuilder.forStatementGroups(Arrays.asList(johnAlreadyHasBrownAndSilverHair, null))); // no statement groups StatementUpdateBuilder.forStatementGroups(Collections.emptyList()); StatementUpdate update = StatementUpdateBuilder .forStatementGroups(Arrays.asList(johnAlreadyHasBrownAndSilverHair)) .build(); assertThat(update.getAdded(), is(empty())); assertThat(update.getReplaced(), is(anEmptyMap())); assertThat(update.getRemoved(), is(empty())); } @Test public void testForStatementGroupsWithSubject() { assertThrows(IllegalArgumentException.class, () -> StatementUpdateBuilder.forStatementGroups(JOHN, Arrays.asList(Datamodel.makeStatementGroup(Arrays.asList(RITA_ALREADY_HAS_BROWN_HAIR))))); StatementUpdateBuilder builder = StatementUpdateBuilder.forStatementGroups(JOHN, Collections.emptyList()); assertThrows(IllegalArgumentException.class, () -> builder.add(RITA_HAS_BROWN_HAIR)); } @Test public void testBlindAddition() { StatementUpdateBuilder builder = StatementUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.add(null)); // placeholder ID assertThrows(IllegalArgumentException.class, () -> builder.add(NOBODY_HAS_BROWN_HAIR)); builder.add(JOHN_HAS_BROWN_HAIR); // simple case builder.add(JOHN_HAS_BROWN_HAIR); // duplicates allowed builder.add(JOHN_ALREADY_HAS_BROWN_EYES); // strip ID // inconsistent subject assertThrows(IllegalArgumentException.class, () -> builder.add(RITA_HAS_BROWN_HAIR)); StatementUpdate update = builder.build(); assertEquals(update.getAdded(), Arrays.asList(JOHN_HAS_BROWN_HAIR, JOHN_HAS_BROWN_HAIR, JOHN_HAS_BROWN_EYES)); } @Test public void testBlindReplacement() { StatementUpdateBuilder builder = StatementUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.replace(null)); // placeholder ID assertThrows(IllegalArgumentException.class, () -> builder.replace(NOBODY_ALREADY_HAS_BROWN_HAIR)); builder.remove(JOHN_ALREADY_HAS_BROWN_EYES.getStatementId()); builder.replace(JOHN_ALREADY_HAS_BROWN_HAIR); // simple case builder.replace(JOHN_ALREADY_HAS_BROWN_EYES); // previously removed builder.replace(JOHN_ALREADY_HAS_BROWN_HAIR); // replace twice // inconsistent subject assertThrows(IllegalArgumentException.class, () -> builder.replace(RITA_ALREADY_HAS_BROWN_HAIR)); // no statement ID assertThrows(IllegalArgumentException.class, () -> builder.replace(JOHN_HAS_SILVER_HAIR)); StatementUpdate update = builder.build(); assertThat(update.getRemoved(), is(empty())); assertThat( update.getReplaced().values(), containsInAnyOrder(JOHN_ALREADY_HAS_BROWN_HAIR, JOHN_ALREADY_HAS_BROWN_EYES)); } @Test public void testBlindRemoval() { StatementUpdateBuilder builder = StatementUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.remove(null)); assertThrows(IllegalArgumentException.class, () -> builder.remove("")); builder.replace(JOHN_ALREADY_HAS_BROWN_EYES); builder.remove(JOHN_ALREADY_HAS_BROWN_HAIR.getStatementId()); // simple case builder.remove(JOHN_ALREADY_HAS_BROWN_EYES.getStatementId()); // previously replaced StatementUpdate update = builder.build(); assertThat(update.getReplaced(), is(anEmptyMap())); assertThat(update.getRemoved(), containsInAnyOrder( JOHN_ALREADY_HAS_BROWN_HAIR.getStatementId(), JOHN_ALREADY_HAS_BROWN_EYES.getStatementId())); } @Test public void testBaseAddition() { StatementUpdateBuilder builder = StatementUpdateBuilder .forStatements(Arrays.asList(JOHN_ALREADY_HAS_BROWN_HAIR)); // inconsistent subject 
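// (the builder was created for base statements of JOHN, so adding a statement
// about RITA must be rejected with IllegalArgumentException)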
assertThrows(IllegalArgumentException.class, () -> builder.add(RITA_HAS_BROWN_HAIR)); builder.add(JOHN_HAS_BROWN_EYES); // simple case builder.add(JOHN_ALREADY_HAS_BROWN_HAIR); // duplicating existing statements is allowed StatementUpdate update = builder.build(); assertEquals(update.getAdded(), Arrays.asList(JOHN_HAS_BROWN_EYES, JOHN_HAS_BROWN_HAIR)); } @Test public void testBaseReplacement() { StatementUpdateBuilder builder = StatementUpdateBuilder.forStatements(Arrays.asList( JOHN_ALREADY_HAS_BROWN_HAIR, JOHN_ALREADY_HAS_BROWN_EYES, JOHN_ALREADY_HAS_BLUE_SHIRT, JOHN_ALREADY_HAS_BROWN_TROUSERS)); builder.remove(JOHN_ALREADY_HAS_BROWN_EYES.getStatementId()); Statement johnChangesBrownTrousersToBlueTrousers = JOHN_HAS_BLUE_TROUSERS .withStatementId(JOHN_ALREADY_HAS_BROWN_TROUSERS.getStatementId()); builder.replace(johnChangesBrownTrousersToBlueTrousers); // inconsistent subject assertThrows(IllegalArgumentException.class, () -> builder.replace( RITA_ALREADY_HAS_BROWN_HAIR.withStatementId(JOHN_ALREADY_HAS_BROWN_EYES.getStatementId()))); // unknown ID assertThrows(IllegalArgumentException.class, () -> builder.replace(JOHN_ALREADY_HAS_BROWN_HAIR.withStatementId("ID999"))); Statement johnChangesBrownHairToSilverHair = JOHN_HAS_SILVER_HAIR .withStatementId(JOHN_ALREADY_HAS_BROWN_HAIR.getStatementId()); builder.replace(johnChangesBrownHairToSilverHair); // simple case builder.replace(JOHN_ALREADY_HAS_BLUE_SHIRT); // no change builder.replace(JOHN_ALREADY_HAS_BROWN_EYES); // restore deleted builder.replace(JOHN_ALREADY_HAS_BROWN_TROUSERS); // restore replaced StatementUpdate update = builder.build(); assertThat(update.getRemoved(), is(empty())); assertThat(update.getReplaced().values(), containsInAnyOrder(johnChangesBrownHairToSilverHair)); } @Test public void testBaseRemoval() { StatementUpdateBuilder builder = StatementUpdateBuilder .forStatements(Arrays.asList(JOHN_ALREADY_HAS_BROWN_HAIR)); assertThrows(IllegalArgumentException.class, () -> builder.remove("ID999")); // unknown ID builder.remove(JOHN_ALREADY_HAS_BROWN_HAIR.getStatementId()); // simple case StatementUpdate update = builder.build(); assertThat(update.getRemoved(), containsInAnyOrder(JOHN_ALREADY_HAS_BROWN_HAIR.getStatementId())); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> StatementUpdateBuilder.create().append(null)); StatementUpdate update = StatementUpdateBuilder.create() .add(JOHN_HAS_BROWN_EYES) // prior addition .replace(JOHN_ALREADY_HAS_SILVER_HAIR) // prior replacement .remove(JOHN_ALREADY_HAS_BLUE_SHIRT.getStatementId()) // prior removal .append(StatementUpdateBuilder.create() .add(JOHN_HAS_BROWN_TROUSERS) // another addition .replace(JOHN_ALREADY_HAS_BROWN_HAIR) // another replacement .remove(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId()) // another removal .build()) .build(); assertEquals(update.getAdded(), Arrays.asList(JOHN_HAS_BROWN_EYES, JOHN_HAS_BROWN_TROUSERS)); assertThat(update.getReplaced().values(), containsInAnyOrder(JOHN_ALREADY_HAS_SILVER_HAIR, JOHN_ALREADY_HAS_BROWN_HAIR)); assertThat(update.getRemoved(), containsInAnyOrder( JOHN_ALREADY_HAS_BLUE_SHIRT.getStatementId(), JOHN_ALREADY_HAS_BLUE_EYES.getStatementId())); } } TermUpdateBuilderTest.java000066400000000000000000000126051444772566300363360ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); 
* you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import java.util.Arrays; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class TermUpdateBuilderTest { static final MonolingualTextValue EN = Datamodel.makeMonolingualTextValue("hello", "en"); static final MonolingualTextValue EN2 = Datamodel.makeMonolingualTextValue("hi", "en"); static final MonolingualTextValue SK = Datamodel.makeMonolingualTextValue("ahoj", "sk"); static final MonolingualTextValue CS = Datamodel.makeMonolingualTextValue("nazdar", "cs"); static final MonolingualTextValue DE = Datamodel.makeMonolingualTextValue("Hallo", "de"); static final MonolingualTextValue DE2 = Datamodel.makeMonolingualTextValue("Guten Tag", "de"); static final MonolingualTextValue FR = Datamodel.makeMonolingualTextValue("Bonjour", "fr"); @Test public void testCreate() { TermUpdate update = TermUpdateBuilder.create().build(); assertThat(update.getModified(), is(anEmptyMap())); assertThat(update.getRemoved(), is(empty())); } @Test public void testForTerms() { assertThrows(NullPointerException.class, () -> TermUpdateBuilder.forTerms(null)); assertThrows(NullPointerException.class, () -> TermUpdateBuilder.forTerms(Arrays.asList(SK, null))); assertThrows(IllegalArgumentException.class, () -> TermUpdateBuilder.forTerms(Arrays.asList(SK, SK))); TermUpdate update = TermUpdateBuilder.forTerms(Arrays.asList(SK, EN)).build(); assertThat(update.getModified(), is(anEmptyMap())); assertThat(update.getRemoved(), is(empty())); } @Test public void testBlindAssignment() { TermUpdateBuilder builder = TermUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.put(null)); builder.remove("sk"); builder.remove("de"); builder.put(EN); // simple case builder.put(SK); // previously removed TermUpdate update = builder.build(); assertThat(update.getRemoved(), containsInAnyOrder("de")); assertThat(update.getModified().keySet(), containsInAnyOrder("sk", "en")); assertEquals(EN, update.getModified().get("en")); assertEquals(SK, update.getModified().get("sk")); } @Test public void testBlindRemoval() { TermUpdateBuilder builder = TermUpdateBuilder.create(); assertThrows(NullPointerException.class, () -> builder.remove(null)); assertThrows(IllegalArgumentException.class, () -> builder.remove(" ")); builder.put(EN); builder.put(SK); builder.remove("de"); // simple case builder.remove("sk"); // previously assigned TermUpdate update = builder.build(); assertThat(update.getRemoved(), containsInAnyOrder("sk", "de")); assertThat(update.getModified().keySet(), containsInAnyOrder("en")); } @Test public void testBaseAssignment() { TermUpdateBuilder builder = 
TermUpdateBuilder.forTerms(Arrays.asList(SK, EN, DE, CS)); builder.remove("sk"); builder.remove("de"); builder.put(FR); // new language key builder.put(EN2); // new value builder.put(CS); // same value builder.put(SK); // same value for previously removed builder.put(DE2); // new value for previously removed TermUpdate update = builder.build(); assertThat(update.getRemoved(), is(empty())); assertThat(update.getModified().keySet(), containsInAnyOrder("en", "de", "fr")); assertEquals(FR, update.getModified().get("fr")); assertEquals(EN2, update.getModified().get("en")); assertEquals(DE2, update.getModified().get("de")); } @Test public void testBaseRemoval() { TermUpdateBuilder builder = TermUpdateBuilder.forTerms(Arrays.asList(EN, SK, CS)); builder.put(EN2); builder.put(DE); builder.remove("sk"); // simple case builder.remove("fr"); // not found builder.remove("en"); // previously modified builder.remove("de"); // previously added TermUpdate update = builder.build(); assertThat(update.getModified(), anEmptyMap()); assertThat(update.getRemoved(), containsInAnyOrder("en", "sk")); } @Test public void testMerge() { assertThrows(NullPointerException.class, () -> TermUpdateBuilder.create().append(null)); TermUpdate update = TermUpdateBuilder.create() .put(EN) // prior assignment .remove("sk") // prior removal .append(TermUpdateBuilder.create() .put(DE) // another replacement .remove("cs") // another removal .build()) .build(); assertThat(update.getModified().values(), containsInAnyOrder(EN, DE)); assertThat(update.getRemoved(), containsInAnyOrder("sk", "cs")); } } TermedDocumentUpdateBuilderTest.java000066400000000000000000000221101444772566300403360ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/helpers/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.helpers; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import java.util.Arrays; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocumentUpdate; public class TermedDocumentUpdateBuilderTest { private static final ItemIdValue Q1 = EntityUpdateBuilderTest.Q1; private static final PropertyIdValue P1 = EntityUpdateBuilderTest.P1; private static final MediaInfoIdValue M1 = EntityUpdateBuilderTest.M1; private static final ItemDocument ITEM = EntityUpdateBuilderTest.ITEM; private static final PropertyDocument PROPERTY = EntityUpdateBuilderTest.PROPERTY; private static final Statement JOHN_HAS_BROWN_HAIR = StatementUpdateBuilderTest.JOHN_HAS_BROWN_HAIR; private static final MonolingualTextValue EN = TermUpdateBuilderTest.EN; private static final MonolingualTextValue EN2 = TermUpdateBuilderTest.EN2; private static final MonolingualTextValue DE = TermUpdateBuilderTest.DE; private static final MonolingualTextValue DE2 = TermUpdateBuilderTest.DE2; private static final MonolingualTextValue SK = TermUpdateBuilderTest.SK; private static final MonolingualTextValue CS = TermUpdateBuilderTest.CS; private static final MonolingualTextValue FR = TermUpdateBuilderTest.FR; private static final MonolingualTextValue ES = Datamodel.makeMonolingualTextValue("hola", "es"); @Test public void testForEntityId() { assertThrows(NullPointerException.class, () -> TermedDocumentUpdateBuilder.forEntityId(null)); assertThrows(IllegalArgumentException.class, () -> TermedDocumentUpdateBuilder.forEntityId(ItemIdValue.NULL)); assertThrows(IllegalArgumentException.class, () -> TermedDocumentUpdateBuilder.forEntityId(M1)); assertThat(TermedDocumentUpdateBuilder.forEntityId(Q1), is(instanceOf(ItemUpdateBuilder.class))); assertThat(TermedDocumentUpdateBuilder.forEntityId(P1), is(instanceOf(PropertyUpdateBuilder.class))); } @Test public void testForBaseRevisionId() { assertEquals(123, TermedDocumentUpdateBuilder.forBaseRevisionId(Q1, 123).getBaseRevisionId()); assertEquals(123, TermedDocumentUpdateBuilder.forBaseRevisionId(P1, 123).getBaseRevisionId()); } @Test public void testForBaseRevision() { assertThrows(NullPointerException.class, () -> TermedDocumentUpdateBuilder.forBaseRevision(null)); assertThrows(IllegalArgumentException.class, () -> TermedDocumentUpdateBuilder.forBaseRevision(Datamodel.makeItemDocument(ItemIdValue.NULL))); assertThat(TermedDocumentUpdateBuilder.forBaseRevision(ITEM), is(instanceOf(ItemUpdateBuilder.class))); assertThat(TermedDocumentUpdateBuilder.forBaseRevision(PROPERTY), is(instanceOf(PropertyUpdateBuilder.class))); } @Test public void testStatementUpdate() { TermedStatementDocumentUpdate update = 
TermedDocumentUpdateBuilder.forEntityId(Q1) .updateStatements(StatementUpdateBuilder.create().add(JOHN_HAS_BROWN_HAIR).build()) .build(); assertThat(update.getStatements().getAdded(), containsInAnyOrder(JOHN_HAS_BROWN_HAIR)); } @Test public void testLabelUpdate() { TermedStatementDocumentUpdate update = TermedDocumentUpdateBuilder.forEntityId(Q1) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .build(); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("en")); } @Test public void testBlindDescriptionUpdate() { assertThrows(NullPointerException.class, () -> TermedDocumentUpdateBuilder.forEntityId(Q1).updateDescriptions(null)); TermedStatementDocumentUpdate update = TermedDocumentUpdateBuilder.forEntityId(Q1) .updateDescriptions(TermUpdateBuilder.create().remove("en").build()) .updateDescriptions(TermUpdateBuilder.create().remove("sk").build()) .build(); assertThat(update.getDescriptions().getRemoved(), containsInAnyOrder("en", "sk")); } @Test public void testBaseDescriptionUpdate() { TermedStatementDocumentUpdate update = TermedDocumentUpdateBuilder .forBaseRevision(ITEM .withDescription(EN) .withDescription(SK)) .updateDescriptions(TermUpdateBuilder.create() .put(SK) // ignored .remove("en") // checked .build()) .build(); assertThat(update.getDescriptions().getModified(), is(anEmptyMap())); assertThat(update.getDescriptions().getRemoved(), containsInAnyOrder("en")); } @Test public void testBlindAliasChanges() { TermedDocumentUpdateBuilder builder = TermedDocumentUpdateBuilder.forEntityId(Q1); assertThrows(NullPointerException.class, () -> builder.updateAliases(null, AliasUpdate.EMPTY)); assertThrows(IllegalArgumentException.class, () -> builder.updateAliases(" ", AliasUpdate.EMPTY)); assertThrows(NullPointerException.class, () -> builder.updateAliases("en", null)); TermedStatementDocumentUpdate update = builder .updateAliases("sk", AliasUpdateBuilder.create().add(SK).build()) // simple case .updateAliases("cs", AliasUpdate.EMPTY) // empty update .updateAliases("de", AliasUpdateBuilder.create().add(DE).build()) .updateAliases("de", AliasUpdateBuilder.create().add(DE2).build()) // merge changes .build(); assertThat(update.getAliases().keySet(), containsInAnyOrder("de", "sk")); assertEquals(AliasUpdateBuilder.create().add(SK).build(), update.getAliases().get("sk")); assertEquals(AliasUpdateBuilder.create().add(DE).add(DE2).build(), update.getAliases().get("de")); } @Test public void testBaseAliasChanges() { TermedDocumentUpdateBuilder builder = TermedDocumentUpdateBuilder.forBaseRevision(ITEM .withAliases("en", Arrays.asList(EN)) .withAliases("de", Arrays.asList(DE)) .withAliases("cs", Arrays.asList(CS)) .withAliases("fr", Arrays.asList(FR)) .withAliases("es", Arrays.asList(ES))); TermedStatementDocumentUpdate update = builder // extend existing alias list .updateAliases("en", AliasUpdateBuilder.create().add(EN2).build()) // new language .updateAliases("sk", AliasUpdateBuilder.create().add(SK).build()) // clear non-existent language .updateAliases("pl", Datamodel.makeAliasUpdate(Collections.emptyList())) // same value .updateAliases("es", Datamodel.makeAliasUpdate(Arrays.asList(ES))) // redundant change .updateAliases("fr", AliasUpdateBuilder.create().add(FR).build()) .build(); assertThat(update.getAliases().keySet(), containsInAnyOrder("sk", "en")); assertEquals(AliasUpdateBuilder.create().add(SK).build(), update.getAliases().get("sk")); assertEquals(AliasUpdateBuilder.create().add(EN2).build(), update.getAliases().get("en")); } @Test public void testMerge() { 
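// append(null) must fail fast; a valid append() merges label, description and
// per-language alias changes from the other update into this builder.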
assertThrows(NullPointerException.class, () -> TermedDocumentUpdateBuilder.forEntityId(Q1).append(null)); TermedDocumentUpdateBuilder builder = TermedDocumentUpdateBuilder.forEntityId(Q1) .updateLabels(TermUpdateBuilder.create().remove("pl").build()) .updateDescriptions(TermUpdateBuilder.create().remove("fr").build()) .updateAliases("en", AliasUpdateBuilder.create().add(EN).build()); builder.append(TermedDocumentUpdateBuilder.forEntityId(Q1) .updateLabels(TermUpdateBuilder.create().remove("sk").build()) .updateDescriptions(TermUpdateBuilder.create().remove("es").build()) .updateAliases("en", AliasUpdateBuilder.create().add(EN2).build()) .updateAliases("de", AliasUpdateBuilder.create().add(DE).build()) .build()); TermedStatementDocumentUpdate update = builder.build(); assertThat(update.getLabels().getRemoved(), containsInAnyOrder("sk", "pl")); assertThat(update.getDescriptions().getRemoved(), containsInAnyOrder("es", "fr")); assertThat(update.getAliases().keySet(), containsInAnyOrder("en", "de")); assertEquals(AliasUpdateBuilder.create().add(EN).add(EN2).build(), update.getAliases().get("en")); assertEquals(AliasUpdateBuilder.create().add(DE).build(), update.getAliases().get("de")); } } Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/000077500000000000000000000000001444772566300327105ustar00rootroot00000000000000AliasUpdateImplTest.java000066400000000000000000000227001444772566300373530ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.hamcrest.Matchers; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.AliasUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; public class AliasUpdateImplTest { private static final MonolingualTextValue EN = Datamodel.makeMonolingualTextValue("hello", "en"); private static final MonolingualTextValue EN2 = Datamodel.makeMonolingualTextValue("hi", "en"); private static final MonolingualTextValue EN3 = Datamodel.makeMonolingualTextValue("hey", "en"); private static final MonolingualTextValue EN4 = Datamodel.makeMonolingualTextValue("howdy", "en"); private static final MonolingualTextValue SK = Datamodel.makeMonolingualTextValue("ahoj", "sk"); @Test public void testFields() { AliasUpdate empty = new AliasUpdateImpl(null, Collections.emptyList(), Collections.emptyList()); assertFalse(empty.getLanguageCode().isPresent()); assertFalse(empty.getRecreated().isPresent()); assertThat(empty.getAdded(), is(Matchers.empty())); assertThat(empty.getRemoved(), is(Matchers.empty())); AliasUpdate cleared = new AliasUpdateImpl(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(cleared.getLanguageCode().isPresent()); assertThat(cleared.getRecreated().get(), is(Matchers.empty())); assertThat(cleared.getAdded(), is(Matchers.empty())); assertThat(cleared.getRemoved(), is(Matchers.empty())); AliasUpdate recreated = new AliasUpdateImpl(Arrays.asList(EN, EN2), Collections.emptyList(), Collections.emptyList()); assertEquals("en", recreated.getLanguageCode().get()); assertEquals(Arrays.asList(EN, EN2), recreated.getRecreated().get()); assertThat(recreated.getAdded(), is(Matchers.empty())); assertThat(recreated.getRemoved(), is(Matchers.empty())); AliasUpdate incremental = new AliasUpdateImpl(null, Arrays.asList(EN, EN2), Arrays.asList(EN3)); assertEquals("en", incremental.getLanguageCode().get()); assertFalse(incremental.getRecreated().isPresent()); assertThat(incremental.getAdded(), contains(EN, EN2)); assertThat(incremental.getRemoved(), containsInAnyOrder(EN3)); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> new AliasUpdateImpl(null, null, Collections.emptyList())); assertThrows(NullPointerException.class, () -> new AliasUpdateImpl(null, Collections.emptyList(), null)); assertThrows(NullPointerException.class, () -> new AliasUpdateImpl(Arrays.asList(EN, null), Collections.emptyList(), Collections.emptyList())); assertThrows(NullPointerException.class, () -> new AliasUpdateImpl(null, Arrays.asList(EN, null), Collections.emptyList())); assertThrows(NullPointerException.class, () -> new AliasUpdateImpl(null, Collections.emptyList(), Arrays.asList(EN, null))); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(Collections.emptyList(), 
Arrays.asList(EN), Collections.emptyList())); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(Collections.emptyList(), Collections.emptyList(), Arrays.asList(EN))); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(Arrays.asList(EN, SK), Collections.emptyList(), Collections.emptyList())); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(null, Arrays.asList(EN, SK), Collections.emptyList())); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(null, Collections.emptyList(), Arrays.asList(EN, SK))); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(null, Arrays.asList(EN), Arrays.asList(SK))); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(Arrays.asList(EN, EN), Collections.emptyList(), Collections.emptyList())); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(null, Arrays.asList(EN, EN), Collections.emptyList())); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(null, Collections.emptyList(), Arrays.asList(EN, EN))); assertThrows(IllegalArgumentException.class, () -> new AliasUpdateImpl(null, Arrays.asList(EN), Arrays.asList(EN))); } @Test public void testImmutability() { List<MonolingualTextValue> recreated = new ArrayList<>(); List<MonolingualTextValue> added = new ArrayList<>(); List<MonolingualTextValue> removed = new ArrayList<>(); recreated.add(EN); added.add(EN); removed.add(EN2); AliasUpdate update1 = new AliasUpdateImpl(recreated, Collections.emptyList(), Collections.emptyList()); assertThrows(UnsupportedOperationException.class, () -> update1.getRecreated().get().add(EN4)); assertThrows(UnsupportedOperationException.class, () -> update1.getAdded().add(EN4)); assertThrows(UnsupportedOperationException.class, () -> update1.getRemoved().add(EN4)); AliasUpdate update2 = new AliasUpdateImpl(null, added, removed); assertThrows(UnsupportedOperationException.class, () -> update2.getAdded().add(EN4)); assertThrows(UnsupportedOperationException.class, () -> update2.getRemoved().add(EN4)); recreated.add(EN2); added.add(EN3); removed.add(EN4); assertEquals(1, update1.getRecreated().get().size()); assertEquals(1, update2.getAdded().size()); assertEquals(1, update2.getRemoved().size()); } @Test public void testEmpty() { assertTrue(new AliasUpdateImpl(null, Collections.emptyList(), Collections.emptyList()).isEmpty()); assertFalse(new AliasUpdateImpl(null, Arrays.asList(EN), Collections.emptyList()).isEmpty()); assertFalse(new AliasUpdateImpl(null, Collections.emptyList(), Arrays.asList(EN)).isEmpty()); assertFalse(new AliasUpdateImpl(Arrays.asList(EN), Collections.emptyList(), Collections.emptyList()).isEmpty()); assertFalse(new AliasUpdateImpl(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()) .isEmpty()); } @Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { List<MonolingualTextValue> recreated = Arrays.asList(EN); List<MonolingualTextValue> added = Arrays.asList(EN2); List<MonolingualTextValue> removed = Arrays.asList(EN3); AliasUpdate update1 = new AliasUpdateImpl(recreated, Collections.emptyList(), Collections.emptyList()); AliasUpdate update2 = new AliasUpdateImpl(null, added, removed); assertFalse(update1.equals(null)); assertFalse(update2.equals(null)); assertFalse(update1.equals(this)); assertFalse(update2.equals(this)); assertTrue(update1.equals(update1)); assertTrue(update2.equals(update2)); assertFalse(update1.equals(update2)); assertTrue(update1.equals(new AliasUpdateImpl(recreated, Collections.emptyList(), Collections.emptyList()))); assertTrue(update2.equals(new AliasUpdateImpl(null, added,
removed))); assertFalse(update1.equals( new AliasUpdateImpl(Arrays.asList(EN2), Collections.emptyList(), Collections.emptyList()))); assertFalse(update2.equals(new AliasUpdateImpl(null, Arrays.asList(EN4), removed))); assertFalse(update2.equals(new AliasUpdateImpl(null, added, Arrays.asList(EN4)))); } @Test public void testHashCode() { AliasUpdate update1a = new AliasUpdateImpl(Arrays.asList(EN), Collections.emptyList(), Collections.emptyList()); AliasUpdate update1b = new AliasUpdateImpl(Arrays.asList(EN), Collections.emptyList(), Collections.emptyList()); AliasUpdate update2a = new AliasUpdateImpl(null, Arrays.asList(EN), Arrays.asList(EN2)); AliasUpdate update2b = new AliasUpdateImpl(null, Arrays.asList(EN), Arrays.asList(EN2)); assertEquals(update1a.hashCode(), update1b.hashCode()); assertEquals(update2a.hashCode(), update2b.hashCode()); } @Test public void testJson() { assertThat(AliasUpdateBuilder.create().build(), producesJson("null")); assertThat(AliasUpdateBuilder.create().recreate(Collections.emptyList()).build(), producesJson("[]")); assertThat(AliasUpdateBuilder.create().recreate(Arrays.asList(EN, EN2)).build(), producesJson("[{'language':'en','value':'hello'},{'language':'en','value':'hi'}]")); assertThat(AliasUpdateBuilder.create().add(EN).build(), producesJson("[{'add':'','language':'en','value':'hello'}]")); assertThat(AliasUpdateBuilder.create().remove(EN).build(), producesJson("[{'language':'en','remove':'','value':'hello'}]")); } } ClaimImplTest.java000066400000000000000000000106351444772566300362100ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.*; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; public class ClaimImplTest { private final EntityIdValue subject = new ItemIdValueImpl("Q42", "http://wikidata.org/entity/"); private final ValueSnak mainSnak = new ValueSnakImpl( new PropertyIdValueImpl("P42", "http://wikidata.org/entity/"), subject ); private final Claim c1 = new ClaimImpl(subject, mainSnak, Collections.emptyList()); private final Claim c2 = new ClaimImpl(subject, mainSnak, Collections.emptyList()); @Test public void gettersWorking() { assertEquals(c1.getSubject(), subject); assertEquals(c1.getMainSnak(), mainSnak); assertEquals(c1.getQualifiers(), Collections.emptyList()); } @Test(expected = NullPointerException.class) public void subjectNotNull() { new ClaimImpl(null, mainSnak, Collections.emptyList()); } @Test(expected = NullPointerException.class) public void mainSnakNotNull() { new ClaimImpl(subject, null, Collections.emptyList()); } @Test(expected = NullPointerException.class) public void qualifiersNotNull() { new ClaimImpl(subject, mainSnak, null); } @Test public void hashBasedOnContent() { assertEquals(c1.hashCode(), c2.hashCode()); } @Test public void equalityBasedOnContent() { Claim cDiffSubject, cDiffMainSnak, cDiffQualifiers; EntityIdValue subject2 = new ItemIdValueImpl("Q43", "http://wikidata.org/entity/"); PropertyIdValue property = new PropertyIdValueImpl( "P43", "http://wikidata.org/entity/"); ValueSnak mainSnak2 = new ValueSnakImpl(property, subject2); cDiffSubject = new ClaimImpl(subject2, mainSnak, Collections.emptyList()); cDiffMainSnak = new ClaimImpl(subject, mainSnak2, Collections.emptyList()); cDiffQualifiers = new ClaimImpl(subject, mainSnak, Collections.singletonList(new SnakGroupImpl( Collections.<Snak>
singletonList(mainSnak)))); assertEquals(c1, c1); assertEquals(c1, c2); assertNotEquals(c1, cDiffSubject); assertNotEquals(c1, cDiffMainSnak); assertNotEquals(c1, cDiffQualifiers); assertNotEquals(c1, null); assertNotEquals(c1, this); } @Test public void accessSnakGroups() { EntityIdValue value1 = new ItemIdValueImpl("Q1", "http://wikidata.org/entity/"); EntityIdValue value2 = new ItemIdValueImpl("Q2", "http://wikidata.org/entity/"); PropertyIdValue property1 = new PropertyIdValueImpl("P1", "http://wikidata.org/entity/"); PropertyIdValue property2 = new PropertyIdValueImpl("P2", "http://wikidata.org/entity/"); Snak snak1 = new ValueSnakImpl(property1, value1); Snak snak2 = new ValueSnakImpl(property1, value2); Snak snak3 = new ValueSnakImpl(property2, value2); List<Snak> snakList1 = new ArrayList<>(); snakList1.add(snak1); snakList1.add(snak2); SnakGroup snakGroup1 = new SnakGroupImpl(snakList1); SnakGroup snakGroup2 = new SnakGroupImpl( Collections.singletonList(snak3)); List<SnakGroup> snakGroups = new ArrayList<>(); snakGroups.add(snakGroup1); snakGroups.add(snakGroup2); Claim claim = new ClaimImpl(subject, mainSnak, snakGroups); Iterator<Snak> snaks = claim.getAllQualifiers(); assertTrue(snaks.hasNext()); assertEquals(snak1, snaks.next()); assertTrue(snaks.hasNext()); assertEquals(snak2, snaks.next()); assertTrue(snaks.hasNext()); assertEquals(snak3, snaks.next()); assertFalse(snaks.hasNext()); } } DataObjectFactoryImplTest.java000066400000000000000000000220501444772566300405060ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import static org.junit.Assert.assertEquals; import java.math.BigDecimal; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.*; public class DataObjectFactoryImplTest { private final DataObjectFactory factory = new DataObjectFactoryImpl(); @Test public final void testGetItemId() { ItemIdValue o1 = new ItemIdValueImpl("Q42", "foo"); ItemIdValue o2 = factory.getItemIdValue("Q42", "foo"); assertEquals(o1, o2); } @Test public final void testGetLexemeId() { LexemeIdValue o1 = new LexemeIdValueImpl("L42", "foo"); LexemeIdValue o2 = factory.getLexemeIdValue("L42", "foo"); assertEquals(o1, o2); } @Test public final void testGetFormId() { FormIdValue o1 = new FormIdValueImpl("L42-F1", "foo"); FormIdValue o2 = factory.getFormIdValue("L42-F1", "foo"); assertEquals(o1, o2); } @Test public final void testGetSenseId() { SenseIdValue o1 = new SenseIdValueImpl("L42-S1", "foo"); SenseIdValue o2 = factory.getSenseIdValue("L42-S1", "foo"); assertEquals(o1, o2); } @Test public final void testGetPropertyId() { PropertyIdValue o1 = new PropertyIdValueImpl("P42", "foo"); PropertyIdValue o2 = factory.getPropertyIdValue("P42", "foo"); assertEquals(o1, o2); } @Test public final void testGetMediaInfoId() { MediaInfoIdValue o1 = new MediaInfoIdValueImpl("M22", "foo"); MediaInfoIdValue o2 = factory.getMediaInfoIdValue("M22", "foo"); assertEquals(o1, o2); } @Test public final void testGetDatatypeId() { DatatypeIdValue o1 = new DatatypeIdImpl(null, DatatypeIdValue.JSON_DT_TIME); DatatypeIdValue o2 = factory.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_TIME); assertEquals(o1, o2); } @Test public final void testGetDatatypeIdWithJsonString() { DatatypeIdValue o1 = new DatatypeIdImpl(DatatypeIdValue.DT_STRING, DatatypeIdImpl.JSON_DT_STRING); DatatypeIdValue o2 = factory.getDatatypeIdValueFromJsonId(DatatypeIdImpl.JSON_DT_STRING); assertEquals(o1, o2); } @Test public final void testGetTimeValue() { TimeValue o1 = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_DAY, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue o2 = factory.getTimeValue(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_DAY, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); assertEquals(o1, o2); } @Test public final void testGetGlobeCoordinatesValue() { GlobeCoordinatesValue o1 = new GlobeCoordinatesValueImpl(90.0, 190.5, GlobeCoordinatesValue.PREC_DECI_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); GlobeCoordinatesValue o2 = factory.getGlobeCoordinatesValue(90.0, 190.5, GlobeCoordinatesValue.PREC_DECI_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); assertEquals(o1, o2); } @Test public final void testGetStringValue() { StringValue o1 = new StringValueImpl("foo"); StringValue o2 = factory.getStringValue("foo"); assertEquals(o1, o2); } @Test public final void testGetMonolingualTextValue() { MonolingualTextValue o1 = new MonolingualTextValueImpl("foo", "en"); MonolingualTextValue o2 = factory.getMonolingualTextValue("foo", "en"); assertEquals(o1, o2); } @Test public final void testGetQuantityValue() { BigDecimal nv = new BigDecimal("0.123456789012345678901234567890123456789"); BigDecimal lb = new BigDecimal("0.123456789012345678901234567890123456788"); BigDecimal ub = new BigDecimal("0.123456789012345678901234567890123456790"); ItemIdValue unit = ItemIdValueImpl.fromIri("http://wikidata.org/entity/Q123"); QuantityValue o1 = new QuantityValueImpl(nv, lb, ub, unit); QuantityValue o2 = factory.getQuantityValue(nv, lb, ub, unit); assertEquals(o1, o2); 
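// (comment added as a hedged note: the factory presumably returns the same value-based QuantityValueImpl as direct construction, which is why the two objects compare equal above)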
} @Test public final void testGetQuantityValueNoUnit() { BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); BigDecimal lb = new BigDecimal( "0.123456789012345678901234567890123456788"); BigDecimal ub = new BigDecimal( "0.123456789012345678901234567890123456790"); QuantityValue o1 = new QuantityValueImpl(nv, lb, ub, (ItemIdValue)null); QuantityValue o2 = factory.getQuantityValue(nv, lb, ub); assertEquals(o1, o2); } @Test public final void testGetQuantityValueNoBounds() { BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); ItemIdValue unit = ItemIdValueImpl.fromIri("http://wikidata.org/entity/Q2334"); QuantityValue o1 = new QuantityValueImpl(nv, null, null, unit); QuantityValue o2 = factory.getQuantityValue(nv, unit); assertEquals(o1, o2); } @Test public final void testGetQuantityValueNoBoundsAndUnits() { BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); QuantityValue o1 = new QuantityValueImpl(nv, null, null, (ItemIdValue)null); QuantityValue o2 = factory.getQuantityValue(nv); assertEquals(o1, o2); } @Test public final void testGetValueSnak() { ValueSnak o1 = new ValueSnakImpl( factory.getPropertyIdValue("P42", "foo"), factory.getStringValue("foo")); ValueSnak o2 = factory.getValueSnak( factory.getPropertyIdValue("P42", "foo"), factory.getStringValue("foo")); assertEquals(o1, o2); } @Test public final void testGetSomeValueSnak() { SomeValueSnak o1 = new SomeValueSnakImpl(factory.getPropertyIdValue("P42", "foo")); SomeValueSnak o2 = factory.getSomeValueSnak(factory.getPropertyIdValue("P42", "foo")); assertEquals(o1, o2); } @Test public final void testGetNoValueSnak() { NoValueSnak o1 = new NoValueSnakImpl(factory.getPropertyIdValue("P42", "foo")); NoValueSnak o2 = factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")); assertEquals(o1, o2); } @Test public final void testGetSnakGroup() { Snak s = factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")); SnakGroup o1 = new SnakGroupImpl(Collections.singletonList(s)); SnakGroup o2 = factory.getSnakGroup(Collections.singletonList(s)); assertEquals(o1, o2); } @Test public final void testGetClaim() { Claim o1 = new ClaimImpl( factory.getItemIdValue("Q42", "foo"), factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")), Collections.emptyList() ); Claim o2 = factory.getClaim( factory.getItemIdValue("Q42", "foo"), factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")), Collections.emptyList() ); assertEquals(o1, o2); } @Test public final void testGetReference() { Reference r1 = new ReferenceImpl(Collections.emptyList()); Reference r2 = factory.getReference(Collections.emptyList()); assertEquals(r1, r2); } @Test public final void testGetStatementGroup() { Statement s = factory.getStatement( factory.getItemIdValue("Q42", "foo"), factory.getNoValueSnak(factory.getPropertyIdValue("P42", "foo")), Collections.emptyList(), Collections.emptyList(), StatementRank.NORMAL, "MyId"); StatementGroup o1 = new StatementGroupImpl(Collections.singletonList(s)); StatementGroup o2 = factory.getStatementGroup(Collections.singletonList(s)); assertEquals(o1, o2); } @Test public final void testGetSiteLink() { SiteLink o1 = new SiteLinkImpl("SOLID", "enwiki", Collections.emptyList()); SiteLink o2 = factory.getSiteLink("SOLID", "enwiki", Collections.emptyList()); assertEquals(o1, o2); } @Test public final void testGetPropertyDocument() { PropertyDocument o1 = new PropertyDocumentImpl( factory.getPropertyIdValue("P42", "foo"), Collections.emptyList(), 
Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), factory.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_TIME), 0); PropertyDocument o2 = factory.getPropertyDocument( factory.getPropertyIdValue("P42", "foo"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), factory.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_TIME), 0); assertEquals(o1, o2); } @Test public final void testGetItemDocument() { ItemDocument o1 = new ItemDocumentImpl( factory.getItemIdValue("Q42", "foo"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); ItemDocument o2 = factory.getItemDocument( factory.getItemIdValue("Q42", "foo"), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), 0); assertEquals(o1, o2); } } DatatypeIdImplTest.java000066400000000000000000000042411444772566300372070ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.*; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; public class DatatypeIdImplTest { private final DatatypeIdImpl d1 = new DatatypeIdImpl(DatatypeIdValue.DT_ITEM); private final DatatypeIdImpl d2 = new DatatypeIdImpl("http://wikiba.se/ontology#WikibaseItem"); private final DatatypeIdImpl d3 = new DatatypeIdImpl(DatatypeIdValue.DT_TIME); private final DatatypeIdImpl d4 = new DatatypeIdImpl("http://wikiba.se/ontology#SomeUnknownDatatype", "some-unknownDatatype"); @Test(expected = NullPointerException.class) public void datatypeIdNotNull() { new DatatypeIdImpl((String) null); } @Test public void equalityBasedOnContent() { assertEquals(d1, d1); assertEquals(d1, d2); assertNotEquals(d1, d3); assertNotEquals(d1, null); assertNotEquals(d1, new StringValueImpl("foo")); } @Test public void hashBasedOnContent() { assertEquals(d1.hashCode(), d2.hashCode()); } @Test public void doNotChokeOnUnknownDatatypes() { // for issue https://github.com/Wikidata/Wikidata-Toolkit/issues/716 assertEquals("some-unknownDatatype", d4.getJsonString()); assertEquals("http://wikiba.se/ontology#SomeUnknownDatatype", d4.getIri()); } @Test public void testDeserializeUnknownJsonDatatype() { // for issue https://github.com/Wikidata/Wikidata-Toolkit/issues/716 assertEquals("http://wikiba.se/ontology#LocalMedia", DatatypeIdImpl.getDatatypeIriFromJsonDatatype("localMedia")); } } EntityIdValueImplTest.java000066400000000000000000000035721444772566300377130ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import org.junit.Test; import static org.junit.Assert.assertEquals; public class EntityIdValueImplTest { @Test public void testFromIdItem() { assertEquals(new ItemIdValueImpl("Q42", "http://foo/"), EntityIdValueImpl.fromId("Q42", "http://foo/")); } @Test public void testFromIdProperty() { assertEquals(new PropertyIdValueImpl("P42", "http://foo/"), EntityIdValueImpl.fromId("P42", "http://foo/")); } @Test public void testFromIdLexeme() { assertEquals(new LexemeIdValueImpl("L42", "http://foo/"), EntityIdValueImpl.fromId("L42", "http://foo/")); } @Test public void testFromIdForm() { assertEquals(new FormIdValueImpl("L42-F1", "http://foo/"), EntityIdValueImpl.fromId("L42-F1", "http://foo/")); } @Test public void testFromIdSense() { assertEquals(new SenseIdValueImpl("L42-S1", "http://foo/"), EntityIdValueImpl.fromId("L42-S1", "http://foo/")); } @Test public void testFromIdMediaInfo() { assertEquals(new MediaInfoIdValueImpl("M42", "http://foo/"), EntityIdValueImpl.fromId("M42", "http://foo/")); } @Test(expected = IllegalArgumentException.class) public void testFromIdFailure() { EntityIdValueImpl.fromId("L42-P1", "http://foo/"); } } EntityRedirectDocumentImplTest.java000066400000000000000000000073541444772566300416240ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.EntityRedirectDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import java.io.IOException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; public class EntityRedirectDocumentImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final ItemIdValue entityItemId = new ItemIdValueImpl("Q1", "http://example.com/entity/"); private final ItemIdValue targetItemId = new ItemIdValueImpl("Q2", "http://example.com/entity/"); private final EntityRedirectDocument itemRedirect = new EntityRedirectDocumentImpl(entityItemId, targetItemId, 0); private final EntityRedirectDocument itemRedirect2 = new EntityRedirectDocumentImpl(entityItemId, targetItemId, 0); private final EntityRedirectDocument lexemeRedirect = new EntityRedirectDocumentImpl( new LexemeIdValueImpl("L1", "http://example.com/entity/"), new LexemeIdValueImpl("L2", "http://example.com/entity/"), 0 ); private final String JSON_ITEM_REDIRECT = "{\"entity\":\"Q1\",\"redirect\":\"Q2\"}"; @Test public void fieldsAreCorrect() { assertEquals(itemRedirect.getEntityId(), entityItemId); assertEquals(itemRedirect.getTargetId(), targetItemId); } @Test public void equalityBasedOnContent() { EntityRedirectDocumentImpl diffEntity = new EntityRedirectDocumentImpl(targetItemId, targetItemId, 0); EntityRedirectDocumentImpl diffTarget = new EntityRedirectDocumentImpl(entityItemId, entityItemId, 0); EntityRedirectDocumentImpl diffRevisionId = new EntityRedirectDocumentImpl(entityItemId, targetItemId, 1); assertEquals(itemRedirect, itemRedirect2); assertNotEquals(itemRedirect, lexemeRedirect); assertNotEquals(itemRedirect, diffEntity); assertNotEquals(itemRedirect, diffTarget); assertNotEquals(itemRedirect, diffRevisionId); assertNotEquals(itemRedirect, null); assertNotEquals(itemRedirect, this); } @Test public void hashBasedOnContent() { assertEquals(itemRedirect.hashCode(), itemRedirect2.hashCode()); } @Test(expected = NullPointerException.class) public void idNotNull() { new EntityRedirectDocumentImpl(null, targetItemId, 0); } @Test(expected = NullPointerException.class) public void targetNotNull() { new EntityRedirectDocumentImpl(entityItemId, null, 0); } @Test(expected = IllegalArgumentException.class) public void entityTypeEquality() { new EntityRedirectDocumentImpl(entityItemId, new LexemeIdValueImpl("L1", "http://example.com/entity/"), 0); } @Test public void testRedirectToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_ITEM_REDIRECT, mapper.writeValueAsString(itemRedirect)); } @Test public void testLexemeToJava() throws IOException { assertEquals(itemRedirect, mapper.readValue(JSON_ITEM_REDIRECT, EntityRedirectDocumentImpl.class)); } } EntityUpdateImplTest.java000066400000000000000000000047201444772566300376000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class EntityUpdateImplTest { private static final PropertyIdValue P1 = Datamodel.makeWikidataPropertyIdValue("P1"); private static final PropertyIdValue P2 = Datamodel.makeWikidataPropertyIdValue("P2"); private static EntityUpdate create(PropertyIdValue entityId, long revisionId) { return new PropertyUpdateImpl(entityId, revisionId, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap(), StatementUpdate.EMPTY); } @Test public void testFields() { EntityUpdate update = create(P1, 123); assertEquals(P1, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); update = create(P1, 0); assertEquals(0, update.getBaseRevisionId()); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> create(null, 0)); assertThrows(IllegalArgumentException.class, () -> create(PropertyIdValue.NULL, 0)); } @Test public void testEquality() { EntityUpdate update = create(P1, 123); assertTrue(update.equals(update)); assertTrue(update.equals(create(P1, 123))); assertFalse(update.equals(create(P2, 123))); assertFalse(update.equals(create(P1, 777))); } @Test public void testHashCode() { assertEquals(create(P1, 123).hashCode(), create(P1, 123).hashCode()); } } FormDocumentImplTest.java000066400000000000000000000226351444772566300375700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; /* * #%L * Wikidata 
Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public class FormDocumentImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final FormIdValue fid = new FormIdValueImpl("L42-F1", "http://example.com/entity/"); private final List<ItemIdValue> gramFeatures = Arrays.asList( new ItemIdValueImpl("Q2", "http://example.com/entity/"), new ItemIdValueImpl("Q1", "http://example.com/entity/") ); private final Statement s = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), fid); private final List<StatementGroup> statementGroups = Collections.singletonList( new StatementGroupImpl(Collections.singletonList(s)) ); private final MonolingualTextValue rep = new TermImpl("en", "rep"); private final List<MonolingualTextValue> repList = Collections.singletonList(rep); private final FormDocument fd1 = new FormDocumentImpl(fid, repList, gramFeatures, statementGroups, 1234); private final FormDocument fd2 = new FormDocumentImpl(fid, repList, gramFeatures, statementGroups, 1234); private final String JSON_FORM = "{\"type\":\"form\",\"id\":\"L42-F1\",\"grammaticalFeatures\":[\"Q1\",\"Q2\"],\"representations\":{\"en\":{\"language\":\"en\",\"value\":\"rep\"}},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]},\"lastrevid\":1234}"; @Test public void fieldsAreCorrect() { assertEquals(fd1.getEntityId(), fid); assertEquals(fd1.getRepresentations(), Collections.singletonMap(rep.getLanguageCode(), rep)); assertEquals(fd1.getGrammaticalFeatures(), gramFeatures); assertEquals(fd1.getStatementGroups(), statementGroups); } @Test public void equalityBasedOnContent() { FormDocument irDiffRepresentations = new FormDocumentImpl(fid, Collections.singletonList(new MonolingualTextValueImpl("fr", "bar")), gramFeatures, statementGroups, 1234); FormDocument irDiffGramFeatures = new FormDocumentImpl(fid, repList, Collections.emptyList(), statementGroups, 1234); FormDocument irDiffStatementGroups = new FormDocumentImpl(fid, repList, gramFeatures, Collections.emptyList(), 1234); FormDocument irDiffRevisions = new FormDocumentImpl(fid, repList, gramFeatures, statementGroups, 1235); PropertyDocument pr = new PropertyDocumentImpl( new PropertyIdValueImpl("P42", "foo"), repList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), new DatatypeIdImpl(DatatypeIdValue.DT_STRING), 1234); FormDocument irDiffFormIdValue = new FormDocumentImpl( new FormIdValueImpl("L42-F2", "http://example.com/entity/"), repList, gramFeatures, Collections.emptyList(), 1235); assertEquals(fd1, fd1); assertEquals(fd1, fd2); assertNotEquals(fd1, irDiffRepresentations); assertNotEquals(fd1, irDiffGramFeatures); assertNotEquals(fd1,
irDiffStatementGroups); assertNotEquals(fd1, irDiffRevisions); assertNotEquals(irDiffStatementGroups, irDiffFormIdValue); assertNotEquals(fd1, pr); assertNotEquals(fd1, null); assertNotEquals(fd1, this); } @Test public void hashBasedOnContent() { assertEquals(fd1.hashCode(), fd2.hashCode()); } @Test(expected = NullPointerException.class) public void idNotNull() { new FormDocumentImpl(null, repList, gramFeatures, statementGroups, 1234); } @Test public void representationsNull() { assertEquals(Collections.emptyMap(), new FormDocumentImpl(fid, null, gramFeatures, statementGroups, 1234).getRepresentations()); } @Test public void representationsEmpty() { assertEquals(Collections.emptyMap(), new FormDocumentImpl(fid, Collections.emptyList(), gramFeatures, statementGroups, 1234).getRepresentations()); } @Test public void grammaticalFeaturesCanBeNull() { FormDocument doc = new FormDocumentImpl(fid, repList, null, statementGroups, 1234); assertTrue(doc.getGrammaticalFeatures().isEmpty()); } @Test public void statementGroupsCanBeNull() { FormDocument doc = new FormDocumentImpl(fid, repList, gramFeatures, null, 1234); assertTrue(doc.getStatementGroups().isEmpty()); } @Test(expected = IllegalArgumentException.class) public void statementGroupsUseSameSubject() { FormIdValue iid2 = new FormIdValueImpl("Q23", "http://example.org/"); Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://wikibase.org/entity/")), Collections.emptyList(), Collections.emptyList(), iid2); StatementGroup sg2 = new StatementGroupImpl(Collections.singletonList(s2)); List<StatementGroup> statementGroups2 = new ArrayList<>(); statementGroups2.add(statementGroups.get(0)); statementGroups2.add(sg2); new FormDocumentImpl(fid, repList, gramFeatures, statementGroups2, 1234); } @Test public void iterateOverAllStatements() { Iterator<Statement> statements = fd1.getAllStatements(); assertTrue(statements.hasNext()); assertEquals(s, statements.next()); assertFalse(statements.hasNext()); } @Test public void testWithEntityId() { assertEquals(FormIdValue.NULL, fd1.withEntityId(FormIdValue.NULL).getEntityId()); FormIdValue id = Datamodel.makeWikidataFormIdValue("L123-F45"); assertEquals(id, fd1.withEntityId(id).getEntityId()); } @Test public void testWithRevisionId() { assertEquals(1235L, fd1.withRevisionId(1235L).getRevisionId()); assertEquals(fd1, fd1.withRevisionId(1325L).withRevisionId(fd1.getRevisionId())); } @Test public void testWithRepresentationInNewLanguage() { MonolingualTextValue newRepresentation = new MonolingualTextValueImpl("Foo", "fr"); FormDocument withRepresentation = fd1.withRepresentation(newRepresentation); assertEquals(newRepresentation, withRepresentation.getRepresentations().get("fr")); } @Test public void testWithNewGrammaticalFeatures() { ItemIdValue newGrammaticalFeature = new ItemIdValueImpl("Q3", "http://example.com/entity/"); FormDocument withGrammaticalFeature = fd1.withGrammaticalFeature(newGrammaticalFeature); assertTrue(withGrammaticalFeature.getGrammaticalFeatures().containsAll(gramFeatures)); assertTrue(withGrammaticalFeature.getGrammaticalFeatures().contains(newGrammaticalFeature)); } @Test public void testWithExistingGrammaticalFeatures() { ItemIdValue newGrammaticalFeature = new ItemIdValueImpl("Q2", "http://example.com/entity/"); FormDocument withGrammaticalFeature = fd1.withGrammaticalFeature(newGrammaticalFeature); assertEquals(fd1, withGrammaticalFeature); } @Test public void testAddStatement() { Statement fresh = new StatementImpl("MyFreshId",
StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P29", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), fid); Claim claim = fresh.getClaim(); assertFalse(fd1.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); FormDocument withStatement = fd1.withStatement(fresh); assertTrue(withStatement.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); } @Test public void testDeleteStatements() { Statement toRemove = statementGroups.get(0).getStatements().get(0); FormDocument withoutStatement = fd1.withoutStatementIds(Collections.singleton(toRemove.getStatementId())); assertNotEquals(withoutStatement, fd1); } @Test public void testFormToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_FORM, mapper.writeValueAsString(fd1)); } @Test public void testFormToJava() throws IOException { assertEquals(fd1, mapper.readValue(JSON_FORM, FormDocumentImpl.class)); } } FormIdValueImplTest.java000066400000000000000000000103311444772566300373310ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.*; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; public class FormIdValueImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://www.wikidata.org/entity/"); private final FormIdValueImpl form1 = new FormIdValueImpl("L42-F1", "http://www.wikidata.org/entity/"); private final FormIdValueImpl form2 = new FormIdValueImpl("L42-F1", "http://www.wikidata.org/entity/"); private final FormIdValueImpl form3 = new FormIdValueImpl("L57-F2", "http://www.wikidata.org/entity/"); private final FormIdValueImpl form4 = new FormIdValueImpl("L42-F1", "http://www.example.org/entity/"); private final String JSON_FORM_ID_VALUE = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"form\",\"id\":\"L42-F1\"}}"; private final String JSON_FORM_ID_VALUE_WITHOUT_TYPE = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"L42-F1\"}}"; @Test public void entityTypeIsForm() { assertEquals(form1.getEntityType(), EntityIdValue.ET_FORM); } @Test public void iriIsCorrect() { assertEquals(form1.getIri(), "http://www.wikidata.org/entity/L42-F1"); assertEquals(form4.getIri(), "http://www.example.org/entity/L42-F1"); } @Test public void siteIriIsCorrect() { assertEquals(form1.getSiteIri(), "http://www.wikidata.org/entity/"); } @Test public void idIsCorrect() { assertEquals(form1.getId(), "L42-F1"); } @Test public void equalityBasedOnContent() { assertEquals(form1, form1); assertEquals(form1, form2); assertNotEquals(form1, form3); assertNotEquals(form1, form4); assertNotEquals(form1, null); assertNotEquals(form1, this); } @Test public void hashBasedOnContent() { assertEquals(form1.hashCode(), form2.hashCode()); } @Test(expected = RuntimeException.class) public void idValidatedForFirstLetter() { new FormIdValueImpl("Q12345", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForNumber() { new FormIdValueImpl("L34d23", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForLength() { new FormIdValueImpl("L", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForParts() { new FormIdValueImpl("L21", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idNotNull() { new FormIdValueImpl((String)null, "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void baseIriNotNull() { new FormIdValueImpl("L42", null); } @Test public void lexemeIdIsCorrect() { assertEquals(form1.getLexemeId(), new LexemeIdValueImpl("L42", "http://www.wikidata.org/entity/")); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_FORM_ID_VALUE, mapper.writeValueAsString(form1)); } @Test public void testToJava() throws IOException { assertEquals(form1, mapper.readValue(JSON_FORM_ID_VALUE, ValueImpl.class)); } @Test public void testToJavaWithoutNumericalID() throws IOException { assertEquals(form1, mapper.readValue(JSON_FORM_ID_VALUE_WITHOUT_TYPE, ValueImpl.class)); } @Test public void testIsPlaceholder() { assertFalse(form1.isPlaceholder()); } }
FormUpdateImplTest.java000066400000000000000000000153751444772566300372370ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.toJson; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.FormUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class FormUpdateImplTest { private static final FormIdValue F1 = Datamodel.makeWikidataFormIdValue("L1-F1"); private static final StatementUpdate STATEMENTS = StatementDocumentUpdateImplTest.STATEMENTS; private static final TermUpdate REPRESENTATIONS = TermUpdateBuilder.create().remove("en").build(); private static final ItemIdValue FEATURE1 = Datamodel.makeWikidataItemIdValue("Q1"); private static final ItemIdValue FEATURE2 = Datamodel.makeWikidataItemIdValue("Q2"); private static final List FEATURES = Arrays.asList(FEATURE1); @Test public void testFields() { FormUpdate update = new FormUpdateImpl(F1, 123, REPRESENTATIONS, FEATURES, STATEMENTS); assertEquals(F1, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(REPRESENTATIONS, update.getRepresentations()); assertEquals(new HashSet<>(FEATURES), update.getGrammaticalFeatures().get()); assertSame(STATEMENTS, update.getStatements()); update = new FormUpdateImpl(F1, 123, REPRESENTATIONS, null, STATEMENTS); assertFalse(update.getGrammaticalFeatures().isPresent()); update = new FormUpdateImpl(F1, 123, REPRESENTATIONS, Collections.emptyList(), STATEMENTS); assertThat(update.getGrammaticalFeatures().get(), is(empty())); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> new FormUpdateImpl(F1, 0, null, null, StatementUpdate.EMPTY)); assertThrows(NullPointerException.class, () -> new FormUpdateImpl( F1, 0, TermUpdate.EMPTY, Arrays.asList(FEATURE1, null), 
StatementUpdate.EMPTY)); assertThrows(IllegalArgumentException.class, () -> new FormUpdateImpl( F1, 0, TermUpdate.EMPTY, Arrays.asList(ItemIdValue.NULL), StatementUpdate.EMPTY)); assertThrows(IllegalArgumentException.class, () -> new FormUpdateImpl( F1, 0, TermUpdate.EMPTY, Arrays.asList(FEATURE1, FEATURE1), StatementUpdate.EMPTY)); } @Test public void testImmutability() { List features = new ArrayList<>(); features.add(FEATURE1); FormUpdate update = new FormUpdateImpl(F1, 123, REPRESENTATIONS, features, STATEMENTS); assertThrows(UnsupportedOperationException.class, () -> update.getGrammaticalFeatures().get().add(FEATURE2)); features.add(FEATURE2); assertEquals(1, update.getGrammaticalFeatures().get().size()); } @Test public void testEmpty() { assertFalse(new FormUpdateImpl(F1, 0, REPRESENTATIONS, null, StatementUpdate.EMPTY).isEmpty()); assertFalse( new FormUpdateImpl(F1, 0, TermUpdate.EMPTY, Collections.emptyList(), StatementUpdate.EMPTY).isEmpty()); assertFalse(new FormUpdateImpl(F1, 0, TermUpdate.EMPTY, FEATURES, StatementUpdate.EMPTY).isEmpty()); assertFalse(new FormUpdateImpl(F1, 0, TermUpdate.EMPTY, null, STATEMENTS).isEmpty()); assertTrue(new FormUpdateImpl(F1, 0, TermUpdate.EMPTY, null, StatementUpdate.EMPTY).isEmpty()); } @Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { FormUpdate update = new FormUpdateImpl(F1, 0, REPRESENTATIONS, FEATURES, STATEMENTS); assertFalse(update.equals(null)); assertFalse(update.equals(this)); assertTrue(update.equals(update)); assertTrue(update.equals(new FormUpdateImpl(F1, 0, TermUpdateBuilder.create().remove("en").build(), Arrays.asList(Datamodel.makeWikidataItemIdValue("Q1")), STATEMENTS))); assertFalse(update.equals(new FormUpdateImpl(F1, 123, REPRESENTATIONS, FEATURES, StatementUpdate.EMPTY))); assertFalse(update.equals(new FormUpdateImpl(F1, 123, TermUpdate.EMPTY, FEATURES, STATEMENTS))); assertFalse(update.equals(new FormUpdateImpl(F1, 123, REPRESENTATIONS, null, STATEMENTS))); assertFalse(update.equals(new FormUpdateImpl(F1, 123, REPRESENTATIONS, Collections.emptyList(), STATEMENTS))); assertFalse(new FormUpdateImpl(F1, 123, REPRESENTATIONS, null, STATEMENTS).equals( new FormUpdateImpl(F1, 123, REPRESENTATIONS, Collections.emptyList(), STATEMENTS))); } @Test public void testHashCode() { FormUpdate update1 = new FormUpdateImpl(F1, 123, REPRESENTATIONS, FEATURES, STATEMENTS); FormUpdate update2 = new FormUpdateImpl(F1, 123, TermUpdateBuilder.create().remove("en").build(), Arrays.asList(Datamodel.makeWikidataItemIdValue("Q1")), STATEMENTS); assertEquals(update1.hashCode(), update2.hashCode()); } @Test public void testJson() { assertThat(new FormUpdateImpl(F1, 123, TermUpdate.EMPTY, null, StatementUpdate.EMPTY), producesJson("{}")); assertThat(FormUpdateBuilder.forEntityId(F1).updateRepresentations(REPRESENTATIONS).build(), producesJson("{'representations':" + toJson(REPRESENTATIONS) + "}")); assertThat(FormUpdateBuilder.forEntityId(F1).setGrammaticalFeatures(FEATURES).build(), producesJson("{'grammaticalFeatures':['Q1']}")); assertThat(FormUpdateBuilder.forEntityId(F1).setGrammaticalFeatures(Collections.emptyList()).build(), producesJson("{'grammaticalFeatures':[]}")); assertThat(FormUpdateBuilder.forEntityId(F1).updateStatements(STATEMENTS).build(), producesJson("{'claims':" + toJson(STATEMENTS) + "}")); } } GlobeCoordinatesValueImplTest.java000066400000000000000000000115471444772566300414060ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage 
org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.*; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import java.io.IOException; public class GlobeCoordinatesValueImplTest { private final ObjectMapper mapper = new ObjectMapper(); private final GlobeCoordinatesValue c1 = new GlobeCoordinatesValueImpl(12.3, 14.1, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); private final GlobeCoordinatesValue c2 = new GlobeCoordinatesValueImpl(12.3, 14.1, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); private final GlobeCoordinatesValue c3 = new GlobeCoordinatesValueImpl(12.3, 14.1, GlobeCoordinatesValue.PREC_DEGREE, "earth"); private final String JSON_GLOBE_COORDINATES_VALUE = "{\"type\":\"" + ValueImpl.JSON_VALUE_TYPE_GLOBE_COORDINATES + "\", \"value\":{\"latitude\":12.3,\"longitude\":14.1,\"precision\":1.0,\"globe\":\"http://www.wikidata.org/entity/Q2\"}}"; @Test public void dataIsCorrect() { assertEquals(c1.getLatitude(), 12.3, 0); assertEquals(c1.getLongitude(), 14.1, 0); assertEquals(c1.getPrecision(), GlobeCoordinatesValue.PREC_DEGREE, 0); assertEquals(c1.getGlobe(), GlobeCoordinatesValue.GLOBE_EARTH); } @Test public void getGlobeItemId() { assertEquals(new ItemIdValueImpl("Q2", "http://www.wikidata.org/entity/"), c1.getGlobeItemId()); } @Test(expected = IllegalArgumentException.class) public void getGlobeItemIdInvalidIri() { c3.getGlobeItemId(); } @Test public void equalityBasedOnContent() { GlobeCoordinatesValue gcDiffLatitude = new GlobeCoordinatesValueImpl( 12.1, 14.1, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); GlobeCoordinatesValue gcDiffLongitude = new GlobeCoordinatesValueImpl( 12.3, 14.2, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); GlobeCoordinatesValue gcDiffPrecision = new GlobeCoordinatesValueImpl( 12.3, 14.1, GlobeCoordinatesValue.PREC_MILLI_ARCSECOND, GlobeCoordinatesValue.GLOBE_EARTH); GlobeCoordinatesValue gcDiffGlobe = new GlobeCoordinatesValueImpl(12.3, 14.1, GlobeCoordinatesValue.PREC_DEGREE, "http://wikidata.org/entity/Q367221"); assertEquals(c1, c1); assertEquals(c1, c2); assertNotEquals(c1, gcDiffLatitude); assertNotEquals(c1, gcDiffLongitude); assertNotEquals(c1, gcDiffPrecision); assertNotEquals(c1, gcDiffGlobe); assertNotEquals(c1, null); assertNotEquals(c1, this); } @Test public void hashBasedOnContent() { assertEquals(c1.hashCode(), c2.hashCode()); } @Test(expected = NullPointerException.class) public void globeNotNull() { new GlobeCoordinatesValueImpl(12.3, 14.1, GlobeCoordinatesValue.PREC_DEGREE, null); } @Test public void onlyAllowedPrecisions() { GlobeCoordinatesValue v = new GlobeCoordinatesValueImpl(12.3, 14.1, 0.0, GlobeCoordinatesValue.GLOBE_EARTH); 
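// (comment added as a hedged note: the implementation presumably substitutes a positive default for the non-positive precision passed above rather than rejecting it, as the following assertion expects)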
assertTrue(v.getPrecision() > 0.); } @Test(expected = IllegalArgumentException.class) public void latitudeWithinUpperRange() { new GlobeCoordinatesValueImpl(91.0, 270.0, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); } @Test(expected = IllegalArgumentException.class) public void latitudeWithinLowerRange() { new GlobeCoordinatesValueImpl(-91.0, 270.0, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); } @Test(expected = IllegalArgumentException.class) public void longitudeWithinUpperRange() { new GlobeCoordinatesValueImpl(45.0, 500.0, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); } @Test(expected = IllegalArgumentException.class) public void longitudeWithinLowerRange() { new GlobeCoordinatesValueImpl(45.0, -500.0, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_GLOBE_COORDINATES_VALUE, mapper.writeValueAsString(c1)); } @Test public void testToJava() throws IOException { assertEquals(c1, mapper.readValue(JSON_GLOBE_COORDINATES_VALUE, ValueImpl.class)); } } ItemDocumentImplTest.java000066400000000000000000000415101444772566300375540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.*; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; public class ItemDocumentImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final ItemIdValue iid = new ItemIdValueImpl("Q42", "http://example.com/entity/"); private final Statement s = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), iid); private final List<StatementGroup> statementGroups = Collections.singletonList( new StatementGroupImpl(Collections.singletonList(s)) ); private final MonolingualTextValue label = new TermImpl("en", "label"); private final List<MonolingualTextValue> labelList = Collections.singletonList(label); private final MonolingualTextValue desc = new MonolingualTextValueImpl("des", "fr"); private final List<MonolingualTextValue> descList = Collections.singletonList(desc); private final MonolingualTextValue alias = new MonolingualTextValueImpl("alias", "de"); private final List<MonolingualTextValue> aliasList = Collections.singletonList(alias); private final List<SiteLink> sitelinks = Collections.singletonList( new SiteLinkImpl("Douglas Adams", "enwiki", Collections.emptyList()) ); private final ItemDocument ir1 = new ItemDocumentImpl(iid, labelList, descList, aliasList, statementGroups, sitelinks, 1234); private final ItemDocument ir2 = new ItemDocumentImpl(iid, labelList, descList, aliasList, statementGroups, sitelinks, 1234); private final String JSON_ITEM_LABEL = "{\"type\":\"item\",\"id\":\"Q42\",\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"label\"}},\"descriptions\":{},\"aliases\":{},\"claims\":{},\"sitelinks\":{}}"; private final String JSON_ITEM_DESCRIPTION = "{\"type\":\"item\",\"id\":\"Q42\",\"labels\":{},\"descriptions\":{\"fr\":{\"language\":\"fr\",\"value\":\"des\"}},\"aliases\":{},\"claims\":{},\"sitelinks\":{}}"; private final String JSON_ITEM_ALIASES = "{\"type\":\"item\",\"id\":\"Q42\",\"labels\":{},\"descriptions\":{},\"aliases\":{\"de\":[{\"language\":\"de\",\"value\":\"alias\"}]},\"claims\":{},\"sitelinks\":{}}"; private final String JSON_ITEM_STATEMENTS = "{\"type\":\"item\",\"id\":\"Q42\",\"labels\":{},\"descriptions\":{},\"aliases\":{},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]},\"sitelinks\":{}}"; private final String JSON_ITEM_SITELINKS =
"{\"type\":\"item\",\"id\":\"Q42\",\"labels\":{},\"descriptions\":{},\"aliases\":{},\"claims\":{},\"sitelinks\":{\"enwiki\":{\"title\":\"Douglas Adams\",\"site\":\"enwiki\",\"badges\":[]}}}"; private final String JSON_ITEM_EMPTY_ARRAYS = "{\"type\":\"item\",\"id\":\"Q42\",\"labels\":[],\"descriptions\":[],\"aliases\":[],\"claims\":[],\"sitelinks\":[]}"; @Test public void fieldsAreCorrect() { assertEquals(ir1.getEntityId(), iid); assertEquals(ir1.getLabels(), Collections.singletonMap(label.getLanguageCode(), label)); assertEquals(ir1.getDescriptions(), Collections.singletonMap(desc.getLanguageCode(), desc)); assertEquals( ir1.getAliases(), Collections.singletonMap(alias.getLanguageCode(), Collections.singletonList(alias)) ); assertEquals(ir1.getStatementGroups(), statementGroups); assertEquals(new ArrayList<>(ir1.getSiteLinks().values()), sitelinks); } @Test public void findTerms() { assertEquals("label", ir1.findLabel("en")); assertNull( ir1.findLabel("ja")); assertEquals("des", ir1.findDescription("fr")); assertNull( ir1.findDescription("ja")); } @Test public void equalityBasedOnContent() { ItemDocument irDiffLabel = new ItemDocumentImpl(iid, Collections.emptyList(), descList, aliasList, statementGroups, sitelinks, 1234); ItemDocument irDiffDesc = new ItemDocumentImpl(iid, labelList, Collections.emptyList(), aliasList, statementGroups, sitelinks, 1234); ItemDocument irDiffAlias = new ItemDocumentImpl(iid, labelList, descList, Collections.emptyList(), statementGroups, sitelinks, 1234); ItemDocument irDiffStatementGroups = new ItemDocumentImpl(iid, labelList, descList, aliasList, Collections.emptyList(), sitelinks, 1234); ItemDocument irDiffSiteLinks = new ItemDocumentImpl(iid, labelList, descList, aliasList, statementGroups, Collections.emptyList(), 1234); ItemDocument irDiffRevisions = new ItemDocumentImpl(iid, labelList, descList, aliasList, statementGroups, sitelinks, 1235); PropertyDocument pr = new PropertyDocumentImpl( new PropertyIdValueImpl("P42", "foo"), labelList, descList, aliasList, Collections.emptyList(), new DatatypeIdImpl(DatatypeIdValue.DT_STRING), 1234); // we need to use empty lists of Statement groups to test inequality // based on different item ids with all other data being equal ItemDocument irDiffItemIdValue = new ItemDocumentImpl( new ItemIdValueImpl("Q23", "http://example.org/"), labelList, descList, aliasList, Collections.emptyList(), sitelinks, 1234); assertEquals(ir1, ir1); assertEquals(ir1, ir2); assertNotEquals(ir1, irDiffLabel); assertNotEquals(ir1, irDiffDesc); assertNotEquals(ir1, irDiffAlias); assertNotEquals(ir1, irDiffStatementGroups); assertNotEquals(ir1, irDiffSiteLinks); assertNotEquals(ir1, irDiffRevisions); assertNotEquals(irDiffStatementGroups, irDiffItemIdValue); assertNotEquals(ir1, pr); assertNotEquals(ir1, null); assertNotEquals(ir1, this); } @Test public void hashBasedOnContent() { assertEquals(ir1.hashCode(), ir2.hashCode()); } @Test(expected = NullPointerException.class) public void idNotNull() { new ItemDocumentImpl(null, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), statementGroups, sitelinks, 1234); } @Test public void labelsCanBeNull() { ItemDocument doc = new ItemDocumentImpl(iid, null, Collections.emptyList(), Collections.emptyList(), statementGroups, sitelinks, 1234); assertTrue(doc.getLabels().isEmpty()); } @Test public void descriptionsNotNull() { ItemDocument doc = new ItemDocumentImpl(iid, Collections.emptyList(), null, Collections.emptyList(), statementGroups, sitelinks, 1234); 
		assertTrue(doc.getDescriptions().isEmpty());
	}

	@Test
	public void aliasesCanBeNull() {
		ItemDocument doc = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), null, statementGroups, sitelinks, 1234);
		assertTrue(doc.getAliases().isEmpty());
	}

	@Test
	public void statementGroupsCanBeNull() {
		ItemDocument doc = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), null, sitelinks, 1234);
		assertTrue(doc.getStatementGroups().isEmpty());
	}

	@Test(expected = IllegalArgumentException.class)
	public void statementGroupsUseSameSubject() {
		ItemIdValue iid2 = new ItemIdValueImpl("Q23", "http://example.org/");
		Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL,
				new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://wikibase.org/entity/")),
				Collections.emptyList(), Collections.emptyList(), iid2);
		StatementGroup sg2 = new StatementGroupImpl(Collections.singletonList(s2));

		List statementGroups2 = new ArrayList<>();
		statementGroups2.add(statementGroups.get(0));
		statementGroups2.add(sg2);

		new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), statementGroups2, sitelinks, 1234);
	}

	@Test(expected = NullPointerException.class)
	public void sitelinksNotNull() {
		new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), statementGroups, null, 1234);
	}

	@Test
	public void iterateOverAllStatements() {
		Iterator<Statement> statements = ir1.getAllStatements();
		assertTrue(statements.hasNext());
		assertEquals(s, statements.next());
		assertFalse(statements.hasNext());
	}

	@Test
	public void testWithEntityId() {
		assertEquals(ItemIdValue.NULL, ir1.withEntityId(ItemIdValue.NULL).getEntityId());
		ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q123");
		assertEquals(id, ir1.withEntityId(id).getEntityId());
	}

	@Test
	public void testWithRevisionId() {
		assertEquals(1235L, ir1.withRevisionId(1235L).getRevisionId());
		assertEquals(ir1, ir1.withRevisionId(1325L).withRevisionId(ir1.getRevisionId()));
	}

	@Test
	public void testWithLabelInNewLanguage() {
		MonolingualTextValue newLabel = new MonolingualTextValueImpl("Item Q42", "fr");
		ItemDocument withLabel = ir1.withLabel(newLabel);
		assertEquals("Item Q42", withLabel.findLabel("fr"));
	}

	@Test
	public void testWithDescriptionInNewLanguage() {
		MonolingualTextValue newDescription = new MonolingualTextValueImpl("l'item 42 bien connu", "fr");
		ItemDocument withDescription = ir1.withDescription(newDescription);
		assertEquals("l'item 42 bien connu", withDescription.findDescription("fr"));
	}

	@Test
	public void testWithOverriddenDescription() {
		MonolingualTextValue newDescription = new MonolingualTextValueImpl("eine viel bessere Beschreibung", "de");
		ItemDocument withDescription = ir1.withDescription(newDescription);
		assertEquals("eine viel bessere Beschreibung", withDescription.findDescription("de"));
	}

	@Test
	public void testWithAliasInNewLanguage() {
		MonolingualTextValue newAlias = new MonolingualTextValueImpl("Item42", "fr");
		ItemDocument withAliases = ir1.withAliases("fr", Collections.singletonList(newAlias));
		assertEquals(Collections.singletonList(newAlias), withAliases.getAliases().get("fr"));
	}

	@Test
	public void testWithOverriddenAliases() {
		MonolingualTextValue newAlias = new MonolingualTextValueImpl("A new alias of Q42", "en");
		ItemDocument withAlias = ir1.withAliases("en", Collections.singletonList(newAlias));
		assertEquals(Collections.singletonList(newAlias), withAlias.getAliases().get("en"));
	}

	@Test
	public void testAddStatement() {
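		// P29 does not occur in ir1, so the fresh statement must be absent
		// before the call and present in the copy returned by withStatement()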
Statement fresh = new StatementImpl("MyFreshId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P29", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), iid); Claim claim = fresh.getClaim(); assertFalse(ir1.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); ItemDocument withStatement = ir1.withStatement(fresh); assertTrue(withStatement.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); } @Test public void testDeleteStatements() { Statement toRemove = statementGroups.get(0).getStatements().get(0); ItemDocument withoutStatement = ir1.withoutStatementIds(Collections.singleton(toRemove.getStatementId())); assertNotEquals(withoutStatement, ir1); } @Test public void testLabelsToJson() throws JsonProcessingException { ItemDocumentImpl document = new ItemDocumentImpl(iid, labelList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); JsonComparator.compareJsonStrings(JSON_ITEM_LABEL, mapper.writeValueAsString(document)); } @Test public void testLabelToJava() throws IOException { ItemDocumentImpl document = new ItemDocumentImpl(iid, labelList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); assertEquals(document, mapper.readValue(JSON_ITEM_LABEL, EntityDocumentImpl.class)); } @Test public void testDescriptionsToJson() throws JsonProcessingException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), descList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); JsonComparator.compareJsonStrings(JSON_ITEM_DESCRIPTION, mapper.writeValueAsString(document)); } @Test public void testDescriptionsToJava() throws IOException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), descList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); assertEquals(document, mapper.readValue(JSON_ITEM_DESCRIPTION, EntityDocumentImpl.class)); } @Test public void testAliasesToJson() throws JsonProcessingException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), aliasList, Collections.emptyList(), Collections.emptyList(), 0); JsonComparator.compareJsonStrings(JSON_ITEM_ALIASES, mapper.writeValueAsString(document)); } @Test public void testAliasesToJava() throws IOException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), aliasList, Collections.emptyList(), Collections.emptyList(), 0); assertEquals(document, mapper.readValue(JSON_ITEM_ALIASES, ItemDocumentImpl.class)); } @Test public void testStatementsToJson() throws JsonProcessingException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), statementGroups, Collections.emptyList(), 0); JsonComparator.compareJsonStrings(JSON_ITEM_STATEMENTS, mapper.writeValueAsString(document)); } @Test public void testStatementsToJava() throws IOException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), statementGroups, Collections.emptyList(), 0); assertEquals(document, mapper.readValue(JSON_ITEM_STATEMENTS, ItemDocumentImpl.class)); } @Test public void testSiteLinksToJson() throws JsonProcessingException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), 
Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), sitelinks, 0); JsonComparator.compareJsonStrings(JSON_ITEM_SITELINKS, mapper.writeValueAsString(document)); } @Test public void testSiteLinksToJava() throws IOException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), sitelinks, 0); assertEquals(document, mapper.readValue(JSON_ITEM_SITELINKS, ItemDocumentImpl.class)); } /** * Checks support of wrong serialization of empty object as empty array */ @Test public void testEmptyArraysForTerms() throws IOException { ItemDocumentImpl document = new ItemDocumentImpl(iid, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); assertEquals(document, mapper.readerFor(ItemDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue(JSON_ITEM_EMPTY_ARRAYS) ); } @Test public void testGetJsonId() throws Exception { ItemDocument item = Datamodel.makeItemDocument( Datamodel.makeWikidataItemIdValue("Q42"), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "label")), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "desc")), Collections.singletonList(Datamodel.makeMonolingualTextValue("en", "alias")), Collections.emptyList(), Collections.singletonMap("enwiki", Datamodel.makeSiteLink("foo", "enwiki", Collections.emptyList()))); assertEquals("Q42", ((ItemDocumentImpl) (item)).getJsonId()); } } ItemIdValueImplTest.java000066400000000000000000000127731444772566300373400ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.*; import java.io.IOException; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.UnsupportedEntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; public class ItemIdValueImplTest { private final ObjectMapper mapper = new DatamodelMapper(Datamodel.SITE_WIKIDATA); private final ItemIdValueImpl item1 = new ItemIdValueImpl("Q42", "http://www.wikidata.org/entity/"); private final ItemIdValueImpl item2 = new ItemIdValueImpl("Q42", "http://www.wikidata.org/entity/"); private final ItemIdValueImpl item3 = new ItemIdValueImpl("Q57", "http://www.wikidata.org/entity/"); private final ItemIdValueImpl item4 = new ItemIdValueImpl("Q42", "http://www.example.org/entity/"); private final String JSON_ITEM_ID_VALUE = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"item\",\"numeric-id\":42,\"id\":\"Q42\"}}"; private final String JSON_ITEM_ID_VALUE_WITHOUT_ID = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"item\",\"numeric-id\":\"42\"}}"; private final String JSON_ITEM_ID_VALUE_WITHOUT_NUMERICAL_ID = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"Q42\"}}"; private final String JSON_ITEM_ID_VALUE_WRONG_ID = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"W42\"}}"; private final String JSON_ITEM_ID_VALUE_UNSUPPORTED_TYPE = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"foo\",\"numeric-id\":42,\"id\":\"F42\"}}"; private final String JSON_ITEM_ID_VALUE_UNSUPPORTED_NO_ID = "{\"type\":\"wikibase-entityid\",\"value\":{}}"; @Test public void entityTypeIsItem() { assertEquals(item1.getEntityType(), EntityIdValue.ET_ITEM); } @Test public void iriIsCorrect() { assertEquals(item1.getIri(), "http://www.wikidata.org/entity/Q42"); assertEquals(item4.getIri(), "http://www.example.org/entity/Q42"); } @Test public void siteIriIsCorrect() { assertEquals(item1.getSiteIri(), "http://www.wikidata.org/entity/"); } @Test public void idIsCorrect() { assertEquals(item1.getId(), "Q42"); } @Test public void equalityBasedOnContent() { assertEquals(item1, item1); assertEquals(item1, item2); assertNotEquals(item1, item3); assertNotEquals(item1, item4); assertNotEquals(item1, null); assertNotEquals(item1, this); } @Test public void hashBasedOnContent() { assertEquals(item1.hashCode(), item2.hashCode()); } @Test(expected = RuntimeException.class) public void idValidatedForFirstLetter() { new ItemIdValueImpl("P12345", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForNumber() { new ItemIdValueImpl("Q34d23", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForLength() { new ItemIdValueImpl("Q", "http://www.wikidata.org/entity/"); } @Test(expected = RuntimeException.class) public void idNotNull() { new ItemIdValueImpl((String)null, "http://www.wikidata.org/entity/"); } @Test(expected = NullPointerException.class) public void baseIriNotNull() { new ItemIdValueImpl("Q42", null); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_ITEM_ID_VALUE, 
mapper.writeValueAsString(item1)); } @Test public void testToJava() throws IOException { assertEquals(item1, mapper.readValue(JSON_ITEM_ID_VALUE, ValueImpl.class)); } @Test public void testToJavaWithoutId() throws IOException { assertEquals(item1, mapper.readValue(JSON_ITEM_ID_VALUE_WITHOUT_ID, ValueImpl.class)); } @Test public void testToJavaWithoutNumericalId() throws IOException { assertEquals(item1, mapper.readValue(JSON_ITEM_ID_VALUE_WITHOUT_NUMERICAL_ID, ValueImpl.class)); } @Test public void testToJavaWrongID() throws IOException { Value unsupported = mapper.readValue(JSON_ITEM_ID_VALUE_WRONG_ID, ValueImpl.class); assertTrue(unsupported instanceof UnsupportedEntityIdValue); } @Test public void testToJavaUnsupportedType() throws IOException { Value unsupported = mapper.readValue(JSON_ITEM_ID_VALUE_UNSUPPORTED_TYPE, ValueImpl.class); assertTrue(unsupported instanceof UnsupportedEntityIdValue); assertEquals("foo", ((UnsupportedEntityIdValue)unsupported).getEntityTypeJsonString()); } @Test(expected = JsonMappingException.class) public void testToJavaUnsupportedWithoutId() throws IOException { mapper.readValue(JSON_ITEM_ID_VALUE_UNSUPPORTED_NO_ID, ValueImpl.class); } @Test public void testIsPlaceholder() { assertFalse(item1.isPlaceholder()); } } ItemUpdateImplTest.java000066400000000000000000000174551444772566300372330ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.toJson; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.ItemUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class ItemUpdateImplTest { private static final ItemIdValue Q1 = LabeledDocumentUpdateImplTest.JOHN; private static final StatementUpdate STATEMENTS = StatementDocumentUpdateImplTest.STATEMENTS; private static final TermUpdate LABELS = LabeledDocumentUpdateImplTest.LABELS; private static final TermUpdate DESCRIPTIONS = TermedDocumentUpdateImplTest.DESCRIPTIONS; private static final AliasUpdate ALIAS = TermedDocumentUpdateImplTest.ALIAS; private static final Map ALIASES = TermedDocumentUpdateImplTest.ALIASES; private static final SiteLink SITELINK1 = Datamodel.makeSiteLink("Something", "enwiki"); private static final List SITELINKS = Arrays.asList(SITELINK1); private static final List REMOVED_SITELINKS = Arrays.asList("skwiki"); @Test public void testFields() { ItemUpdate update = new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, REMOVED_SITELINKS); assertEquals(Q1, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(LABELS, update.getLabels()); assertSame(DESCRIPTIONS, update.getDescriptions()); assertEquals(ALIASES, update.getAliases()); assertSame(STATEMENTS, update.getStatements()); assertThat(update.getModifiedSiteLinks().keySet(), containsInAnyOrder("enwiki")); assertEquals(SITELINK1, update.getModifiedSiteLinks().get("enwiki")); assertThat(update.getRemovedSiteLinks(), containsInAnyOrder("skwiki")); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, null, REMOVED_SITELINKS)); assertThrows(NullPointerException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, null)); assertThrows(NullPointerException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, Arrays.asList(SITELINK1, null), REMOVED_SITELINKS)); assertThrows(NullPointerException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, Arrays.asList("skwiki", null))); assertThrows(IllegalArgumentException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, Arrays.asList("skwiki", " "))); assertThrows(IllegalArgumentException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, Arrays.asList(SITELINK1, SITELINK1), REMOVED_SITELINKS)); 
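		// a site may be listed at most once per update: duplicates among the
		// removed site links, and a site that is both modified and removed,
		// are rejected below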
assertThrows(IllegalArgumentException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, Arrays.asList("skwiki", "skwiki"))); assertThrows(IllegalArgumentException.class, () -> new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, Arrays.asList("enwiki"))); } @Test public void testEmpty() { ItemUpdate empty = new ItemUpdateImpl(Q1, 123, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap(), StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList()); assertTrue(empty.isEmpty()); ItemUpdate nonempty1 = new ItemUpdateImpl(Q1, 123, TermUpdate.EMPTY, DESCRIPTIONS, Collections.emptyMap(), StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList()); ItemUpdate nonempty2 = new ItemUpdateImpl(Q1, 123, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap(), StatementUpdate.EMPTY, SITELINKS, Collections.emptyList()); ItemUpdate nonempty3 = new ItemUpdateImpl(Q1, 123, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap(), StatementUpdate.EMPTY, Collections.emptyList(), REMOVED_SITELINKS); assertFalse(nonempty1.isEmpty()); assertFalse(nonempty2.isEmpty()); assertFalse(nonempty3.isEmpty()); } @Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { ItemUpdate update = new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, REMOVED_SITELINKS); assertFalse(update.equals(null)); assertFalse(update.equals(this)); assertTrue(update.equals(update)); assertTrue(update.equals(new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, REMOVED_SITELINKS))); assertFalse(update.equals(new ItemUpdateImpl( Q1, 123, LABELS, TermUpdate.EMPTY, ALIASES, STATEMENTS, SITELINKS, REMOVED_SITELINKS))); assertFalse(update.equals(new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, Collections.emptyList(), REMOVED_SITELINKS))); assertFalse(update.equals(new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, Collections.emptyList()))); } @Test public void testHashCode() { ItemUpdate update1 = new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, REMOVED_SITELINKS); ItemUpdate update2 = new ItemUpdateImpl( Q1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS, SITELINKS, REMOVED_SITELINKS); assertEquals(update1.hashCode(), update2.hashCode()); } @Test public void testJson() { assertThat( new ItemUpdateImpl(Q1, 123, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap(), StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList()), producesJson("{}")); assertThat(ItemUpdateBuilder.forEntityId(Q1).updateLabels(LABELS).build(), producesJson("{'labels':" + toJson(LABELS) + "}")); assertThat(ItemUpdateBuilder.forEntityId(Q1).updateDescriptions(DESCRIPTIONS).build(), producesJson("{'descriptions':" + toJson(LABELS) + "}")); assertThat(ItemUpdateBuilder.forEntityId(Q1).updateAliases("en", ALIAS).build(), producesJson("{'aliases':{'en':" + toJson(ALIAS) + "}}")); assertThat(ItemUpdateBuilder.forEntityId(Q1).updateStatements(STATEMENTS).build(), producesJson("{'claims':" + toJson(STATEMENTS) + "}")); assertThat(ItemUpdateBuilder.forEntityId(Q1).putSiteLink(SITELINK1).build(), producesJson("{'sitelinks':{'enwiki':" + toJson(SITELINK1) + "}}")); assertThat(ItemUpdateBuilder.forEntityId(Q1).removeSiteLink("enwiki").build(), producesJson("{'sitelinks':{'enwiki':{'remove':'','site':'enwiki'}}}")); } } 
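/*
 * For orientation, a minimal usage sketch (not part of the original suite) of
 * the builder API exercised by the tests above. It assumes imports of
 * ItemUpdate, ItemUpdateBuilder, TermUpdateBuilder and Datamodel, all of which
 * appear elsewhere in this test package; the entity id and the site name are
 * illustrative values.
 */
class ItemUpdateUsageSketch {
	static ItemUpdate dropEnglishLabelAndEnwikiLink() {
		// describes an edit of Q42 that removes the English label and the
		// enwiki site link; an empty builder would produce the empty update
		// serialized as "{}" in testJson() above
		return ItemUpdateBuilder
				.forEntityId(Datamodel.makeWikidataItemIdValue("Q42"))
				.updateLabels(TermUpdateBuilder.create().remove("en").build())
				.removeSiteLink("enwiki")
				.build();
	}
}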
JsonComparator.java000066400000000000000000000030201444772566300364300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation
package org.wikidata.wdtk.datamodel.implementation;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.io.IOException;

import org.junit.Assert;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * A helper class for comparing JSON objects to each other.
 *
 * @author Fredo Erxleben
 */
public class JsonComparator {

	private static final ObjectMapper mapper = new ObjectMapper();

	/**
	 * Compares two JSON objects represented by Strings to each other. Both
	 * Strings are supposed to be valid JSON. From the given Strings the JSON
	 * trees are built and then compared.
	 */
	public static void compareJsonStrings(String expected, String actual) {
		try {
			JsonNode tree1 = mapper.readTree(expected);
			JsonNode tree2 = mapper.readTree(actual);
			Assert.assertEquals(tree1, tree2);
		} catch (IOException e) {
			// invalid JSON must fail the comparison instead of passing silently
			Assert.fail(e.toString());
		}
	}
}
JsonTestUtils.java000066400000000000000000000052361444772566300362740ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation
/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.wikidata.wdtk.datamodel.implementation;

import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.TreeMap;

import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;

class JsonTestUtils {

	private static final ObjectMapper mapper = new ObjectMapper();

	static {
		/*
		 * Support for Optional properties.
		 */
		mapper.registerModule(new Jdk8Module());
		/*
		 * Sort fields by name to produce mostly canonical JSON.
		 * https://cowtowncoder.medium.com/
		 * jackson-tips-sorting-json-using-jsonnode-ce4476e37aee
		 */
		mapper.setNodeFactory(new JsonNodeFactory() {
			private static final long serialVersionUID = 1L;
			@Override
			public ObjectNode objectNode() {
				return new ObjectNode(this, new TreeMap<>());
			}
		});
	}

	static String toJson(Object value) {
		try {
			String json = mapper.writeValueAsString(value);
			/*
			 * Canonical form.
			 */
			JsonNode tree = mapper.readTree(json);
			return mapper.writeValueAsString(tree);
		} catch (IOException ex) {
			fail("JSON serialization failed.");
			return null;
		}
	}

	private static class JsonMatcher<T> extends BaseMatcher<T> {
		private final String expected;
		JsonMatcher(String expected) {
			this.expected = expected.replace('\'', '"');
		}
		@Override
		public boolean matches(Object actual) {
			return toJson(actual).equals(expected);
		}
		@Override
		public void describeTo(Description description) {
			description.appendText(expected);
		}
		@Override
		public void describeMismatch(Object item, Description description) {
			description.appendText("was ").appendText(toJson(item));
		}
	}

	static <T> Matcher<T> producesJson(String json) {
		return new JsonMatcher<>(json);
	}
}
LabeledDocumentUpdateImplTest.java000066400000000000000000000074161444772566300413560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation
/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import java.util.Collection; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.StatementUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class LabeledDocumentUpdateImplTest { static final ItemIdValue JOHN = StatementUpdateImplTest.JOHN; private static final Statement JOHN_HAS_BROWN_HAIR = StatementUpdateImplTest.JOHN_HAS_BROWN_HAIR; private static final Collection NO_SITELINKS = Collections.emptyList(); private static final Collection NO_REMOVED_SITELINKS = Collections.emptyList(); private static final StatementUpdate STATEMENTS = StatementUpdateBuilder.create().add(JOHN_HAS_BROWN_HAIR).build(); static final TermUpdate LABELS = TermUpdateBuilder.create().remove("en").build(); private static LabeledStatementDocumentUpdate create( ItemIdValue entityId, long revisionId, StatementUpdate statements, TermUpdate labels) { return new ItemUpdateImpl(entityId, revisionId, labels, TermUpdate.EMPTY, Collections.emptyMap(), statements, NO_SITELINKS, NO_REMOVED_SITELINKS); } @Test public void testFields() { LabeledStatementDocumentUpdate update = create(JOHN, 123, STATEMENTS, LABELS); assertEquals(JOHN, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(STATEMENTS, update.getStatements()); assertSame(LABELS, update.getLabels()); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> create(JOHN, 0, StatementUpdate.EMPTY, null)); } @Test public void testEmpty() { assertFalse(create(JOHN, 0, STATEMENTS, TermUpdate.EMPTY).isEmpty()); assertFalse(create(JOHN, 0, StatementUpdate.EMPTY, LABELS).isEmpty()); assertTrue(create(JOHN, 0, StatementUpdate.EMPTY, TermUpdate.EMPTY).isEmpty()); } @Test public void testEquality() { LabeledStatementDocumentUpdate update = create(JOHN, 0, STATEMENTS, LABELS); assertTrue(update.equals(update)); assertTrue(update.equals(create(JOHN, 0, STATEMENTS, TermUpdateBuilder.create().remove("en").build()))); assertFalse(update.equals(create(JOHN, 123, StatementUpdate.EMPTY, LABELS))); assertFalse(update.equals(create(JOHN, 123, STATEMENTS, TermUpdate.EMPTY))); } @Test public void testHashCode() { LabeledStatementDocumentUpdate update1 = create(JOHN, 123, STATEMENTS, LABELS); LabeledStatementDocumentUpdate update2 = create(JOHN, 123, STATEMENTS, TermUpdateBuilder.create().remove("en").build()); assertEquals(update1.hashCode(), update2.hashCode()); } } LexemeDocumentImplTest.java000066400000000000000000000327111444772566300401000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in 
compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.*; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import static org.junit.Assert.*; public class LexemeDocumentImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final LexemeIdValue lid = new LexemeIdValueImpl("L42", "http://example.com/entity/"); private final ItemIdValue lexCat = new ItemIdValueImpl("Q1", "http://example.com/entity/"); private final ItemIdValue language = new ItemIdValueImpl("Q2", "http://example.com/entity/"); private final Statement s = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), lid); private final List statementGroups = Collections.singletonList( new StatementGroupImpl(Collections.singletonList(s)) ); private final MonolingualTextValue lemma = new TermImpl("en", "lemma"); private final List lemmaList = Collections.singletonList(lemma); private final FormDocument form = new FormDocumentImpl( new FormIdValueImpl("L42-F1", "http://example.com/entity/"), Collections.singletonList(new TermImpl("en", "foo")), Collections.emptyList(), Collections.emptyList(), 0 ); private final List forms = Collections.singletonList(form); private final SenseDocument sense = new SenseDocumentImpl( new SenseIdValueImpl("L42-S1", "http://example.com/entity/"), Collections.singletonList(new TermImpl("en", "foo meaning")), Collections.emptyList(), 0 ); private final List senses = Collections.singletonList(sense); private final LexemeDocument ld1 = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, forms, senses, 1234); private final LexemeDocument ld2 = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, forms, senses, 1234); private final LexemeDocument ld3 = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, Collections.emptyList(), Collections.emptyList(), 1234); private final String JSON_LEXEME = "{\"type\":\"lexeme\",\"id\":\"L42\",\"lexicalCategory\":\"Q1\",\"language\":\"Q2\",\"lemmas\":{\"en\":{\"language\":\"en\",\"value\":\"lemma\"}},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]},\"forms\":[{\"type\":\"form\",\"id\":\"L42-F1\",\"representations\":{\"en\":{\"language\":\"en\",\"value\":\"foo\"}},\"grammaticalFeatures\":[],\"claims\":{}}],\"senses\":[{\"type\":\"sense\",\"id\":\"L42-S1\",\"glosses\":{\"en\":{\"language\":\"en\",\"value\":\"foo meaning\"}},\"claims\":{}}],\"lastrevid\":1234}"; private final String JSON_LEXEME_FOR_ISSUE_568 = 
"{\"type\":\"lexeme\",\"id\":\"L42\",\"lexicalCategory\":\"Q1\",\"language\":\"Q2\",\"lemmas\":{\"en\":{\"language\":\"en\",\"value\":\"lemma\"}},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]},\"forms\":{},\"senses\":{},\"lastrevid\":1234}"; @Test public void fieldsAreCorrect() { assertEquals(ld1.getEntityId(), lid); assertEquals(ld1.getLanguage(), language); assertEquals(ld1.getLexicalCategory(), lexCat); assertEquals(ld1.getLemmas(), Collections.singletonMap(lemma.getLanguageCode(), lemma)); assertEquals(ld1.getStatementGroups(), statementGroups); assertEquals(ld1.getForms(), forms); } @Test public void formGetter() { assertEquals(form, ld1.getForm(form.getEntityId())); } @Test(expected=IndexOutOfBoundsException.class) public void formGetterNotFound() { ld1.getForm(new FormIdValueImpl("L42-F2", "http://example.com/entity/")); } @Test public void senseGetter() { assertEquals(sense, ld1.getSense(sense.getEntityId())); } @Test(expected=IndexOutOfBoundsException.class) public void senseGetterNotFound() { ld1.getSense(new SenseIdValueImpl("L42-S2", "http://example.com/entity/")); } @Test public void equalityBasedOnContent() { LexemeDocument irDiffLexCat = new LexemeDocumentImpl(lid, language, language, lemmaList, statementGroups, forms, senses, 1234); LexemeDocument irDiffLanguage = new LexemeDocumentImpl(lid, lexCat, lexCat, lemmaList, statementGroups, forms, senses, 1234); LexemeDocument irDiffLemmas = new LexemeDocumentImpl(lid, lexCat, language, Collections.singletonList(new TermImpl("en", "bar")), statementGroups, forms, senses, 1234); LexemeDocument irDiffStatementGroups = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, Collections.emptyList(), forms, senses, 1234); LexemeDocument irDiffForms = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, Collections.emptyList(), senses, 1234); LexemeDocument irDiffSenses = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, forms, Collections.emptyList(), 1234); LexemeDocument irDiffRevisions = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, forms, senses, 1235); PropertyDocument pr = new PropertyDocumentImpl( new PropertyIdValueImpl("P42", "foo"), lemmaList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), new DatatypeIdImpl(DatatypeIdValue.DT_STRING), 1234); LexemeDocument irDiffLexemeIdValue = new LexemeDocumentImpl( new LexemeIdValueImpl("L43", "http://example.com/entity/"), lexCat, language, lemmaList, Collections.emptyList(), forms, senses, 1235); assertEquals(ld1, ld1); assertEquals(ld1, ld2); assertNotEquals(ld1, irDiffLexCat); assertNotEquals(ld1, irDiffLanguage); assertNotEquals(ld1, irDiffLemmas); assertNotEquals(ld1, irDiffStatementGroups); assertNotEquals(ld1, irDiffForms); assertNotEquals(ld1, irDiffSenses); assertNotEquals(ld1, irDiffRevisions); assertNotEquals(irDiffStatementGroups, irDiffLexemeIdValue); assertNotEquals(ld1, pr); assertNotEquals(ld1, null); assertNotEquals(ld1, this); } @Test public void hashBasedOnContent() { assertEquals(ld1.hashCode(), ld2.hashCode()); } @Test(expected = NullPointerException.class) public void idNotNull() { new LexemeDocumentImpl(null, lexCat, language, lemmaList, statementGroups, forms, senses, 1234); } @Test(expected = NullPointerException.class) public void lexicalCategoryNotNull() { new LexemeDocumentImpl(lid, null, language, lemmaList, statementGroups, forms, senses, 1234); } 
	@Test(expected = NullPointerException.class)
	public void languageNotNull() {
		new LexemeDocumentImpl(lid, lexCat, null, lemmaList, statementGroups, forms, senses, 1234);
	}

	@Test
	public void lemmasNull() {
		assertEquals(Collections.emptyMap(),
				new LexemeDocumentImpl(lid, lexCat, language, null, statementGroups, forms, senses, 1234).getLemmas());
	}

	@Test
	public void lemmasEmpty() {
		assertEquals(Collections.emptyMap(),
				new LexemeDocumentImpl(lid, lexCat, language, Collections.emptyList(), statementGroups, forms, senses, 1234).getLemmas());
	}

	@Test
	public void statementGroupsCanBeNull() {
		LexemeDocument doc = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, null, forms, senses, 1234);
		assertTrue(doc.getStatementGroups().isEmpty());
	}

	@Test(expected = IllegalArgumentException.class)
	public void statementGroupsUseSameSubject() {
		// a second subject different from lid; the id must be a valid lexeme
		// id, otherwise the LexemeIdValueImpl constructor itself would throw
		// before the subject check under test is ever reached
		LexemeIdValue iid2 = new LexemeIdValueImpl("L23", "http://example.org/");
		Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL,
				new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://wikibase.org/entity/")),
				Collections.emptyList(), Collections.emptyList(), iid2);
		StatementGroup sg2 = new StatementGroupImpl(Collections.singletonList(s2));

		List statementGroups2 = new ArrayList<>();
		statementGroups2.add(statementGroups.get(0));
		statementGroups2.add(sg2);

		new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups2, forms, senses, 1234);
	}

	@Test
	public void iterateOverAllStatements() {
		Iterator<Statement> statements = ld1.getAllStatements();
		assertTrue(statements.hasNext());
		assertEquals(s, statements.next());
		assertFalse(statements.hasNext());
	}

	@Test
	public void formsCanBeNull() {
		LexemeDocument doc = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, null, senses, 1234);
		assertTrue(doc.getForms().isEmpty());
	}

	@Test
	public void sensesCanBeNull() {
		LexemeDocument doc = new LexemeDocumentImpl(lid, lexCat, language, lemmaList, statementGroups, forms, null, 1234);
		assertTrue(doc.getSenses().isEmpty());
	}

	@Test
	public void testWithEntityId() {
		assertEquals(LexemeIdValue.NULL, ld1.withEntityId(LexemeIdValue.NULL).getEntityId());
		LexemeIdValue id = Datamodel.makeWikidataLexemeIdValue("L123");
		assertEquals(id, ld1.withEntityId(id).getEntityId());
	}

	@Test
	public void testWithRevisionId() {
		assertEquals(1235L, ld1.withRevisionId(1235L).getRevisionId());
		assertEquals(ld1, ld1.withRevisionId(1325L).withRevisionId(ld1.getRevisionId()));
	}

	@Test
	public void testWithLexicalCategory() {
		ItemIdValue newLexicalCategory = new ItemIdValueImpl("Q142", "http://example.com/entity/");
		LexemeDocument withLexicalCategory = ld1.withLexicalCategory(newLexicalCategory);
		assertEquals(newLexicalCategory, withLexicalCategory.getLexicalCategory());
	}

	@Test
	public void testWithLanguage() {
		ItemIdValue newLanguage = new ItemIdValueImpl("Q242", "http://example.com/entity/");
		LexemeDocument withLanguage = ld1.withLanguage(newLanguage);
		assertEquals(newLanguage, withLanguage.getLanguage());
	}

	@Test
	public void testWithLemmaInNewLanguage() {
		MonolingualTextValue newLemma = new MonolingualTextValueImpl("Foo", "fr");
		LexemeDocument withLemma = ld1.withLemma(newLemma);
		assertEquals(newLemma, withLemma.getLemmas().get("fr"));
	}

	@Test
	public void testAddStatement() {
		Statement fresh = new StatementImpl("MyFreshId", StatementRank.NORMAL,
				new SomeValueSnakImpl(new PropertyIdValueImpl("P29", "http://example.com/entity/")),
				Collections.emptyList(), Collections.emptyList(), lid);
		Claim claim = fresh.getClaim();
		assertFalse(ld1.hasStatementValue(
claim.getMainSnak().getPropertyId(), claim.getValue())); LexemeDocument withStatement = ld1.withStatement(fresh); assertTrue(withStatement.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); } @Test public void testDeleteStatements() { Statement toRemove = statementGroups.get(0).getStatements().get(0); LexemeDocument withoutStatement = ld1.withoutStatementIds(Collections.singleton(toRemove.getStatementId())); assertNotEquals(withoutStatement, ld1); } @Test public void testWithForm() { FormDocument newForm = ld1.createForm(Collections.singletonList(new TermImpl("en", "add1"))); assertEquals(lid, newForm.getEntityId().getLexemeId()); assertEquals(ld1.getForms().size() + 1, ld1.withForm(newForm).getForms().size()); assertEquals(newForm, ld1.withForm(newForm).getForm(newForm.getEntityId())); } @Test(expected = IllegalArgumentException.class) public void testWithWrongFormId() { ld1.withForm(Datamodel.makeFormDocument( Datamodel.makeFormIdValue("L444-F32","http://example.com/entity/"), Collections.singletonList(new TermImpl("en", "add1")), Collections.emptyList(), Collections.emptyList() )); } @Test public void testWithSense() { SenseDocument newSense = ld1.createSense(Collections.singletonList(new TermImpl("en", "add1"))); assertEquals(lid, newSense.getEntityId().getLexemeId()); assertEquals(ld1.getSenses().size() + 1, ld1.withSense(newSense).getSenses().size()); assertEquals(newSense, ld1.withSense(newSense).getSense(newSense.getEntityId())); } @Test(expected = IllegalArgumentException.class) public void testWithWrongSenseId() { ld1.withSense(Datamodel.makeSenseDocument( Datamodel.makeSenseIdValue("L444-S32","http://example.com/entity/"), Collections.singletonList(new TermImpl("en", "add1")), Collections.emptyList() )); } @Test public void testLexemeToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_LEXEME, mapper.writeValueAsString(ld1)); } @Test public void testLexemeToJava() throws IOException { assertEquals(ld1, mapper.readValue(JSON_LEXEME, LexemeDocumentImpl.class)); } @Test public void testDeserializeLexemeWithJsonObjectInPlaceOfEmptyList() throws JsonProcessingException { // test for https://github.com/Wikidata/Wikidata-Toolkit/issues/568 // phab: https://phabricator.wikimedia.org/T305660 assertEquals(ld3, mapper.readValue(JSON_LEXEME_FOR_ISSUE_568, LexemeDocumentImpl.class)); } } LexemeIdValueImplTest.java000066400000000000000000000101701444772566300376460ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import java.io.IOException; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public class LexemeIdValueImplTest { private final ObjectMapper mapper = new DatamodelMapper(Datamodel.SITE_WIKIDATA); private final LexemeIdValueImpl lexeme1 = new LexemeIdValueImpl("L42", "http://www.wikidata.org/entity/"); private final LexemeIdValueImpl lexeme2 = new LexemeIdValueImpl("L42", "http://www.wikidata.org/entity/"); private final LexemeIdValueImpl lexeme3 = new LexemeIdValueImpl("L57", "http://www.wikidata.org/entity/"); private final LexemeIdValueImpl lexeme4 = new LexemeIdValueImpl("L42", "http://www.example.org/entity/"); private final String JSON_LEXEME_ID_VALUE = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"lexeme\",\"numeric-id\":42,\"id\":\"L42\"}}"; private final String JSON_LEXEME_ID_VALUE_WITHOUT_NUMERICAL_ID = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"L42\"}}"; @Test public void entityTypeIsLexeme() { assertEquals(lexeme1.getEntityType(), EntityIdValue.ET_LEXEME); } @Test public void iriIsCorrect() { assertEquals(lexeme1.getIri(), "http://www.wikidata.org/entity/L42"); assertEquals(lexeme4.getIri(), "http://www.example.org/entity/L42"); } @Test public void siteIriIsCorrect() { assertEquals(lexeme1.getSiteIri(), "http://www.wikidata.org/entity/"); } @Test public void idIsCorrect() { assertEquals(lexeme1.getId(), "L42"); } @Test public void equalityBasedOnContent() { assertEquals(lexeme1, lexeme1); assertEquals(lexeme1, lexeme2); assertNotEquals(lexeme1, lexeme3); assertNotEquals(lexeme1, lexeme4); assertNotEquals(lexeme1, null); assertNotEquals(lexeme1, this); } @Test public void hashBasedOnContent() { assertEquals(lexeme1.hashCode(), lexeme2.hashCode()); } @Test(expected = RuntimeException.class) public void idValidatedForFirstLetter() { new LexemeIdValueImpl("Q12345", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForNumber() { new LexemeIdValueImpl("L34d23", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForLength() { new LexemeIdValueImpl("L", "http://www.wikidata.org/entity/"); } @Test(expected = RuntimeException.class) public void idNotNull() { new LexemeIdValueImpl((String)null, "http://www.wikidata.org/entity/"); } @Test(expected = NullPointerException.class) public void baseIriNotNull() { new LexemeIdValueImpl("L42", null); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_LEXEME_ID_VALUE, mapper.writeValueAsString(lexeme1)); } @Test public void testToJava() throws IOException { assertEquals(lexeme1, mapper.readValue(JSON_LEXEME_ID_VALUE, ValueImpl.class)); } @Test public void testToJavaWithoutNumericalID() throws IOException { assertEquals(lexeme1, mapper.readValue(JSON_LEXEME_ID_VALUE_WITHOUT_NUMERICAL_ID, ValueImpl.class)); } @Test public void testIsPlaceholder() { assertFalse(lexeme1.isPlaceholder()); } } 
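/*
 * A round-trip sketch (illustrative, not part of the original suite) mirroring
 * testToJson() and testToJava() above; it relies only on the imports of this
 * test file plus a fully qualified reference to the Value interface.
 */
class LexemeIdJsonRoundTripSketch {
	static void roundTrip() throws IOException {
		ObjectMapper mapper = new DatamodelMapper(Datamodel.SITE_WIKIDATA);
		LexemeIdValueImpl id = new LexemeIdValueImpl("L42", "http://www.wikidata.org/entity/");
		// serializes to the "wikibase-entityid" structure shown in JSON_LEXEME_ID_VALUE
		String json = mapper.writeValueAsString(id);
		// deserialization goes through the polymorphic ValueImpl base class and
		// yields an equal object, since equality is based on content
		org.wikidata.wdtk.datamodel.interfaces.Value parsed = mapper.readValue(json, ValueImpl.class);
		assert id.equals(parsed);
	}
}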
LexemeUpdateImplTest.java000066400000000000000000000530001444772566300375360ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.toJson; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.FormUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.LexemeUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.SenseUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormIdValue; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class LexemeUpdateImplTest { private static final LexemeIdValue L1 = Datamodel.makeWikidataLexemeIdValue("L1"); private static final StatementUpdate STATEMENTS = StatementDocumentUpdateImplTest.STATEMENTS; private static final TermUpdate LEMMAS = TermUpdateBuilder.create().remove("en").build(); private static final ItemIdValue Q1 = Datamodel.makeWikidataItemIdValue("Q1"); private static final ItemIdValue Q2 = Datamodel.makeWikidataItemIdValue("Q2"); private static final SenseIdValue S1 = Datamodel.makeWikidataSenseIdValue("L1-S1"); private static final SenseIdValue S2 = Datamodel.makeWikidataSenseIdValue("L1-S2"); private static final SenseIdValue S3 = Datamodel.makeWikidataSenseIdValue("L1-S3"); private static final SenseDocument ADDED_SENSE = Datamodel.makeSenseDocument( SenseIdValue.NULL, Collections.emptyList(), Collections.emptyList()); private static final List ADDED_SENSES = Arrays.asList(ADDED_SENSE); private static final SenseUpdate UPDATED_SENSE = 
SenseUpdateBuilder.forEntityId(S1) .updateStatements(STATEMENTS) .build(); private static final SenseUpdate UPDATED_SENSE_REVISION = SenseUpdateBuilder.forBaseRevisionId(S1, 123) .append(UPDATED_SENSE) .build(); private static final List UPDATED_SENSES = Arrays.asList(UPDATED_SENSE); private static final List UPDATED_SENSE_REVISIONS = Arrays.asList(UPDATED_SENSE_REVISION); private static final List REMOVED_SENSES = Arrays.asList(S2); private static final FormIdValue F1 = Datamodel.makeWikidataFormIdValue("L1-F1"); private static final FormIdValue F2 = Datamodel.makeWikidataFormIdValue("L1-F2"); private static final FormIdValue F3 = Datamodel.makeWikidataFormIdValue("L1-F3"); private static final FormDocument ADDED_FORM = Datamodel.makeFormDocument( FormIdValue.NULL, Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); private static final List ADDED_FORMS = Arrays.asList(ADDED_FORM); private static final FormUpdate UPDATED_FORM = FormUpdateBuilder.forEntityId(F1) .updateStatements(STATEMENTS) .build(); private static final FormUpdate UPDATED_FORM_REVISION = FormUpdateBuilder.forBaseRevisionId(F1, 123) .append(UPDATED_FORM) .build(); private static final List UPDATED_FORMS = Arrays.asList(UPDATED_FORM); private static final List UPDATED_FORM_REVISIONS = Arrays.asList(UPDATED_FORM_REVISION); private static final List REMOVED_FORMS = Arrays.asList(F2); @Test public void testFields() { LexemeUpdate update = new LexemeUpdateImpl(L1, 123, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSE_REVISIONS, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORM_REVISIONS, REMOVED_FORMS); assertEquals(L1, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertEquals(Q1, update.getLanguage().get()); assertEquals(Q2, update.getLexicalCategory().get()); assertSame(LEMMAS, update.getLemmas()); assertSame(STATEMENTS, update.getStatements()); assertEquals(ADDED_SENSES, update.getAddedSenses()); assertThat(update.getUpdatedSenses().keySet(), containsInAnyOrder(S1)); assertEquals(UPDATED_SENSE_REVISION, update.getUpdatedSenses().get(S1)); assertThat(update.getRemovedSenses(), containsInAnyOrder(S2)); assertEquals(ADDED_FORMS, update.getAddedForms()); assertThat(update.getUpdatedForms().keySet(), containsInAnyOrder(F1)); assertEquals(UPDATED_FORM_REVISION, update.getUpdatedForms().get(F1)); assertThat(update.getRemovedForms(), containsInAnyOrder(F2)); update = new LexemeUpdateImpl(L1, 123, null, null, LEMMAS, STATEMENTS, ADDED_SENSES, Arrays.asList(SenseUpdateBuilder.forBaseRevisionId(S1, 123).build()), REMOVED_SENSES, ADDED_FORMS, Arrays.asList(FormUpdateBuilder.forBaseRevisionId(F1, 123).build()), REMOVED_FORMS); assertFalse(update.getLanguage().isPresent()); assertFalse(update.getLexicalCategory().isPresent()); assertThat(update.getUpdatedSenses(), is(anEmptyMap())); assertThat(update.getUpdatedForms(), is(anEmptyMap())); } @Test public void testValidation() { // null parameter assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, null, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, null, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, null, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, 
() -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, null, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, null, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, null, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, null, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, null)); // null item assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, Arrays.asList(ADDED_SENSE, null), UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, Arrays.asList(UPDATED_SENSE, null), REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, Arrays.asList(S2, null), ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, Arrays.asList(ADDED_FORM, null), UPDATED_FORMS, REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, Arrays.asList(UPDATED_FORM, null), REMOVED_FORMS)); assertThrows(NullPointerException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, Arrays.asList(F2, null))); // placeholder ID assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, ItemIdValue.NULL, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, ItemIdValue.NULL, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, Arrays.asList(S2, SenseIdValue.NULL), ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, Arrays.asList(F2, FormIdValue.NULL))); // expected placeholder ID assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, Arrays.asList(ADDED_SENSE.withEntityId(S3)), UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, Arrays.asList(ADDED_FORM.withEntityId(F3)), UPDATED_FORMS, REMOVED_FORMS)); // unique IDs 
assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, Arrays.asList(UPDATED_SENSE, UPDATED_SENSE), REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, Arrays.asList(S2, S2), ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, Arrays.asList(S1), ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, Arrays.asList(UPDATED_FORM, UPDATED_FORM), REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, Arrays.asList(F2, F2))); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, Arrays.asList(F1)));
// consistent revision
assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSE_REVISIONS, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS)); assertThrows(IllegalArgumentException.class, () -> new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORM_REVISIONS, REMOVED_FORMS)); }
@Test public void testImmutability() { List<SenseDocument> addedSenses = new ArrayList<>(ADDED_SENSES); List<SenseUpdate> updatedSenses = new ArrayList<>(UPDATED_SENSE_REVISIONS); List<SenseIdValue> removedSenses = new ArrayList<>(REMOVED_SENSES); List<FormDocument> addedForms = new ArrayList<>(ADDED_FORMS); List<FormUpdate> updatedForms = new ArrayList<>(UPDATED_FORM_REVISIONS); List<FormIdValue> removedForms = new ArrayList<>(REMOVED_FORMS); LexemeUpdate update = new LexemeUpdateImpl(L1, 123, Q1, Q2, LEMMAS, STATEMENTS, addedSenses, updatedSenses, removedSenses, addedForms, updatedForms, removedForms); assertThrows(UnsupportedOperationException.class, () -> update.getAddedSenses().clear()); assertThrows(UnsupportedOperationException.class, () -> update.getUpdatedSenses().clear()); assertThrows(UnsupportedOperationException.class, () -> update.getRemovedSenses().clear()); assertThrows(UnsupportedOperationException.class, () -> update.getAddedForms().clear()); assertThrows(UnsupportedOperationException.class, () -> update.getUpdatedForms().clear()); assertThrows(UnsupportedOperationException.class, () -> update.getRemovedForms().clear()); addedSenses.clear(); updatedSenses.clear(); removedSenses.clear(); addedForms.clear(); updatedForms.clear(); removedForms.clear(); assertEquals(1, update.getAddedSenses().size()); assertEquals(1, update.getUpdatedSenses().size()); assertEquals(1, update.getRemovedSenses().size()); assertEquals(1, update.getAddedForms().size()); assertEquals(1, update.getUpdatedForms().size()); assertEquals(1, update.getRemovedForms().size()); }
@Test public void testEmpty() { LexemeUpdate update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertTrue(update.isEmpty()); update = new
LexemeUpdateImpl(L1, 0, Q1, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, Q2, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, LEMMAS, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, STATEMENTS, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, ADDED_SENSES, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), UPDATED_SENSES, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), REMOVED_SENSES, Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), ADDED_FORMS, Collections.emptyList(), Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), UPDATED_FORMS, Collections.emptyList()); assertFalse(update.isEmpty()); update = new LexemeUpdateImpl(L1, 0, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), REMOVED_FORMS); assertFalse(update.isEmpty()); } @Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { LexemeUpdate update = new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS); assertFalse(update.equals(null)); assertFalse(update.equals(this)); assertTrue(update.equals(update)); assertTrue(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, null, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, null, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, 
TermUpdate.EMPTY, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, StatementUpdate.EMPTY, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, Collections.emptyList(), UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, Collections.emptyList(), REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, Collections.emptyList(), ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, Collections.emptyList(), UPDATED_FORMS, REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, Collections.emptyList(), REMOVED_FORMS))); assertFalse(update.equals(new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, Collections.emptyList()))); } @Test public void testHashCode() { LexemeUpdate update1 = new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS); LexemeUpdate update2 = new LexemeUpdateImpl(L1, 0, Q1, Q2, LEMMAS, STATEMENTS, ADDED_SENSES, UPDATED_SENSES, REMOVED_SENSES, ADDED_FORMS, UPDATED_FORMS, REMOVED_FORMS); assertEquals(update1.hashCode(), update2.hashCode()); } @Test public void testJson() { assertThat( new LexemeUpdateImpl(L1, 123, null, null, TermUpdate.EMPTY, StatementUpdate.EMPTY, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()), producesJson("{}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).setLanguage(Q1).build(), producesJson("{'language':'Q1'}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).setLexicalCategory(Q2).build(), producesJson("{'lexicalCategory':'Q2'}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).updateLemmas(LEMMAS).build(), producesJson("{'lemmas':" + toJson(LEMMAS) + "}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).updateStatements(STATEMENTS).build(), producesJson("{'claims':" + toJson(STATEMENTS) + "}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).addSense(ADDED_SENSE).build(), producesJson("{'senses':[{'add':''," + toJson(ADDED_SENSE).substring(1) + "]}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).updateSense(UPDATED_SENSE).build(), producesJson("{'senses':[" + toJson(UPDATED_SENSE) + "]}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).removeSense(S2).build(), producesJson("{'senses':[{'id':'L1-S2','remove':''}]}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).addForm(ADDED_FORM).build(), producesJson("{'forms':[{'add':''," + toJson(ADDED_FORM).substring(1) + "]}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).updateForm(UPDATED_FORM).build(), producesJson("{'forms':[" + toJson(UPDATED_FORM) + "]}")); assertThat(LexemeUpdateBuilder.forEntityId(L1).removeForm(F2).build(), producesJson("{'forms':[{'id':'L1-F2','remove':''}]}")); } } 
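/*
 * A minimal usage sketch, assuming only the LexemeUpdateBuilder API exercised
 * in the tests above (the direct LexemeUpdateImpl constructor calls serve to
 * exercise validation), with the fixtures defined in this test:
 *
 *   LexemeUpdate update = LexemeUpdateBuilder.forEntityId(L1)
 *       .setLanguage(Q1)        // set the lexeme language to item Q1
 *       .updateLemmas(LEMMAS)   // apply the lemma changes
 *       .removeSense(S2)        // drop sense L1-S2
 *       .build();
 */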
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/MediaInfoDocumentImplTest.java
package org.wikidata.wdtk.datamodel.implementation;
/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */
import static org.junit.Assert.*;
import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List;
import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.*;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper;
public class MediaInfoDocumentImplTest {
private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final MediaInfoIdValue mid = new MediaInfoIdValueImpl("M42", "http://example.com/entity/"); private final Statement s = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), mid); private final List<StatementGroup> statementGroups = Collections.singletonList( new StatementGroupImpl(Collections.singletonList(s)) ); private final MonolingualTextValue label = new TermImpl("en", "label"); private final List<MonolingualTextValue> labelList = Collections.singletonList(label); private final MediaInfoDocument mi1 = new MediaInfoDocumentImpl(mid, labelList, statementGroups, 1234); private final MediaInfoDocument mi2 = new MediaInfoDocumentImpl(mid, labelList, statementGroups, 1234); private final String JSON_MEDIA_INFO_LABEL = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"label\"}},\"claims\":{}}"; private final String JSON_MEDIA_INFO_DESCRIPTION = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{},\"descriptions\":{},\"statements\":{}}"; private final String JSON_MEDIA_INFO_STATEMENTS =
"{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{},\"statements\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]}}"; private final String JSON_MEDIA_INFO_CLAIMS = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]}}"; private final String JSON_MEDIA_INFO_EMPTY_ARRAYS = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":[],\"descriptions\":[],\"statements\":[],\"sitelinks\":[]}"; @Test public void fieldsAreCorrect() { assertEquals(mi1.getEntityId(), mid); assertEquals(mi1.getLabels(), Collections.singletonMap(label.getLanguageCode(), label)); assertEquals(mi1.getStatementGroups(), statementGroups); } @Test public void findLabels() { assertEquals("label", mi1.findLabel("en")); assertNull( mi1.findLabel("ja")); } @Test public void equalityBasedOnContent() { MediaInfoDocument irDiffLabel = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 1234); MediaInfoDocument irDiffStatementGroups = new MediaInfoDocumentImpl(mid, labelList, Collections.emptyList(), 1234); MediaInfoDocument irDiffRevisions = new MediaInfoDocumentImpl(mid, labelList, statementGroups, 1235); PropertyDocument pr = new PropertyDocumentImpl( new PropertyIdValueImpl("P42", "foo"), labelList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), new DatatypeIdImpl(DatatypeIdValue.DT_STRING), 1234); // we need to use empty lists of Statement groups to test inequality // based on different item ids with all other data being equal MediaInfoDocument irDiffMediaInfoIdValue = new MediaInfoDocumentImpl( new MediaInfoIdValueImpl("M23", "http://example.org/"), labelList, Collections.emptyList(), 1234); assertEquals(mi1, mi1); assertEquals(mi1, mi2); assertNotEquals(mi1, irDiffLabel); assertNotEquals(mi1, irDiffStatementGroups); assertNotEquals(mi1, irDiffRevisions); assertNotEquals(irDiffStatementGroups, irDiffMediaInfoIdValue); assertNotEquals(mi1, pr); assertNotEquals(mi1, null); assertNotEquals(mi1, this); } @Test public void hashBasedOnContent() { assertEquals(mi1.hashCode(), mi2.hashCode()); } @Test(expected = NullPointerException.class) public void idNotNull() { new MediaInfoDocumentImpl(null, Collections.emptyList(), statementGroups, 1234); } @Test public void labelsCanBeNull() { MediaInfoDocument doc = new MediaInfoDocumentImpl(mid, null, statementGroups, 1234); assertTrue(doc.getLabels().isEmpty()); } @Test public void statementGroupsCanBeNull() { MediaInfoDocument doc = new MediaInfoDocumentImpl(mid, Collections.emptyList(), null, 1234); assertTrue(doc.getStatementGroups().isEmpty()); } @Test(expected = IllegalArgumentException.class) public void statementGroupsUseSameSubject() { MediaInfoIdValue mid2 = new MediaInfoIdValueImpl("Q23", "http://example.org/"); Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://wikibase.org/entity/")), Collections.emptyList(), Collections.emptyList(), mid2); StatementGroup sg2 = new StatementGroupImpl(Collections.singletonList(s2)); List statementGroups2 = new ArrayList<>(); statementGroups2.add(statementGroups.get(0)); statementGroups2.add(sg2); new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups2, 1234); } @Test public void iterateOverAllStatements() { Iterator statements = mi1.getAllStatements(); 
assertTrue(statements.hasNext()); assertEquals(s, statements.next()); assertFalse(statements.hasNext()); }
@Test public void testWithEntityId() { assertEquals(MediaInfoIdValue.NULL, mi1.withEntityId(MediaInfoIdValue.NULL).getEntityId()); MediaInfoIdValue id = Datamodel.makeWikimediaCommonsMediaInfoIdValue("M123"); assertEquals(id, mi1.withEntityId(id).getEntityId()); }
@Test public void testWithRevisionId() { assertEquals(1235L, mi1.withRevisionId(1235L).getRevisionId()); assertEquals(mi1, mi1.withRevisionId(1325L).withRevisionId(mi1.getRevisionId())); }
@Test public void testWithLabelInNewLanguage() { MonolingualTextValue newLabel = new MonolingualTextValueImpl( "MediaInfo M42", "fr"); MediaInfoDocument withLabel = mi1.withLabel(newLabel); assertEquals("MediaInfo M42", withLabel.findLabel("fr")); }
@Test public void testAddStatement() { Statement fresh = new StatementImpl("MyFreshId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P29", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), mid); Claim claim = fresh.getClaim(); assertFalse(mi1.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); MediaInfoDocument withStatement = mi1.withStatement(fresh); assertTrue(withStatement.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); }
@Test public void testDeleteStatements() { Statement toRemove = statementGroups.get(0).getStatements().get(0); MediaInfoDocument withoutStatement = mi1.withoutStatementIds(Collections.singleton(toRemove.getStatementId())); assertNotEquals(withoutStatement, mi1); }
@Test public void testLabelsToJson() throws JsonProcessingException { MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, labelList, Collections.emptyList(), 0); JsonComparator.compareJsonStrings(JSON_MEDIA_INFO_LABEL, mapper.writeValueAsString(document)); }
@Test public void testLabelToJava() throws IOException { MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, labelList, Collections.emptyList(), 0); assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_LABEL, EntityDocumentImpl.class)); }
@Test public void testDescriptionsToJava() throws IOException { MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), Collections.emptyList(), 0); assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_DESCRIPTION, EntityDocumentImpl.class)); }
@Test public void testStatementsToJson() throws JsonProcessingException { MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 0); JsonComparator.compareJsonStrings(JSON_MEDIA_INFO_CLAIMS, mapper.writeValueAsString(document)); }
@Test public void testStatementsToJava() throws IOException { MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 0); assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_STATEMENTS, MediaInfoDocumentImpl.class)); }
@Test public void testStatementsNamedClaimsToJava() throws IOException { MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 0); assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_CLAIMS, MediaInfoDocumentImpl.class)); }
/** * Checks support for the erroneous serialization of an empty object as an empty array. */
@Test public void testEmptyArraysForTerms() throws IOException { MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), Collections.emptyList(), 0); assertEquals(document,
mapper.readerFor(MediaInfoDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue(JSON_MEDIA_INFO_EMPTY_ARRAYS) ); } }
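/*
 * Round-trip sketch, assuming only the mapper and fixtures defined above:
 *
 *   String json = mapper.writeValueAsString(mi1);
 *   EntityDocument copy = mapper.readValue(json, EntityDocumentImpl.class);
 *   // copy.equals(mi1) holds; the "type":"mediainfo" discriminator selects
 *   // the concrete MediaInfoDocumentImpl during deserialization, as the
 *   // testLabelToJava test above exercises.
 */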
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/MediaInfoIdValueImplTest.java
/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */
package org.wikidata.wdtk.datamodel.implementation;
import static org.junit.Assert.*;
import java.io.IOException;
import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper;
public class MediaInfoIdValueImplTest {
private final ObjectMapper mapper = new DatamodelMapper(Datamodel.SITE_WIKIMEDIA_COMMONS); private final MediaInfoIdValueImpl mediaInfo1 = new MediaInfoIdValueImpl("M42", "http://commons.wikimedia.org/entity/"); private final MediaInfoIdValueImpl mediaInfo2 = new MediaInfoIdValueImpl("M42", "http://commons.wikimedia.org/entity/"); private final MediaInfoIdValueImpl mediaInfo3 = new MediaInfoIdValueImpl("M57", "http://commons.wikimedia.org/entity/"); private final MediaInfoIdValueImpl mediaInfo4 = new MediaInfoIdValueImpl("M42", "http://www.example.org/entity/"); private final String JSON_MEDIA_INFO_ID_VALUE = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"mediainfo\",\"numeric-id\":42,\"id\":\"M42\"}}"; private final String JSON_MEDIA_INFO_ID_VALUE_WITHOUT_NUMERICAL_ID = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"M42\"}}";
@Test public void entityTypeIsMediaInfo() { assertEquals(mediaInfo1.getEntityType(), EntityIdValue.ET_MEDIA_INFO); }
@Test public void iriIsCorrect() { assertEquals(mediaInfo1.getIri(), "http://commons.wikimedia.org/entity/M42"); assertEquals(mediaInfo4.getIri(), "http://www.example.org/entity/M42"); }
@Test public void siteIriIsCorrect() { assertEquals(mediaInfo1.getSiteIri(), "http://commons.wikimedia.org/entity/"); }
@Test public void idIsCorrect() { assertEquals(mediaInfo1.getId(), "M42"); }
@Test public void equalityBasedOnContent() { assertEquals(mediaInfo1, mediaInfo1); assertEquals(mediaInfo1, mediaInfo2); assertNotEquals(mediaInfo1, mediaInfo3); assertNotEquals(mediaInfo1, mediaInfo4); assertNotEquals(mediaInfo1, null); assertNotEquals(mediaInfo1, this); }
@Test public void hashBasedOnContent() { assertEquals(mediaInfo1.hashCode(), mediaInfo2.hashCode()); }
@Test(expected = RuntimeException.class) public void idValidatedForFirstLetter() { new MediaInfoIdValueImpl("Q12345", "http://commons.wikimedia.org/entity/"); }
@Test(expected = IllegalArgumentException.class) public void idValidatedForNumber() { new MediaInfoIdValueImpl("M34d23", "http://commons.wikimedia.org/entity/"); }
@Test(expected = IllegalArgumentException.class) public void idValidatedForLength() { new MediaInfoIdValueImpl("M", "http://commons.wikimedia.org/entity/"); }
@Test(expected = RuntimeException.class) public void idNotNull() { new MediaInfoIdValueImpl((String)null, "http://commons.wikimedia.org/entity/"); }
@Test(expected = NullPointerException.class) public void baseIriNotNull() { new MediaInfoIdValueImpl("M42", null); }
@Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_MEDIA_INFO_ID_VALUE, mapper.writeValueAsString(mediaInfo1)); }
@Test public void testToJava() throws IOException { assertEquals(mediaInfo1, mapper.readValue(JSON_MEDIA_INFO_ID_VALUE, ValueImpl.class)); }
@Test public void testToJavaWithoutNumericalID() throws IOException { assertEquals(mediaInfo1, mapper.readValue(JSON_MEDIA_INFO_ID_VALUE_WITHOUT_NUMERICAL_ID, ValueImpl.class)); }
@Test public void testIsPlaceholder() { assertFalse(mediaInfo1.isPlaceholder()); } }
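/*
 * As the validation tests above show, a MediaInfo ID is the letter "M"
 * followed by digits. A sketch of the two construction routes exercised
 * here, which yield the same value for the Commons site IRI:
 *
 *   MediaInfoIdValue a = Datamodel.makeWikimediaCommonsMediaInfoIdValue("M42");
 *   MediaInfoIdValue b = new MediaInfoIdValueImpl("M42", "http://commons.wikimedia.org/entity/");
 */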
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/MediaInfoUpdateImplTest.java
/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */
package org.wikidata.wdtk.datamodel.implementation;
import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.toJson;
import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.MediaInfoUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate;
public class MediaInfoUpdateImplTest {
private static final MediaInfoIdValue M1 = Datamodel.makeWikimediaCommonsMediaInfoIdValue("M1"); private static final StatementUpdate STATEMENTS = StatementDocumentUpdateImplTest.STATEMENTS; private static final TermUpdate LABELS = LabeledDocumentUpdateImplTest.LABELS;
@Test public void testFields() { MediaInfoUpdate update = new MediaInfoUpdateImpl(M1, 123, LABELS, STATEMENTS); assertEquals(M1, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(LABELS, update.getLabels()); assertSame(STATEMENTS, update.getStatements()); }
@Test public void testEmpty() { assertTrue(new MediaInfoUpdateImpl(M1, 123, TermUpdate.EMPTY, StatementUpdate.EMPTY).isEmpty()); assertFalse(new MediaInfoUpdateImpl(M1, 123, LABELS, StatementUpdate.EMPTY).isEmpty()); }
@Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { MediaInfoUpdate update = new MediaInfoUpdateImpl(M1, 123, LABELS, STATEMENTS); assertFalse(update.equals(null)); assertFalse(update.equals(this)); assertTrue(update.equals(update)); assertTrue(update.equals(new MediaInfoUpdateImpl(M1, 123, LABELS, STATEMENTS))); assertFalse(update.equals(new MediaInfoUpdateImpl(M1, 123, TermUpdate.EMPTY, STATEMENTS))); }
@Test public void testHashCode() { assertEquals( new MediaInfoUpdateImpl(M1, 123, LABELS, STATEMENTS).hashCode(), new MediaInfoUpdateImpl(M1, 123, LABELS, STATEMENTS).hashCode()); }
@Test public void testJson() { assertThat(new MediaInfoUpdateImpl(M1, 123, TermUpdate.EMPTY, StatementUpdate.EMPTY), producesJson("{}")); assertThat(MediaInfoUpdateBuilder.forEntityId(M1).updateLabels(LABELS).build(), producesJson("{'labels':" + toJson(LABELS) + "}")); assertThat(MediaInfoUpdateBuilder.forEntityId(M1).updateStatements(STATEMENTS).build(), producesJson("{'claims':" + toJson(STATEMENTS) + "}")); } }
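/*
 * JSON sketch, following testJson above: an update with no changes
 * serializes to {}, while
 *
 *   MediaInfoUpdateBuilder.forEntityId(M1).updateLabels(LABELS).build()
 *
 * serializes to {"labels": ...}, ready to be sent as an entity-edit payload.
 */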
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/MonolingualTextValueImplTest.java
package org.wikidata.wdtk.datamodel.implementation;
/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */
import static org.junit.Assert.*;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import java.io.IOException;
public class MonolingualTextValueImplTest {
private final ObjectMapper mapper = new ObjectMapper(); private final MonolingualTextValue mt1 = new MonolingualTextValueImpl("some string", "en"); private final MonolingualTextValue mt2 = new MonolingualTextValueImpl("some string", "en"); private final String JSON_MONOLINGUAL_TEXT_VALUE = "{\"value\":{\"language\":\"en\",\"text\":\"some string\"},\"type\":\"monolingualtext\"}";
@Test public void dataIsCorrect() { assertEquals(mt1.getText(), "some string"); assertEquals(mt1.getLanguageCode(), "en"); }
@Test public void equalityBasedOnContent() { MonolingualTextValue mtDiffString = new MonolingualTextValueImpl( "another string", "en"); MonolingualTextValue mtDiffLanguageCode = new MonolingualTextValueImpl( "some string", "en-GB"); assertEquals(mt1, mt1); assertEquals(mt1, mt2); assertNotEquals(mt1, mtDiffString); assertNotEquals(mt1, mtDiffLanguageCode); assertNotEquals(mt1, null); assertNotEquals(mt1, this); }
@Test public void hashBasedOnContent() { assertEquals(mt1.hashCode(), mt2.hashCode()); }
@Test(expected = NullPointerException.class) public void textNotNull() { new MonolingualTextValueImpl(null, "en"); }
@Test(expected = NullPointerException.class) public void languageCodeNotNull() { new MonolingualTextValueImpl("some text", null); }
@Test public void testToJava() throws IOException { assertEquals(mt1, mapper.readValue(JSON_MONOLINGUAL_TEXT_VALUE, MonolingualTextValueImpl.class)); }
@Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_MONOLINGUAL_TEXT_VALUE, mapper.writeValueAsString(mt1)); } }
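/*
 * For reference, the JSON shape round-tripped above:
 *
 *   {"value":{"language":"en","text":"some string"},"type":"monolingualtext"}
 *
 * Both the language code and the text are mandatory, as the null-check
 * tests confirm.
 */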
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/PropertyDocumentImplTest.java
package org.wikidata.wdtk.datamodel.implementation;
/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */
import static org.junit.Assert.*;
import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.*;
public class PropertyDocumentImplTest {
private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private PropertyIdValue pid = new PropertyIdValueImpl("P2", "http://example.com/entity/"); private final Statement s = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), pid); private final List<StatementGroup> statementGroups = Collections.singletonList( new StatementGroupImpl(Collections.singletonList(s)) ); private final MonolingualTextValue label = new TermImpl("en", "label"); private final List<MonolingualTextValue> labelList = Collections.singletonList(label); private final MonolingualTextValue desc = new TermImpl("fr", "des"); private final List<MonolingualTextValue> descList = Collections.singletonList(desc); private final MonolingualTextValue alias = new TermImpl("de", "alias"); private final List<MonolingualTextValue> aliasList = Collections.singletonList(alias); private DatatypeIdValue datatypeId = new DatatypeIdImpl(DatatypeIdValue.DT_ITEM); private final PropertyDocument pd1 = new PropertyDocumentImpl(pid, labelList, descList, aliasList, statementGroups, datatypeId, 1234); private final PropertyDocument pd2 = new PropertyDocumentImpl(pid, labelList, descList, aliasList, statementGroups, datatypeId, 1234); private final String JSON_PROPERTY = "{\"type\":\"property\",\"id\":\"P2\",\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"label\"}},\"descriptions\":{\"fr\":{\"language\":\"fr\",\"value\":\"des\"}},\"aliases\":{\"de\":[{\"language\":\"de\",\"value\":\"alias\"}]},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]},\"datatype\":\"wikibase-item\",\"lastrevid\":1234}"; private final String JSON_PROPERTY_WITH_UNKNOWN_DATATYPE = "{\"type\":\"property\",\"id\":\"P2\",\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"label\"}},\"descriptions\":{},\"aliases\":{},\"claims\":{},\"datatype\":\"some-unknownDatatype\",\"lastrevid\":1234}";
@Test public void fieldsAreCorrect() { assertEquals(pd1.getEntityId(), pid); assertEquals(pd1.getLabels(), Collections.singletonMap(label.getLanguageCode(), label)); assertEquals(pd1.getDescriptions(), Collections.singletonMap(desc.getLanguageCode(), desc)); assertEquals( pd1.getAliases(), Collections.singletonMap(alias.getLanguageCode(), Collections.singletonList(alias)) ); assertEquals(pd1.getStatementGroups(), statementGroups); assertEquals(pd1.getDatatype(), datatypeId); }
@Test public void hasStatements() { assertTrue(pd1.hasStatement("P42")); assertFalse(pd1.hasStatement("P43")); assertTrue(pd1.hasStatement(new PropertyIdValueImpl("P42", "http://example.com/entity/"))); assertFalse(pd1.hasStatement(Datamodel.makePropertyIdValue("P43", "http://example.com/entity/"))); }
@Test public void findTerms() { assertEquals("label", pd1.findLabel("en")); assertNull(pd1.findLabel("ja")); assertEquals("des", pd1.findDescription("fr")); assertNull(pd1.findDescription("ja")); }
@Test public void equalityBasedOnContent() { PropertyDocument
pdDiffLabel = new PropertyDocumentImpl(pid, Collections.emptyList(), descList, aliasList, statementGroups, datatypeId, 1234); PropertyDocument pdDiffDesc = new PropertyDocumentImpl(pid, labelList, Collections.emptyList(), aliasList, statementGroups, datatypeId, 1234); PropertyDocument pdDiffAlias = new PropertyDocumentImpl(pid, labelList, descList, Collections.emptyList(), statementGroups, datatypeId, 1234); PropertyDocument pdDiffStatementGroups = new PropertyDocumentImpl(pid, labelList, descList, aliasList, Collections.emptyList(), datatypeId, 1234); PropertyDocument pdDiffDatatype = new PropertyDocumentImpl(pid, labelList, descList, aliasList, statementGroups, new DatatypeIdImpl(DatatypeIdValue.DT_STRING), 1234); PropertyDocument pdDiffRevisions = new PropertyDocumentImpl(pid, labelList, descList, aliasList, statementGroups, datatypeId, 1235); ItemDocument id = new ItemDocumentImpl(new ItemIdValueImpl("Q42", "foo"), labelList, descList, aliasList, Collections.emptyList(), Collections.emptyList(), 1234); assertEquals(pd1, pd1); assertEquals(pd1, pd2); assertNotEquals(pd1, pdDiffLabel); assertNotEquals(pd1, pdDiffDesc); assertNotEquals(pd1, pdDiffAlias); assertNotEquals(pd1, pdDiffStatementGroups); assertNotEquals(pd1, pdDiffDatatype); assertNotEquals(pd1, pdDiffRevisions); assertNotEquals(pd1, id); assertNotEquals(pd1, null); assertNotEquals(pd1, this); }
@Test public void hashBasedOnContent() { assertEquals(pd1.hashCode(), pd2.hashCode()); }
@Test(expected = NullPointerException.class) public void idNotNull() { new PropertyDocumentImpl(null, labelList, descList, aliasList, statementGroups, datatypeId, 1234); }
@Test public void labelsCanBeNull() { PropertyDocument doc = new PropertyDocumentImpl(pid, null, descList, aliasList, statementGroups, datatypeId, 1234); assertEquals(Collections.emptyMap(), doc.getLabels()); }
@Test public void descriptionsCanBeNull() { PropertyDocument doc = new PropertyDocumentImpl(pid, labelList, null, aliasList, statementGroups, datatypeId, 1234); assertEquals(Collections.emptyMap(), doc.getDescriptions()); }
@Test public void aliasesCanBeNull() { PropertyDocument doc = new PropertyDocumentImpl(pid, labelList, descList, null, statementGroups, datatypeId, 1234); assertEquals(Collections.emptyMap(), doc.getAliases()); }
@Test public void statementGroupsCanBeNull() { PropertyDocument doc = new PropertyDocumentImpl(pid, labelList, descList, aliasList, null, datatypeId, 1234); assertEquals(Collections.emptyList(), doc.getStatementGroups()); }
@Test(expected = NullPointerException.class) public void datatypeNotNull() { new PropertyDocumentImpl(pid, labelList, descList, aliasList, statementGroups, null, 1234); }
@Test(expected = IllegalArgumentException.class) public void labelUniquePerLanguage() { List<MonolingualTextValue> labels2 = new ArrayList<>(labelList); labels2.add(new MonolingualTextValueImpl("Property 42 label duplicate", "en")); new PropertyDocumentImpl(pid, labels2, descList, aliasList, statementGroups, datatypeId, 1234); }
@Test(expected = IllegalArgumentException.class) public void descriptionUniquePerLanguage() { List<MonolingualTextValue> descriptions2 = new ArrayList<>(descList); descriptions2.add(new MonolingualTextValueImpl("Duplicate desc P42", "fr")); new PropertyDocumentImpl(pid, labelList, descriptions2, aliasList, statementGroups, datatypeId, 1234); }
@Test public void testWithEntityId() { assertEquals(PropertyIdValue.NULL, pd1.withEntityId(PropertyIdValue.NULL).getEntityId()); PropertyIdValue id = Datamodel.makeWikidataPropertyIdValue("P123"); assertEquals(id,
pd1.withEntityId(id).getEntityId()); }
@Test public void testWithRevisionId() { assertEquals(1235L, pd1.withRevisionId(1235L).getRevisionId()); assertEquals(pd1, pd1.withRevisionId(1325L).withRevisionId(pd1.getRevisionId())); }
@Test public void testWithLabelInNewLanguage() { MonolingualTextValue newLabel = new MonolingualTextValueImpl( "Propriété P42", "fr"); PropertyDocument withLabel = pd1.withLabel(newLabel); assertEquals("Propriété P42", withLabel.findLabel("fr")); assertEquals("label", withLabel.findLabel("en")); }
@Test public void testWithOverridenLabel() { MonolingualTextValue newLabel = new MonolingualTextValueImpl( "The P42 Property", "en"); PropertyDocument withLabel = pd1.withLabel(newLabel); assertEquals("The P42 Property", withLabel.findLabel("en")); }
@Test public void testWithIdenticalLabel() { MonolingualTextValue newLabel = new MonolingualTextValueImpl( "label", "en"); PropertyDocument withLabel = pd1.withLabel(newLabel); assertEquals(withLabel, pd1); }
@Test public void testWithDescriptionInNewLanguage() { MonolingualTextValue newDescription = new MonolingualTextValueImpl( "Beschreibung", "de"); PropertyDocument withDescription = pd1.withDescription(newDescription); assertEquals("des", withDescription.findDescription("fr")); assertEquals("Beschreibung", withDescription.findDescription("de")); }
@Test public void testPropertyToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_PROPERTY, mapper.writeValueAsString(pd1)); }
@Test public void testPropertyToJava() throws IOException { assertEquals(pd1, mapper.readValue(JSON_PROPERTY, PropertyDocumentImpl.class)); }
@Test public void testPropertyToJavaWithUnknownDatatype() throws JsonProcessingException { PropertyDocumentImpl pd = mapper.readValue(JSON_PROPERTY_WITH_UNKNOWN_DATATYPE, PropertyDocumentImpl.class); assertEquals("some-unknownDatatype", pd.getJsonDatatype()); }
@Test public void testWithOverridenDescription() { MonolingualTextValue newDescription = new MonolingualTextValueImpl( "une meilleure description", "fr"); PropertyDocument withDescription = pd1.withDescription(newDescription); assertEquals("une meilleure description", withDescription.findDescription("fr")); }
@Test public void testWithIdenticalDescription() { MonolingualTextValue newDescription = new MonolingualTextValueImpl( "des", "fr"); PropertyDocument withDescription = pd1.withDescription(newDescription); assertEquals(withDescription, pd1); }
@Test public void testWithAliasInNewLanguage() { MonolingualTextValue newAlias = new MonolingualTextValueImpl( "Prop42", "fr"); PropertyDocument withAliases = pd1.withAliases("fr", Collections.singletonList(newAlias)); assertEquals(Collections.singletonList(newAlias), withAliases.getAliases().get("fr")); }
@Test public void testWithOverridenAliases() { MonolingualTextValue newAlias = new MonolingualTextValueImpl( "A new alias of P42", "en"); PropertyDocument withAlias = pd1.withAliases("en", Collections.singletonList(newAlias)); assertEquals(Collections.singletonList(newAlias), withAlias.getAliases().get("en")); }
@Test public void testAddStatement() { Statement fresh = new StatementImpl("MyFreshId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P29", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), pid); Claim claim = fresh.getClaim(); assertFalse(pd1.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); PropertyDocument withStatement = pd1.withStatement(fresh); assertTrue(withStatement.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); }
@Test public void testDeleteStatements() { Statement toRemove = statementGroups.get(0).getStatements().get(0); PropertyDocument withoutStatement = pd1.withoutStatementIds(Collections.singleton(toRemove.getStatementId())); assertNotEquals(withoutStatement, pd1); } }
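/*
 * The with* methods exercised above return modified copies and leave the
 * receiver untouched; a sketch using the fixtures of this test:
 *
 *   PropertyDocument pd = pd1
 *       .withLabel(new TermImpl("de", "Etikett"))  // add a German label
 *       .withRevisionId(1235L);                    // bump the revision
 *   // pd1 is unchanged; pd carries the extra label and the new revision id.
 */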
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/PropertyIdValueImplTest.java
/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */
package org.wikidata.wdtk.datamodel.implementation;
import static org.junit.Assert.*;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import java.io.IOException;
public class PropertyIdValueImplTest {
private final ObjectMapper mapper = new DatamodelMapper(Datamodel.SITE_WIKIDATA); private final PropertyIdValueImpl prop1 = new PropertyIdValueImpl("P42", "http://www.wikidata.org/entity/"); private final PropertyIdValueImpl prop2 = new PropertyIdValueImpl("P42", "http://www.wikidata.org/entity/"); private final PropertyIdValueImpl prop3 = new PropertyIdValueImpl("P57", "http://www.wikidata.org/entity/"); private final PropertyIdValueImpl prop4 = new PropertyIdValueImpl("P42", "http://www.example.org/entity/"); private final String JSON_PROPERTY_ID_VALUE = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"property\",\"numeric-id\":42,\"id\":\"P42\"}}"; private final String JSON_PROPERTY_ID_VALUE_WITHOUT_NUMERICAL_ID = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"P42\"}}";
@Test public void entityTypeIsProperty() { assertEquals(prop1.getEntityType(), EntityIdValue.ET_PROPERTY); }
@Test public void iriIsCorrect() { assertEquals(prop1.getIri(), "http://www.wikidata.org/entity/P42"); assertEquals(prop4.getIri(), "http://www.example.org/entity/P42"); }
@Test public void idIsCorrect() { assertEquals(prop1.getId(), "P42"); }
@Test public void equalityBasedOnContent() { assertEquals(prop1, prop1); assertEquals(prop1, prop2); assertNotEquals(prop1, prop3); assertNotEquals(prop1, prop4); assertNotEquals(prop1, null); assertNotEquals(prop1, this); }
@Test public void hashBasedOnContent() { assertEquals(prop1.hashCode(), prop2.hashCode()); }
@Test(expected = RuntimeException.class) public void idValidatedForFirstLetter() { new PropertyIdValueImpl("Q12345", "http://www.wikidata.org/entity/"); }
@Test(expected = IllegalArgumentException.class) public void idValidatedForLength() { new PropertyIdValueImpl("P", "http://www.wikidata.org/entity/"); }
@Test(expected = IllegalArgumentException.class) public void idValidatedForNumber() { new PropertyIdValueImpl("P34d23", "http://www.wikidata.org/entity/"); }
@Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_PROPERTY_ID_VALUE, mapper.writeValueAsString(prop1)); }
@Test public void testToJava() throws IOException { assertEquals(prop1, mapper.readValue(JSON_PROPERTY_ID_VALUE, ValueImpl.class)); }
@Test public void testToJavaWithoutNumericalID() throws IOException { assertEquals(prop1, mapper.readValue(JSON_PROPERTY_ID_VALUE_WITHOUT_NUMERICAL_ID, ValueImpl.class)); }
@Test public void testIsPlaceholder() { assertFalse(prop1.isPlaceholder()); } }
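/*
 * Both JSON forms accepted above decode to the same value, so the
 * numeric-id field is optional on input:
 *
 *   mapper.readValue(JSON_PROPERTY_ID_VALUE, ValueImpl.class)
 *   mapper.readValue(JSON_PROPERTY_ID_VALUE_WITHOUT_NUMERICAL_ID, ValueImpl.class)
 *
 * each equal new PropertyIdValueImpl("P42", "http://www.wikidata.org/entity/").
 */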
"http://www.wikidata.org/entity/"); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_PROPERTY_ID_VALUE, mapper.writeValueAsString(prop1)); } @Test public void testToJava() throws IOException { assertEquals(prop1, mapper.readValue(JSON_PROPERTY_ID_VALUE, ValueImpl.class)); } @Test public void testToJavaWithoutNumericalID() throws IOException { assertEquals(prop1, mapper.readValue(JSON_PROPERTY_ID_VALUE_WITHOUT_NUMERICAL_ID, ValueImpl.class)); } @Test public void testIsPlaceholder() { assertFalse(prop1.isPlaceholder()); } } PropertyUpdateImplTest.java000066400000000000000000000110231444772566300401420ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.toJson; import java.util.Collections; import java.util.Map; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.PropertyUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class PropertyUpdateImplTest { private static final PropertyIdValue P1 = Datamodel.makeWikidataPropertyIdValue("P1"); private static final StatementUpdate STATEMENTS = StatementUpdateBuilder.create().remove("ID123").build(); private static final TermUpdate LABELS = LabeledDocumentUpdateImplTest.LABELS; private static final TermUpdate DESCRIPTIONS = TermedDocumentUpdateImplTest.DESCRIPTIONS; private static final AliasUpdate ALIAS = TermedDocumentUpdateImplTest.ALIAS; private static final Map ALIASES = TermedDocumentUpdateImplTest.ALIASES; @Test public void testFields() { PropertyUpdate update = new PropertyUpdateImpl(P1, 123, LABELS, DESCRIPTIONS, ALIASES, STATEMENTS); assertEquals(P1, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(LABELS, update.getLabels()); assertSame(DESCRIPTIONS, update.getDescriptions()); assertEquals(ALIASES, update.getAliases()); assertSame(STATEMENTS, update.getStatements()); } @Test public void testEmpty() { PropertyUpdate empty = new PropertyUpdateImpl(P1, 123, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap(), 
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/QuantityValueImplTest.java
/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThrows; import java.io.IOException; import java.math.BigDecimal; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.exc.ValueInstantiationException; public class QuantityValueImplTest { private final ObjectMapper mapper = new ObjectMapper(); private final BigDecimal nv = new BigDecimal( "0.123456789012345678901234567890123456789"); private final BigDecimal lb = new BigDecimal( "0.123456789012345678901234567890123456788"); private final BigDecimal ub = new BigDecimal( "0.123456789012345678901234567890123456790"); private final String unitMeter = "http://wikidata.org/entity/Q11573"; private final ItemIdValue unitMeterId = ItemIdValueImpl.fromIri(unitMeter); private final QuantityValue q1 = new QuantityValueImpl(nv, lb, ub, unitMeterId); private final QuantityValue q2 = new QuantityValueImpl(nv, lb, ub, unitMeterId); private final QuantityValue q3 = new QuantityValueImpl(nv, null, null, unitMeterId); private final QuantityValue q4 = new QuantityValueImpl(nv, lb, ub, (ItemIdValue) null); private static String JSON_QUANTITY_VALUE = "{\"value\":{\"amount\":\"+0.123456789012345678901234567890123456789\",\"lowerBound\":\"+0.123456789012345678901234567890123456788\",\"upperBound\":\"+0.123456789012345678901234567890123456790\",\"unit\":\"http://wikidata.org/entity/Q11573\"},\"type\":\"quantity\"}"; private static String JSON_UNBOUNDED_QUANTITY_VALUE = "{\"value\":{\"amount\":\"+0.123456789012345678901234567890123456789\",\"unit\":\"http://wikidata.org/entity/Q11573\"},\"type\":\"quantity\"}"; private static String JSON_INVALID_UNIT = "{\"value\":{\"amount\":\"+0.1234567890123\",\"unit\":\"not_a_url\"},\"type\":\"quantity\"}"; @Test public void gettersWorking() { assertEquals(q1.getNumericValue(), nv); assertEquals(q1.getLowerBound(), lb); assertEquals(q1.getUpperBound(), ub); } @Test public void getUnitItemId() { assertEquals(new ItemIdValueImpl("Q11573", "http://wikidata.org/entity/"), q1.getUnitItemId()); } @Test public void getUnitItemIdNoUnit() { assertNull(q4.getUnitItemId()); } @Test @SuppressWarnings("deprecation") public void getUnitItemIdInvalidIri() { assertThrows(IllegalArgumentException.class, () -> new QuantityValueImpl(nv, lb, ub, "foobar")); } @Test public void equalityBasedOnContent() { BigDecimal nvplus = new BigDecimal( "0.1234567890123456789012345678901234567895"); BigDecimal nvminus = new BigDecimal( "0.1234567890123456789012345678901234567885"); QuantityValue q4 = new QuantityValueImpl(nvplus, lb, ub, unitMeterId); QuantityValue q5 = new QuantityValueImpl(nv, nvminus, ub, unitMeterId); QuantityValue q6 = new QuantityValueImpl(nv, lb, nvplus, unitMeterId); QuantityValue q7 = new QuantityValueImpl(nv, lb, ub, (ItemIdValue)null); assertEquals(q1, q1); assertEquals(q1, q2); assertNotEquals(q1, q3); assertNotEquals(q1, q4); assertNotEquals(q1, q5); assertNotEquals(q1, q6); assertNotEquals(q1, q7); assertNotEquals(q1, null); assertNotEquals(q1, this); } @Test public void equalityBasedOnRepresentation() { BigDecimal amount1 = new BigDecimal("4.00"); BigDecimal amount2 = new BigDecimal("4"); assertNotEquals(amount1, 
amount2); QuantityValue quantity1 = new QuantityValueImpl(amount1, null, null, (ItemIdValue)null); QuantityValue quantity2 = new QuantityValueImpl(amount2, null, null, (ItemIdValue)null); assertNotEquals(quantity1, quantity2); } @Test public void faithfulJsonSerialization() { BigDecimal amount = new BigDecimal("4.00"); QuantityValueImpl quantity = new QuantityValueImpl(amount, null, null, (ItemIdValue)null); assertEquals("+4.00", quantity.getValue().getAmountAsString()); } @Test public void hashBasedOnContent() { assertEquals(q1.hashCode(), q2.hashCode()); } @Test public void numValueNotNull() { assertThrows(NullPointerException.class, () -> new QuantityValueImpl(null, lb, ub, unitMeterId)); } @Test public void lowerBoundNotNull() { assertThrows(NullPointerException.class, () -> new QuantityValueImpl(nv, null, ub, unitMeterId)); } @Test public void upperBoundNotNull() { assertThrows(NullPointerException.class, () -> new QuantityValueImpl(nv, lb, null, unitMeterId)); } @Test @SuppressWarnings("deprecation") public void unitNotNull() { assertThrows(NullPointerException.class, () -> new QuantityValueImpl(nv, lb, ub, (String) null)); } @Test @SuppressWarnings("deprecation") public void unitNotEmpty() { assertThrows(IllegalArgumentException.class, () -> new QuantityValueImpl(nv, lb, ub, (String) "")); } @Test public void lowerBoundNotGreaterNumVal() { assertThrows(IllegalArgumentException.class, () -> new QuantityValueImpl(lb, nv, ub, unitMeterId)); } @Test public void numValNotGreaterLowerBound() { assertThrows(IllegalArgumentException.class, () -> new QuantityValueImpl(ub, lb, nv, unitMeterId)); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_QUANTITY_VALUE, mapper.writeValueAsString(q1)); } @Test public void testToJava() throws IOException { assertEquals(q1, mapper.readValue(JSON_QUANTITY_VALUE, ValueImpl.class)); } @Test public void testParseInvalidUnit() throws IOException { assertThrows(ValueInstantiationException.class, () -> mapper.readValue(JSON_INVALID_UNIT, ValueImpl.class)); } @Test public void testUnboundedToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_UNBOUNDED_QUANTITY_VALUE, mapper.writeValueAsString(q3)); } @Test public void testUnboundedToJava() throws IOException { assertEquals(q3, mapper.readValue(JSON_UNBOUNDED_QUANTITY_VALUE, ValueImpl.class)); } } ReferenceImplTest.java000066400000000000000000000047721444772566300370660ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.*; import java.util.Collections; import java.util.Iterator; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; public class ReferenceImplTest { private final EntityIdValue subject = new ItemIdValueImpl("Q42", "http://wikidata.org/entity/"); private final PropertyIdValue property = new PropertyIdValueImpl( "P42", "http://wikidata.org/entity/"); private final ValueSnak valueSnak = new ValueSnakImpl(property, subject); private final SnakGroup snakGroup = new SnakGroupImpl( Collections.<Snak> singletonList(valueSnak)); private final Reference r1 = new ReferenceImpl(Collections.singletonList(snakGroup)); private final Reference r2 = new ReferenceImpl(Collections.singletonList(snakGroup)); @Test public void snakListIsCorrect() { assertEquals(r1.getSnakGroups(), Collections.singletonList(snakGroup)); } @Test public void equalityBasedOnContent() { Reference r3 = new ReferenceImpl(Collections.emptyList()); assertEquals(r1, r1); assertEquals(r1, r2); assertNotEquals(r1, r3); assertNotEquals(r1, null); assertNotEquals(r1, this); } @Test public void hashBasedOnContent() { assertEquals(r1.hashCode(), r2.hashCode()); } @Test(expected = NullPointerException.class) public void snakListNotNull() { new ReferenceImpl(null); } @Test public void iterateOverAllSnaks() { Iterator<Snak> snaks = r1.getAllSnaks(); assertTrue(snaks.hasNext()); assertEquals(valueSnak, snaks.next()); assertFalse(snaks.hasNext()); } } SenseDocumentImplTest.java000066400000000000000000000173701444772566300377420ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public class SenseDocumentImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final SenseIdValue sid = new SenseIdValueImpl("L42-S1", "http://example.com/entity/"); private final Statement s = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), sid); private final List<StatementGroup> statementGroups = Collections.singletonList( new StatementGroupImpl(Collections.singletonList(s)) ); private final MonolingualTextValue rep = new TermImpl("en", "rep"); private final List<MonolingualTextValue> repList = Collections.singletonList(rep); private final SenseDocument sd1 = new SenseDocumentImpl(sid, repList, statementGroups, 1234); private final SenseDocument sd2 = new SenseDocumentImpl(sid, repList, statementGroups, 1234); private final String JSON_SENSE = "{\"type\":\"sense\",\"id\":\"L42-S1\",\"glosses\":{\"en\":{\"language\":\"en\",\"value\":\"rep\"}},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]},\"lastrevid\":1234}"; @Test public void fieldsAreCorrect() { assertEquals(sd1.getEntityId(), sid); assertEquals(sd1.getGlosses(), Collections.singletonMap(rep.getLanguageCode(), rep)); assertEquals(sd1.getStatementGroups(), statementGroups); } @Test public void equalityBasedOnContent() { SenseDocument irDiffGlosses = new SenseDocumentImpl(sid, Collections.singletonList(new MonolingualTextValueImpl("fr", "bar")), statementGroups, 1234); SenseDocument irDiffStatementGroups = new SenseDocumentImpl(sid, repList, Collections.emptyList(), 1234); SenseDocument irDiffRevisions = new SenseDocumentImpl(sid, repList, statementGroups, 1235); PropertyDocument pr = new PropertyDocumentImpl( new PropertyIdValueImpl("P42", "foo"), repList, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), new DatatypeIdImpl(DatatypeIdValue.DT_STRING), 1234); SenseDocument irDiffSenseIdValue = new SenseDocumentImpl( new SenseIdValueImpl("L42-S2", "http://example.com/entity/"), repList, Collections.emptyList(), 1235); assertEquals(sd1, sd1); assertEquals(sd1, sd2); assertNotEquals(sd1, irDiffGlosses); assertNotEquals(sd1, irDiffStatementGroups); assertNotEquals(sd1, irDiffRevisions);
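// Editorial comment (not in the archived source): each irDiff* document above differs from sd1 in exactly one component, so the assertions in this method isolate the individual fields that feed the equality contract.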
assertNotEquals(irDiffStatementGroups, irDiffSenseIdValue); assertNotEquals(sd1, pr); assertNotEquals(sd1, null); assertNotEquals(sd1, this); } @Test public void hashBasedOnContent() { assertEquals(sd1.hashCode(), sd2.hashCode()); } @Test(expected = NullPointerException.class) public void idNotNull() { new SenseDocumentImpl(null, repList, statementGroups, 1234); } @Test public void glossesNull() { assertEquals(Collections.emptyMap(), new SenseDocumentImpl(sid, null, statementGroups, 1234).getGlosses()); } @Test public void glossesEmpty() { assertEquals(Collections.emptyMap(), new SenseDocumentImpl(sid, Collections.emptyList(), statementGroups, 1234).getGlosses()); } @Test public void statementGroupsCanBeNull() { SenseDocument doc = new SenseDocumentImpl(sid, repList, null, 1234); assertTrue(doc.getStatementGroups().isEmpty()); } @Test(expected = IllegalArgumentException.class) public void statementGroupsUseSameSubject() { SenseIdValue iid2 = new SenseIdValueImpl("Q23", "http://example.org/"); Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://wikibase.org/entity/")), Collections.emptyList(), Collections.emptyList(), iid2); StatementGroup sg2 = new StatementGroupImpl(Collections.singletonList(s2)); List<StatementGroup> statementGroups2 = new ArrayList<>(); statementGroups2.add(statementGroups.get(0)); statementGroups2.add(sg2); new SenseDocumentImpl(sid, repList, statementGroups2, 1234); } @Test public void iterateOverAllStatements() { Iterator<Statement> statements = sd1.getAllStatements(); assertTrue(statements.hasNext()); assertEquals(s, statements.next()); assertFalse(statements.hasNext()); } @Test public void testWithEntityId() { assertEquals(SenseIdValue.NULL, sd1.withEntityId(SenseIdValue.NULL).getEntityId()); SenseIdValue id = Datamodel.makeWikidataSenseIdValue("L123-S45"); assertEquals(id, sd1.withEntityId(id).getEntityId()); } @Test public void testWithRevisionId() { assertEquals(1235L, sd1.withRevisionId(1235L).getRevisionId()); assertEquals(sd1, sd1.withRevisionId(1325L).withRevisionId(sd1.getRevisionId())); } @Test public void testWithGlossInNewLanguage() { MonolingualTextValue newGloss = new MonolingualTextValueImpl("Foo", "fr"); SenseDocument withGloss = sd1.withGloss(newGloss); assertEquals(newGloss, withGloss.getGlosses().get("fr")); } @Test public void testAddStatement() { Statement fresh = new StatementImpl("MyFreshId", StatementRank.NORMAL, new SomeValueSnakImpl(new PropertyIdValueImpl("P29", "http://example.com/entity/")), Collections.emptyList(), Collections.emptyList(), sid); Claim claim = fresh.getClaim(); assertFalse(sd1.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); SenseDocument withStatement = sd1.withStatement(fresh); assertTrue(withStatement.hasStatementValue( claim.getMainSnak().getPropertyId(), claim.getValue())); } @Test public void testDeleteStatements() { Statement toRemove = statementGroups.get(0).getStatements().get(0); SenseDocument withoutStatement = sd1.withoutStatementIds(Collections.singleton(toRemove.getStatementId())); assertNotEquals(withoutStatement, sd1); } @Test public void testSenseToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_SENSE, mapper.writeValueAsString(sd1)); } @Test public void testSenseToJava() throws IOException { assertEquals(sd1, mapper.readValue(JSON_SENSE, SenseDocumentImpl.class)); } }
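/*
 * Editorial sketch (added in editing, not part of the archived sources): the
 * SenseDocumentImpl tests above exercise immutable copy methods. A typical
 * read-modify-write sequence, built only from calls shown in those tests:
 *
 *   SenseDocument doc = new SenseDocumentImpl(sid, repList, statementGroups, 1234);
 *   SenseDocument updated = doc
 *       .withGloss(new MonolingualTextValueImpl("Foo", "fr"))
 *       .withRevisionId(1235L);
 *
 * Each with* call returns a fresh document and leaves doc itself unchanged.
 */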
SenseIdValueImplTest.java000066400000000000000000000105241444772566300375070ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import java.io.IOException; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public class SenseIdValueImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://www.wikidata.org/entity/"); private final SenseIdValueImpl sense1 = new SenseIdValueImpl("L42-S1", "http://www.wikidata.org/entity/"); private final SenseIdValueImpl sense2 = new SenseIdValueImpl("L42-S1", "http://www.wikidata.org/entity/"); private final SenseIdValueImpl sense3 = new SenseIdValueImpl("L57-S2", "http://www.wikidata.org/entity/"); private final SenseIdValueImpl sense4 = new SenseIdValueImpl("L42-S1", "http://www.example.org/entity/"); private final String JSON_SENSE_ID_VALUE = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"sense\",\"id\":\"L42-S1\"}}"; private final String JSON_SENSE_ID_VALUE_WITHOUT_TYPE = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"L42-S1\"}}"; @Test public void entityTypeIsSense() { assertEquals(sense1.getEntityType(), EntityIdValue.ET_SENSE); } @Test public void iriIsCorrect() { assertEquals(sense1.getIri(), "http://www.wikidata.org/entity/L42-S1"); assertEquals(sense4.getIri(), "http://www.example.org/entity/L42-S1"); } @Test public void siteIriIsCorrect() { assertEquals(sense1.getSiteIri(), "http://www.wikidata.org/entity/"); } @Test public void idIsCorrect() { assertEquals(sense1.getId(), "L42-S1"); } @Test public void equalityBasedOnContent() { assertEquals(sense1, sense1); assertEquals(sense1, sense2); assertNotEquals(sense1, sense3); assertNotEquals(sense1, sense4); assertNotEquals(sense1, null); assertNotEquals(sense1, this); } @Test public void hashBasedOnContent() { assertEquals(sense1.hashCode(), sense2.hashCode()); } @Test(expected = RuntimeException.class) public void idValidatedForFirstLetter() { new SenseIdValueImpl("Q12345", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForNumber() { new SenseIdValueImpl("L34d23", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForLength() { new SenseIdValueImpl("L", "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void idValidatedForParts() { new SenseIdValueImpl("L21", "http://www.wikidata.org/entity/"); } @Test(expected 
= IllegalArgumentException.class) public void idNotNull() { new SenseIdValueImpl((String)null, "http://www.wikidata.org/entity/"); } @Test(expected = IllegalArgumentException.class) public void baseIriNotNull() { new SenseIdValueImpl("L42", null); } @Test public void lexemeIdIsCorrect() { assertEquals(sense1.getLexemeId(), new LexemeIdValueImpl("L42", "http://www.wikidata.org/entity/")); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_SENSE_ID_VALUE, mapper.writeValueAsString(sense1)); } @Test public void testToJava() throws IOException { assertEquals(sense1, mapper.readValue(JSON_SENSE_ID_VALUE, ValueImpl.class)); } @Test public void testToJavaWithoutNumericalID() throws IOException { assertEquals(sense1, mapper.readValue(JSON_SENSE_ID_VALUE_WITHOUT_TYPE, ValueImpl.class)); } @Test public void testIsPlaceholder() { assertFalse(sense1.isPlaceholder()); } } SenseUpdateImplTest.java000066400000000000000000000075501444772566300374050ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.toJson; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.SenseUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.SenseIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class SenseUpdateImplTest { private static final SenseIdValue S1 = Datamodel.makeWikidataSenseIdValue("L1-S1"); private static final StatementUpdate STATEMENTS = StatementDocumentUpdateImplTest.STATEMENTS; private static final TermUpdate GLOSSES = TermUpdateBuilder.create().remove("en").build(); @Test public void testFields() { SenseUpdate update = new SenseUpdateImpl(S1, 123, GLOSSES, STATEMENTS); assertEquals(S1, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(GLOSSES, update.getGlosses()); assertSame(STATEMENTS, update.getStatements()); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> new SenseUpdateImpl(S1, 0, null, StatementUpdate.EMPTY)); } @Test public void testEmpty() { assertFalse(new SenseUpdateImpl(S1, 0, TermUpdate.EMPTY, STATEMENTS).isEmpty()); 
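// Editorial comment (not in the archived source): isEmpty() must stay false while either component update is non-empty; the next two checks cover the glosses-only case and the fully empty case.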
assertFalse(new SenseUpdateImpl(S1, 0, GLOSSES, StatementUpdate.EMPTY).isEmpty()); assertTrue(new SenseUpdateImpl(S1, 0, TermUpdate.EMPTY, StatementUpdate.EMPTY).isEmpty()); } @Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { SenseUpdate update = new SenseUpdateImpl(S1, 0, GLOSSES, STATEMENTS); assertFalse(update.equals(null)); assertFalse(update.equals(this)); assertTrue(update.equals(update)); assertTrue(update.equals( new SenseUpdateImpl(S1, 0, TermUpdateBuilder.create().remove("en").build(), STATEMENTS))); assertFalse(update.equals(new SenseUpdateImpl(S1, 123, GLOSSES, StatementUpdate.EMPTY))); assertFalse(update.equals(new SenseUpdateImpl(S1, 123, TermUpdate.EMPTY, STATEMENTS))); } @Test public void testHashCode() { SenseUpdate update1 = new SenseUpdateImpl(S1, 123, GLOSSES, STATEMENTS); SenseUpdate update2 = new SenseUpdateImpl(S1, 123, TermUpdateBuilder.create().remove("en").build(), STATEMENTS); assertEquals(update1.hashCode(), update2.hashCode()); } @Test public void testJson() { assertThat(new SenseUpdateImpl(S1, 123, TermUpdate.EMPTY, StatementUpdate.EMPTY), producesJson("{}")); assertThat(SenseUpdateBuilder.forEntityId(S1).updateGlosses(GLOSSES).build(), producesJson("{'glosses':" + toJson(GLOSSES) + "}")); assertThat(SenseUpdateBuilder.forEntityId(S1).updateStatements(STATEMENTS).build(), producesJson("{'claims':" + toJson(STATEMENTS) + "}")); } } SiteLinkImplTest.java000066400000000000000000000063021444772566300367010ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.*; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; public class SiteLinkImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final List<ItemIdValue> badges = Arrays.asList( new ItemIdValueImpl("Q43", "http://example.com/entity/"), new ItemIdValueImpl("Q42", "http://example.com/entity/") ); private final SiteLink s1 = new SiteLinkImpl("Dresden", "enwiki", badges); private final SiteLink s2 = new SiteLinkImpl("Dresden", "enwiki", badges); private final String JSON_SITE_LINK = "{\"site\":\"enwiki\", \"title\":\"Dresden\", \"badges\":[\"Q42\",\"Q43\"]}"; @Test public void fieldsIsCorrect() { assertEquals(s1.getPageTitle(), "Dresden"); assertEquals(s1.getSiteKey(), "enwiki"); assertEquals(s1.getBadges(), badges); } @Test public void equalityBasedOnContent() { SiteLink sDiffTitle = new SiteLinkImpl("Berlin", "enwiki", badges); SiteLink sDiffSiteKey = new SiteLinkImpl("Dresden", "dewiki", badges); SiteLink sDiffBadges = new SiteLinkImpl("Dresden", "enwiki", Collections.emptyList()); assertEquals(s1, s1); assertEquals(s1, s2); assertNotEquals(s1, sDiffTitle); assertNotEquals(s1, sDiffSiteKey); assertNotEquals(s1, sDiffBadges); assertNotEquals(s1, null); assertNotEquals(s1, this); } @Test public void hashBasedOnContent() { assertEquals(s1.hashCode(), s2.hashCode()); } @Test(expected = NullPointerException.class) public void titleNotNull() { new SiteLinkImpl(null, "enwiki", Collections.emptyList()); } @Test(expected = NullPointerException.class) public void siteKeyNotNull() { new SiteLinkImpl("Dresden", null, Collections.emptyList()); } @Test public void badgesCanBeNull() { SiteLink sitelink = new SiteLinkImpl("Dresden", "enwiki", null); assertEquals(sitelink.getBadges(), Collections.emptyList()); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_SITE_LINK, mapper.writeValueAsString(s1)); } @Test public void testToJava() throws IOException { assertEquals(s1, mapper.readValue(JSON_SITE_LINK, SiteLinkImpl.class)); } } SitesImplTest.java000066400000000000000000000054151444772566300362520ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import java.util.Collections; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; public class SitesImplTest { private SitesImpl sites; @Before public void setUp() { this.sites = new SitesImpl(); this.sites.setSiteInformation("enwiki", "wikipedia", "en", "mediawiki", "http://en.wikipedia.org/w/$1", "http://en.wikipedia.org/wiki/$1"); this.sites.setSiteInformation("dewiki", "wikipedia", "de", "mediawiki", "//de.wikipedia.org/w/$1", "//de.wikipedia.org/wiki/$1"); this.sites.setSiteInformation("somesite", "group", "language", "something else", "http://example.org/file/$1", "http://example.org/page/$1"); } @Test public void siteLinkIri() { SiteLink sSpecialChar = new SiteLinkImpl("&", "dewiki", Collections.emptyList()); assertEquals(SitesImpl.DEFAULT_PROTOCOL_PREFIX + "//de.wikipedia.org/wiki/%26", this.sites.getSiteLinkUrl(sSpecialChar)); SiteLink sSpecialChar2 = new SiteLinkImpl("Björk", "enwiki", Collections.emptyList()); assertEquals("http://en.wikipedia.org/wiki/Bj%C3%B6rk", this.sites.getSiteLinkUrl(sSpecialChar2)); } @Test public void unknownSiteKey() { assertNull(this.sites.getGroup("somekey")); assertNull(this.sites.getSiteType("somekey")); assertNull(this.sites.getLanguageCode("somekey")); assertNull(this.sites.getFileUrl("somekey", "filename")); assertNull(this.sites.getPageUrl("somekey", "page name")); } @Test public void knownSiteKey() { assertEquals(this.sites.getGroup("enwiki"), "wikipedia"); assertEquals(this.sites.getSiteType("enwiki"), "mediawiki"); assertEquals(this.sites.getLanguageCode("enwiki"), "en"); assertEquals(this.sites.getFileUrl("enwiki", "filename"), "http://en.wikipedia.org/w/filename"); assertEquals(this.sites.getPageUrl("enwiki", "Page name"), "http://en.wikipedia.org/wiki/Page_name"); assertEquals(this.sites.getPageUrl("somesite", "Page name"), "http://example.org/page/Page+name"); } } SnakGroupTest.java000066400000000000000000000057601444772566300362550ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.*; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.*; public class SnakGroupTest { private SnakGroup sg1; private SnakGroup sg2; private Snak snak1; private Snak snak2; private PropertyIdValue property; @Before public void setUp() { EntityIdValue subject = new ItemIdValueImpl("Q42", "http://wikidata.org/entity/"); property = new PropertyIdValueImpl("P42", "http://wikidata.org/entity/"); snak1 = new ValueSnakImpl(property, subject); snak2 = new SomeValueSnakImpl(property); sg1 = new SnakGroupImpl(Collections.singletonList(snak1)); sg2 = new SnakGroupImpl(Collections.singletonList(snak1)); } @Test public void implementsCollection() { assertFalse(sg1.isEmpty()); assertEquals(1, sg1.size()); assertTrue(sg1.contains(snak1)); assertFalse(sg1.contains(snak2)); assertTrue(sg1.iterator().hasNext()); assertEquals(sg1.iterator().next(), snak1); assertArrayEquals(new Snak[] {snak1}, sg1.toArray()); } @Test public void snakListIsCorrect() { assertEquals(sg1.getSnaks(), Collections.singletonList(snak1)); } @Test public void propertyIsCorrect() { assertEquals(sg1.getProperty(), property); } @Test public void equalityBasedOnContent() { List<Snak> snaks = new ArrayList<>(); snaks.add(snak1); snaks.add(snak2); SnakGroup sg3 = new SnakGroupImpl(snaks); assertEquals(sg1, sg1); assertEquals(sg1, sg2); assertNotEquals(sg1, sg3); assertNotEquals(sg1, null); assertNotEquals(sg1, this); } @Test public void hashBasedOnContent() { assertEquals(sg1.hashCode(), sg2.hashCode()); } @Test(expected = IllegalArgumentException.class) public void snakListNotNull() { new SnakGroupImpl(null); } @Test(expected = IllegalArgumentException.class) public void snakListNotEmpty() { new SnakGroupImpl(Collections.emptyList()); } @Test(expected = IllegalArgumentException.class) public void snakListRequiresSameProperty() { List<Snak> snaks = new ArrayList<>(); snaks.add(snak1); PropertyIdValue property2 = new PropertyIdValueImpl("P23", "http://wikidata.org/entity/"); Snak snak3 = new NoValueSnakImpl(property2); snaks.add(snak3); new SnakGroupImpl(snaks); } } SnakImplTest.java000066400000000000000000000150651444772566300360601ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import static org.junit.Assert.*; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; import java.io.IOException; public class SnakImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final PropertyIdValue p1 = new PropertyIdValueImpl("P42", "http://example.com/entity/"); private final PropertyIdValue p2 = new PropertyIdValueImpl("P43", "http://example.com/entity/"); private final ValueSnak vs1 = new ValueSnakImpl(p1, p1); private final ValueSnak vs2 = new ValueSnakImpl(p1, p1); private final ValueSnak vs3 = new ValueSnakImpl(p2, p1); private final ValueSnak vs4 = new ValueSnakImpl(p1, p2); private final ValueSnak vsmt1 = new ValueSnakImpl(p1, new TermImpl("en", "foo")); private final ValueSnak vsmt2 = new ValueSnakImpl(p1, new MonolingualTextValueImpl("foo", "en")); private final SomeValueSnak svs1 = new SomeValueSnakImpl(p1); private final SomeValueSnak svs2 = new SomeValueSnakImpl(p1); private final SomeValueSnak svs3 = new SomeValueSnakImpl(p2); private final NoValueSnak nvs1 = new NoValueSnakImpl(p1); private final NoValueSnak nvs2 = new NoValueSnakImpl(p1); private final NoValueSnak nvs3 = new NoValueSnakImpl(p2); private final String JSON_NOVALUE_SNAK = "{\"snaktype\":\"novalue\",\"property\":\"P42\"}"; private final String JSON_SOMEVALUE_SNAK = "{\"snaktype\":\"somevalue\",\"property\":\"P42\"}"; private final String JSON_VALUE_SNAK = "{\"snaktype\":\"value\",\"property\":\"P42\",\"datatype\":\"wikibase-property\",\"datavalue\":{\"value\":{\"id\":\"P42\",\"numeric-id\":42,\"entity-type\":\"property\"},\"type\":\"wikibase-entityid\"}}"; private final String JSON_MONOLINGUAL_TEXT_VALUE_SNAK = "{\"snaktype\":\"value\",\"property\":\"P42\",\"datatype\":\"monolingualtext\",\"datavalue\":{\"value\":{\"language\":\"en\",\"text\":\"foo\"},\"type\":\"monolingualtext\"}}"; private final String JSON_SNAK_UNKNOWN_ID = "{\"snaktype\":\"value\",\"property\":\"P42\",\"datatype\":\"wikibase-funkyid\",\"datavalue\":{\"value\":{\"id\":\"FUNKY42\",\"entity-type\":\"funky\"},\"type\":\"wikibase-entityid\"}}"; private final String JSON_SNAK_UNKNOWN_DATAVALUE = "{\"snaktype\":\"value\",\"property\":\"P42\",\"datatype\":\"groovy\",\"datavalue\":{\"foo\":\"bar\",\"type\":\"groovyvalue\"}}"; @Test public void fieldsAreCorrect() { assertEquals(vs1.getPropertyId(), p1); assertEquals(vs1.getValue(), p1); } @Test public void snakHashBasedOnContent() { assertEquals(vs1.hashCode(), vs2.hashCode()); assertEquals(vsmt1.hashCode(), vsmt2.hashCode()); assertEquals(svs1.hashCode(), svs2.hashCode()); assertEquals(nvs1.hashCode(), nvs2.hashCode()); } @Test public void snakEqualityBasedOnType() { assertNotEquals(svs1, nvs1); assertNotEquals(nvs1, svs1); assertNotEquals(vs1, svs1); } @Test public void valueSnakEqualityBasedOnContent() { assertEquals(vs1, vs1); assertEquals(vs1, vs2); assertNotEquals(vs1, vs3); assertNotEquals(vs1, vs4); assertNotEquals(vs1, null); } @Test public void someValueSnakEqualityBasedOnContent() { assertEquals(svs1, svs1); assertEquals(svs1, svs2); assertNotEquals(svs1, svs3); assertNotEquals(svs1, null); assertEquals(vsmt1, vsmt2); } @Test public void 
noValueSnakEqualityBasedOnContent() { assertEquals(nvs1, nvs1); assertEquals(nvs1, nvs2); assertNotEquals(nvs1, nvs3); assertNotEquals(nvs1, null); } @Test(expected = NullPointerException.class) public void snakPropertyNotNull() { new SomeValueSnakImpl(null); } @Test(expected = NullPointerException.class) public void snakValueNotNull() { new ValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/"), null); } @Test public void testNoValueSnakToJava() throws IOException { assertEquals(nvs1, mapper.readValue(JSON_NOVALUE_SNAK, SnakImpl.class)); } @Test public void testNoValueSnakToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_NOVALUE_SNAK, mapper.writeValueAsString(nvs1)); } @Test public void testSomeValueSnakToJava() throws IOException { assertEquals(svs1, mapper.readValue(JSON_SOMEVALUE_SNAK, SnakImpl.class)); } @Test public void testSomeValueSnakToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_SOMEVALUE_SNAK, mapper.writeValueAsString(svs1)); } @Test public void testValueSnakToJava() throws IOException { assertEquals(vs1, mapper.readValue(JSON_VALUE_SNAK, SnakImpl.class)); } @Test public void testValueSnakToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_VALUE_SNAK, mapper.writeValueAsString(vs1)); } @Test public void testMonolingualTextValueSnakToJava() throws IOException { assertEquals(vsmt1, mapper.readValue(JSON_MONOLINGUAL_TEXT_VALUE_SNAK, SnakImpl.class)); assertEquals(vsmt2, mapper.readValue(JSON_MONOLINGUAL_TEXT_VALUE_SNAK, SnakImpl.class)); } @Test public void testMonolingualTextValueSnakToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_MONOLINGUAL_TEXT_VALUE_SNAK, mapper.writeValueAsString(vsmt1)); JsonComparator.compareJsonStrings(JSON_MONOLINGUAL_TEXT_VALUE_SNAK, mapper.writeValueAsString(vsmt2)); } @Test public void testDeserializeUnknownIdSnak() throws IOException { // We only require deserialization not to fail here mapper.readValue(JSON_SNAK_UNKNOWN_ID, SnakImpl.class); } @Test public void testDeserializeUnknownDatavalueSnak() throws IOException { // We only require deserialization not to fail here mapper.readValue(JSON_SNAK_UNKNOWN_DATAVALUE, SnakImpl.class); } } StatementDocumentAccessTest.java000066400000000000000000000201451444772566300411230ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import org.junit.Test; import org.mockito.internal.util.collections.Sets; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.interfaces.*; import static org.junit.Assert.*; /** * Test general statement access methods as implemented in * {@link StatementDocument}. * * @author Markus Kroetzsch * */ public class StatementDocumentAccessTest { private final static ItemIdValue q1 = Datamodel.makeWikidataItemIdValue("Q1"); private final static ItemIdValue q2 = Datamodel.makeWikidataItemIdValue("Q2"); private final static PropertyIdValue p1 = Datamodel .makeWikidataPropertyIdValue("P1"); private final static PropertyIdValue p2 = Datamodel .makeWikidataPropertyIdValue("P2"); private final static PropertyIdValue p3 = Datamodel .makeWikidataPropertyIdValue("P3"); @Test public void testFindUniqueStatements() { Statement s1 = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(q1).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(q1, p2) .withValue(q1).build(); Statement s3 = StatementBuilder.forSubjectAndProperty(q1, p2) .withValue(q2).build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s1) .withStatement(s2).withStatement(s3).build(); assertTrue(id.hasStatement(p1)); assertTrue(id.hasStatement("P1")); assertTrue(id.hasStatement(p2)); assertTrue(id.hasStatement("P2")); assertFalse(id.hasStatement(p3)); assertFalse(id.hasStatement("P3")); assertEquals(s1, id.findStatement(p1)); assertEquals(s1, id.findStatement("P1")); assertNull(id.findStatement(p2)); assertNull(id.findStatement("P2")); assertNull(id.findStatement(p3)); assertNull(id.findStatement("P3")); assertEquals(q1, id.findStatementValue(p1)); assertNull(id.findStatementValue(p2)); assertNull(id.findStatementValue(p3)); } @Test public void testHasStatementValue() { Statement s1 = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(q1).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(q2).build(); Statement s3 = StatementBuilder.forSubjectAndProperty(q1, p1) .withSomeValue().build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s1) .withStatement(s2).withStatement(s3).build(); assertTrue(id.hasStatementValue(p1, q2)); assertTrue(id.hasStatementValue("P1", q2)); assertTrue(id.hasStatementValue(p1, Sets.newSet(q1, p3))); assertFalse(id.hasStatementValue(p1, p3)); assertFalse(id.hasStatementValue("P2", q2)); } @Test public void testFindValueSnaks() { Statement s1 = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(q1).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(q1, p2) .withSomeValue().build(); Statement s3 = StatementBuilder.forSubjectAndProperty(q1, p3) .withNoValue().build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s1) .withStatement(s2).withStatement(s3).build(); assertEquals(s1, id.findStatement(p1)); assertEquals(s1, id.findStatement("P1")); assertEquals(s2, id.findStatement(p2)); assertEquals(s2, id.findStatement("P2")); assertEquals(s3, id.findStatement(p3)); assertEquals(s3, id.findStatement("P3")); assertEquals(q1, id.findStatementValue(p1)); assertNull(id.findStatementValue(p2)); assertNull(id.findStatementValue(p3)); } @Test public void testFindStatementItemIdValue() { Statement s = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(q1).build(); ItemDocument id = 
ItemDocumentBuilder.forItemId(q1).withStatement(s) .build(); assertEquals(q1, id.findStatementValue(p1)); assertEquals(q1, id.findStatementValue("P1")); assertEquals(q1, id.findStatementEntityIdValue(p1)); assertEquals(q1, id.findStatementEntityIdValue("P1")); assertEquals(q1, id.findStatementItemIdValue(p1)); assertEquals(q1, id.findStatementItemIdValue("P1")); assertNull(id.findStatementPropertyIdValue(p1)); assertNull(id.findStatementPropertyIdValue("P1")); } @Test public void testFindStatementPropertyIdValue() { Statement s = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(p2).build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s) .build(); assertEquals(p2, id.findStatementValue(p1)); assertEquals(p2, id.findStatementValue("P1")); assertEquals(p2, id.findStatementEntityIdValue(p1)); assertEquals(p2, id.findStatementEntityIdValue("P1")); assertEquals(p2, id.findStatementPropertyIdValue(p1)); assertEquals(p2, id.findStatementPropertyIdValue("P1")); assertNull(id.findStatementItemIdValue(p1)); assertNull(id.findStatementItemIdValue("P1")); } @Test public void testFindStatementTimeValue() { TimeValue v = Datamodel.makeTimeValue((byte) 2015, (byte) 10, (byte) 16, (byte) 16, (byte) 51, (byte) 23, TimeValue.PREC_SECOND, 0, 0, 0, TimeValue.CM_GREGORIAN_PRO); Statement s = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(v).build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s) .build(); assertEquals(v, id.findStatementValue(p1)); assertEquals(v, id.findStatementValue("P1")); assertEquals(v, id.findStatementTimeValue(p1)); assertEquals(v, id.findStatementTimeValue("P1")); } @Test public void testFindStatementGlobeCoordinatesValue() { GlobeCoordinatesValue v = Datamodel.makeGlobeCoordinatesValue(1.2, 2.3, 1, GlobeCoordinatesValue.GLOBE_MOON); Statement s = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(v).build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s) .build(); assertEquals(v, id.findStatementValue(p1)); assertEquals(v, id.findStatementValue("P1")); assertEquals(v, id.findStatementGlobeCoordinatesValue(p1)); assertEquals(v, id.findStatementGlobeCoordinatesValue("P1")); } @Test public void testFindStatementQuantityValue() { QuantityValue v = Datamodel.makeQuantityValue(1234, 1233, 1235); Statement s = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(v).build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s) .build(); assertEquals(v, id.findStatementValue(p1)); assertEquals(v, id.findStatementValue("P1")); assertEquals(v, id.findStatementQuantityValue(p1)); assertEquals(v, id.findStatementQuantityValue("P1")); } @Test public void testFindStatementMonolingualTextValue() { MonolingualTextValue v = Datamodel.makeMonolingualTextValue("Test", "en"); Statement s = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(v).build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s) .build(); assertEquals(v, id.findStatementValue(p1)); assertEquals(v, id.findStatementValue("P1")); assertEquals(v, id.findStatementMonolingualTextValue(p1)); assertEquals(v, id.findStatementMonolingualTextValue("P1")); } @Test public void testFindStatementStringValue() { StringValue v = Datamodel.makeStringValue("Test"); Statement s = StatementBuilder.forSubjectAndProperty(q1, p1) .withValue(v).build(); ItemDocument id = ItemDocumentBuilder.forItemId(q1).withStatement(s) .build(); assertEquals(v, id.findStatementValue(p1)); assertEquals(v, id.findStatementValue("P1")); 
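// Editorial comment (not in the archived source): the typed finder below succeeds because the main snak holds a StringValue; for a mismatched type it would return null, as testFindStatementItemIdValue above shows for findStatementPropertyIdValue.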
assertEquals(v, id.findStatementStringValue(p1)); assertEquals(v, id.findStatementStringValue("P1")); } } StatementDocumentUpdateImplTest.java000066400000000000000000000070421444772566300417670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import java.util.Collection; import java.util.Collections; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.StatementUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.StatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class StatementDocumentUpdateImplTest { private static final ItemIdValue JOHN = StatementUpdateImplTest.JOHN; static final StatementUpdate STATEMENTS = StatementUpdateBuilder.create().remove("ID123").build(); private static final Collection<SiteLink> NO_SITELINKS = Collections.emptyList(); private static final Collection<String> NO_REMOVED_SITELINKS = Collections.emptyList(); private static StatementDocumentUpdate create(ItemIdValue entityId, long revisionId, StatementUpdate statements) { return new ItemUpdateImpl(entityId, revisionId, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap(), statements, NO_SITELINKS, NO_REMOVED_SITELINKS); } @Test public void testFields() { StatementDocumentUpdate update = create(JOHN, 123, STATEMENTS); assertEquals(JOHN, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(STATEMENTS, update.getStatements()); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> create(JOHN, 0, null)); assertThrows(IllegalArgumentException.class, () -> create(JOHN, 0, StatementUpdateBuilder.create().add(StatementUpdateImplTest.RITA_HAS_BROWN_HAIR).build())); assertThrows(IllegalArgumentException.class, () -> create(JOHN, 0, StatementUpdateBuilder.create() .replace(StatementUpdateImplTest.RITA_HAS_BROWN_HAIR.withStatementId("ID99")).build())); } @Test public void testEmpty() { assertFalse(create(JOHN, 0, STATEMENTS).isEmpty()); assertTrue(create(JOHN, 0, StatementUpdate.EMPTY).isEmpty()); } @Test public void testEquality() { StatementDocumentUpdate update = create(JOHN, 0, STATEMENTS); assertTrue(update.equals(update)); assertTrue(update.equals(create(JOHN, 0, StatementUpdateBuilder.create().remove("ID123").build()))); assertFalse(update.equals(create(JOHN, 123, STATEMENTS))); assertFalse(update.equals(create(JOHN, 0, StatementUpdate.EMPTY))); } @Test public void
testHashCode() { StatementDocumentUpdate update1 = create(JOHN, 123, STATEMENTS); StatementDocumentUpdate update2 = create(JOHN, 123, StatementUpdateBuilder.create().remove("ID123").build()); assertEquals(update1.hashCode(), update2.hashCode()); } } StatementGroupTest.java000066400000000000000000000135421444772566300373220ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.*; import java.util.*; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; public class StatementGroupTest { private EntityIdValue subject = new ItemIdValueImpl("Q42", "http://wikidata.org/entity/"); private PropertyIdValue property = new PropertyIdValueImpl("P42", "http://wikidata.org/entity/"); private Snak mainSnak = new ValueSnakImpl(property, subject); private Statement statement1 = new StatementImpl("MyId", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); private Statement statement2 = new StatementImpl("MyId", StatementRank.PREFERRED, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); private Statement statementEmptyId = new StatementImpl("", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); private Statement statementDeprecated = new StatementImpl("DepId", StatementRank.DEPRECATED, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); private StatementGroup sg1 = new StatementGroupImpl(Collections.singletonList(statement1)); private StatementGroup sg2 = new StatementGroupImpl(Collections.singletonList(statement1)); @Test public void implementsCollection() { assertFalse(sg1.isEmpty()); assertEquals(1, sg1.size()); assertTrue(sg1.contains(statement1)); assertFalse(sg1.contains(statement2)); assertTrue(sg1.iterator().hasNext()); assertEquals(sg1.iterator().next(), statement1); assertArrayEquals(new Statement[] {statement1}, sg1.toArray()); } @Test public void statementListIsCorrect() { assertEquals(sg1.getStatements(), Collections.singletonList(statement1)); } @Test public void getBestStatementsWithPreferred() { assertEquals( new StatementGroupImpl(Collections.singletonList(statement2)), new StatementGroupImpl(Arrays.asList(statement1, statement2)).getBestStatements() ); } @Test public void getBestStatementsWithoutPreferred() { assertEquals( new StatementGroupImpl(Collections.singletonList(statement1)), new StatementGroupImpl(Collections.singletonList(statement1)).getBestStatements() ); }
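/*
 * Editorial sketch (added in editing, not part of the archived sources): the
 * getBestStatements* tests encode the Wikibase rank semantics exercised here:
 * PREFERRED statements shadow NORMAL ones, a group without any PREFERRED
 * statement falls back to its NORMAL statements, and a group holding only
 * DEPRECATED statements has no best statements at all, so getBestStatements()
 * returns null, as the next test checks.
 */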
@Test public void getBestStatementsEmpty() { assertNull( new StatementGroupImpl(Collections.singletonList(statementDeprecated)).getBestStatements() ); } @Test public void propertyIsCorrect() { assertEquals(sg1.getProperty(), property); } @Test public void subjectIsCorrect() { assertEquals(sg1.getSubject(), subject); } @Test public void equalityBasedOnContent() { List<Statement> statements = new ArrayList<>(); statements.add(statement1); statements.add(statement2); StatementGroup sg3 = new StatementGroupImpl(statements); assertEquals(sg1, sg1); assertEquals(sg1, sg2); assertNotEquals(sg1, sg3); assertNotEquals(sg1, null); assertNotEquals(sg1, this); } @Test public void hashBasedOnContent() { assertEquals(sg1.hashCode(), sg2.hashCode()); } @Test(expected = NullPointerException.class) public void statementListNotNull() { new StatementGroupImpl(null); } @Test(expected = IllegalArgumentException.class) public void statementListNotEmpty() { new StatementGroupImpl(Collections.emptyList()); } @Test(expected = IllegalArgumentException.class) public void statementListRequiresSameSubject() { List<Statement> statements = new ArrayList<>(); statements.add(statement1); EntityIdValue subject2 = new ItemIdValueImpl("Q23", "http://wikidata.org/entity/"); Snak mainSnak = new NoValueSnakImpl(property); Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), subject2); statements.add(s2); new StatementGroupImpl(statements); } @Test(expected = IllegalArgumentException.class) public void statementListRequiresSameProperty() { List<Statement> statements = new ArrayList<>(); statements.add(statement1); PropertyIdValue property2 = new PropertyIdValueImpl("P23", "http://wikidata.org/entity/"); Snak mainSnak = new NoValueSnakImpl(property2); Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); statements.add(s2); new StatementGroupImpl(statements); } @Test public void addSameStatementToGroup() { StatementGroup added = sg1.withStatement(statement1); assertEquals(sg1, added); } @Test public void addStatementWithMatchingId() { StatementGroup added = sg1.withStatement(statement2); assertEquals(new StatementGroupImpl(Collections.singletonList(statement2)), added); } @Test public void addStatementEmptyId() { StatementGroup initial = new StatementGroupImpl(Collections.singletonList(statementEmptyId)); StatementGroup added = initial.withStatement(statementEmptyId); assertEquals(new StatementGroupImpl(Arrays.asList(statementEmptyId, statementEmptyId)), added); } } StatementImplTest.java000066400000000000000000000153221444772566300371250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import static org.junit.Assert.*; import java.io.IOException; import java.util.Collections; import java.util.List; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.interfaces.*; public class StatementImplTest { private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/"); private final EntityIdValue subject = new ItemIdValueImpl("Q1", "http://example.com/entity/"); private final EntityIdValue value = new ItemIdValueImpl("Q42", "http://example.com/entity/"); private final PropertyIdValue property = new PropertyIdValueImpl("P42", "http://example.com/entity/"); private final ValueSnak mainSnak = new ValueSnakImpl(property, value); private final List<SnakGroup> qualifiers = Collections.singletonList(new SnakGroupImpl(Collections.singletonList(mainSnak))); private final List<Reference> references = Collections.singletonList(new ReferenceImpl(qualifiers)); private final Claim claim = new ClaimImpl(subject, mainSnak, qualifiers); private final Statement s1 = new StatementImpl("MyId", StatementRank.PREFERRED, mainSnak, qualifiers, references, subject); private final Statement s2 = new StatementImpl("MyId", StatementRank.PREFERRED, mainSnak, qualifiers, references, subject); private final String JSON_STATEMENT = "{\"rank\":\"preferred\",\"references\":[{\"snaks\":{\"P42\":[{\"property\":\"P42\",\"datatype\":\"wikibase-item\",\"datavalue\":{\"value\":{\"id\":\"Q42\",\"numeric-id\":42,\"entity-type\":\"item\"},\"type\":\"wikibase-entityid\"},\"snaktype\":\"value\"}]},\"snaks-order\":[\"P42\"]}],\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"datatype\":\"wikibase-item\",\"datavalue\":{\"value\":{\"id\":\"Q42\",\"numeric-id\":42,\"entity-type\":\"item\"},\"type\":\"wikibase-entityid\"},\"snaktype\":\"value\"},\"qualifiers-order\":[\"P42\"],\"type\":\"statement\",\"qualifiers\":{\"P42\":[{\"property\":\"P42\",\"datatype\":\"wikibase-item\",\"datavalue\":{\"value\":{\"id\":\"Q42\",\"numeric-id\":42,\"entity-type\":\"item\"},\"type\":\"wikibase-entityid\"},\"snaktype\":\"value\"}]}}"; private final Statement smallStatement = new StatementImpl("MyId", StatementRank.PREFERRED, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); private final String JSON_SMALL_STATEMENT = "{\"rank\":\"preferred\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"datatype\":\"wikibase-item\",\"datavalue\":{\"value\":{\"id\":\"Q42\",\"numeric-id\":42,\"entity-type\":\"item\"},\"type\":\"wikibase-entityid\"},\"snaktype\":\"value\"},\"type\":\"statement\"}"; @Test public void gettersWorking() { assertEquals(s1.getClaim(), claim); assertEquals(s1.getMainSnak(), mainSnak); assertEquals(s1.getQualifiers(), qualifiers); assertEquals(s1.getReferences(), references); assertEquals(s1.getRank(), StatementRank.PREFERRED); assertEquals(s1.getStatementId(), "MyId"); assertEquals(s1.getValue(), value); assertEquals(s1.getSubject(), subject); } @Test(expected = NullPointerException.class) public void mainSnakNotNull() { new StatementImpl("MyId", StatementRank.NORMAL, null, Collections.emptyList(), Collections.emptyList(), value); } @Test public void referencesCanBeNull() { Statement statement = new StatementImpl("MyId", StatementRank.NORMAL, mainSnak, Collections.emptyList(), null, value); assertTrue(statement.getReferences().isEmpty()); } @Test(expected = NullPointerException.class) public void rankNotNull() { new
StatementImpl("MyId", null, mainSnak, Collections.emptyList(), Collections.emptyList(), value); } @Test public void idCanBeNull() { Statement statement = new StatementImpl(null, StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), value); assertEquals(statement.getStatementId(), ""); } @Test public void withId() { Statement statement = new StatementImpl(null, StatementRank.NORMAL, claim.getMainSnak(), claim.getQualifiers(), Collections.emptyList(), claim.getSubject()); Statement withId = statement.withStatementId("some id"); assertEquals("some id", withId.getStatementId()); } @Test public void hashBasedOnContent() { assertEquals(s1.hashCode(), s2.hashCode()); } @Test public void equalityBasedOnContent() { Statement sDiffClaim = new StatementImpl("MyId", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), new ItemIdValueImpl("Q43", "http://wikidata.org/entity/")); Statement sDiffReferences = new StatementImpl("MyId", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.singletonList(new ReferenceImpl( Collections.singletonList(new SnakGroupImpl(Collections.singletonList(mainSnak))) )), value); Statement sDiffRank = new StatementImpl("MyId", StatementRank.PREFERRED, mainSnak, Collections.emptyList(), Collections.emptyList(), value); Statement sDiffId = new StatementImpl("MyOtherId", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), value); assertEquals(s1, s1); assertEquals(s1, s2); assertNotEquals(s1, sDiffClaim); assertNotEquals(s1, sDiffReferences); assertNotEquals(s1, sDiffRank); assertNotEquals(s1, sDiffId); assertNotEquals(s1, null); assertNotEquals(s1, this); } @Test public void testStatementToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_STATEMENT, mapper.writeValueAsString(s1)); } @Test public void testStatementToJava() throws IOException { assertEquals(s1, mapper.readValue(JSON_STATEMENT, StatementImpl.PreStatement.class).withSubject(subjet)); } @Test public void testSmallStatementToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_SMALL_STATEMENT, mapper.writeValueAsString(smallStatement)); } @Test public void testSmallStatementToJava() throws IOException { assertEquals(smallStatement, mapper.readValue(JSON_SMALL_STATEMENT, StatementImpl.PreStatement.class).withSubject(subjet)); } } StatementUpdateImplTest.java000066400000000000000000000240511444772566300402670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.toJson; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.StringValue; public class StatementUpdateImplTest { private static final Collection<Statement> NO_STATEMENTS = Collections.emptyList(); private static final Collection<String> NO_IDS = Collections.emptyList(); static final ItemIdValue JOHN = Datamodel.makeWikidataItemIdValue("Q1"); private static final EntityIdValue RITA = Datamodel.makeWikidataItemIdValue("Q2"); private static final PropertyIdValue HAIR = Datamodel.makeWikidataPropertyIdValue("P1"); private static final PropertyIdValue EYES = Datamodel.makeWikidataPropertyIdValue("P2"); private static final StringValue BROWN = Datamodel.makeStringValue("brown"); private static final StringValue SILVER = Datamodel.makeStringValue("silver"); private static final StringValue BLUE = Datamodel.makeStringValue("blue"); private static final Statement NOBODY_HAS_BROWN_HAIR = StatementBuilder .forSubjectAndProperty(ItemIdValue.NULL, HAIR) .withValue(BROWN) .build(); private static final Statement NOBODY_ALREADY_HAS_BROWN_HAIR = NOBODY_HAS_BROWN_HAIR.withStatementId("ID1"); static final Statement JOHN_HAS_BROWN_HAIR = StatementBuilder .forSubjectAndProperty(JOHN, HAIR) .withValue(BROWN) .build(); static final Statement RITA_HAS_BROWN_HAIR = StatementBuilder .forSubjectAndProperty(RITA, HAIR) .withValue(BROWN) .build(); private static final Statement JOHN_HAS_SILVER_HAIR = StatementBuilder .forSubjectAndProperty(JOHN, HAIR) .withValue(SILVER) .build(); private static final Statement JOHN_ALREADY_HAS_SILVER_HAIR = JOHN_HAS_SILVER_HAIR.withStatementId("ID5"); private static final Statement JOHN_HAS_BLUE_EYES = StatementBuilder .forSubjectAndProperty(JOHN, EYES) .withValue(BLUE) .build(); private static final Statement JOHN_ALREADY_HAS_BLUE_EYES = JOHN_HAS_BLUE_EYES.withStatementId("ID8"); @Test public void testFields() { StatementUpdate update = new StatementUpdateImpl( Arrays.asList(JOHN_HAS_BROWN_HAIR), Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES), Arrays.asList(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId())); assertEquals(Arrays.asList(JOHN_HAS_BROWN_HAIR), update.getAdded()); assertThat(update.getReplaced().keySet(), containsInAnyOrder(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId())); assertEquals(JOHN_ALREADY_HAS_BLUE_EYES, update.getReplaced().get(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId())); assertThat(update.getRemoved(), containsInAnyOrder(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId())); }
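/*
 * Editor's sketch (not part of the original test suite): the fixtures above
 * distinguish "fresh" statements without a statement ID, which are only legal
 * in the added collection, from "already" statements carrying an ID, which
 * belong in the replaced and removed collections. Assuming the constructor
 * accepts arbitrary collections, as the Collection-typed fixtures above
 * suggest, an update can be rebuilt from its own accessors, because equality
 * is content-based (see testEquality below).
 */
@Test
public void testRebuildFromAccessors() {
	StatementUpdate update = new StatementUpdateImpl(
			Arrays.asList(JOHN_HAS_BROWN_HAIR), // added: carries no statement ID yet
			Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES), // replaced: identified by ID "ID8"
			Arrays.asList(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId())); // removed by ID
	StatementUpdate rebuilt = new StatementUpdateImpl(
			update.getAdded(),
			update.getReplaced().values(),
			update.getRemoved());
	assertEquals(update, rebuilt);
}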
@Test public void testValidation() { new StatementUpdateImpl(NO_STATEMENTS, NO_STATEMENTS, NO_IDS); assertThrows(NullPointerException.class, () -> new StatementUpdateImpl(null, NO_STATEMENTS, NO_IDS)); assertThrows(NullPointerException.class, () -> new StatementUpdateImpl(NO_STATEMENTS, null, NO_IDS)); assertThrows(NullPointerException.class, () -> new StatementUpdateImpl(NO_STATEMENTS, NO_STATEMENTS, null)); assertThrows(NullPointerException.class, () -> new StatementUpdateImpl( Arrays.asList(JOHN_HAS_BROWN_HAIR, null), NO_STATEMENTS, NO_IDS)); assertThrows(NullPointerException.class, () -> new StatementUpdateImpl( NO_STATEMENTS, Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES, null), NO_IDS)); assertThrows(NullPointerException.class, () -> new StatementUpdateImpl( NO_STATEMENTS, NO_STATEMENTS, Arrays.asList(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId(), null))); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl( Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES), NO_STATEMENTS, NO_IDS)); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl( NO_STATEMENTS, Arrays.asList(JOHN_HAS_BLUE_EYES), NO_IDS)); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl( NO_STATEMENTS, NO_STATEMENTS, Arrays.asList(" "))); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl( NO_STATEMENTS, Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES, JOHN_ALREADY_HAS_BLUE_EYES), NO_IDS)); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl(NO_STATEMENTS, NO_STATEMENTS, Arrays.asList( JOHN_ALREADY_HAS_BLUE_EYES.getStatementId(), JOHN_ALREADY_HAS_BLUE_EYES.getStatementId()))); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl(NO_STATEMENTS, Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES), Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId()))); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl( Arrays.asList(RITA_HAS_BROWN_HAIR), Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES), NO_IDS)); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl( Arrays.asList(NOBODY_HAS_BROWN_HAIR), NO_STATEMENTS, NO_IDS)); assertThrows(IllegalArgumentException.class, () -> new StatementUpdateImpl( NO_STATEMENTS, Arrays.asList(NOBODY_ALREADY_HAS_BROWN_HAIR), NO_IDS)); } @Test public void testImmutability() { List<Statement> added = new ArrayList<>(); List<Statement> replaced = new ArrayList<>(); List<String> removed = new ArrayList<>(); added.add(JOHN_HAS_BROWN_HAIR); replaced.add(JOHN_ALREADY_HAS_BLUE_EYES); removed.add(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId()); StatementUpdate update = new StatementUpdateImpl(added, replaced, removed); assertThrows(UnsupportedOperationException.class, () -> update.getAdded().add(JOHN_HAS_SILVER_HAIR)); assertThrows(UnsupportedOperationException.class, () -> update.getReplaced() .put(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId(), JOHN_ALREADY_HAS_SILVER_HAIR)); assertThrows(UnsupportedOperationException.class, () -> update.getRemoved().add(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId())); added.add(JOHN_HAS_SILVER_HAIR); replaced.add(JOHN_ALREADY_HAS_SILVER_HAIR); removed.add(JOHN_ALREADY_HAS_BLUE_EYES.getStatementId()); assertEquals(1, update.getAdded().size()); assertEquals(1, update.getReplaced().size()); assertEquals(1, update.getRemoved().size()); } @Test public void testEmpty() { List<Statement> added = Arrays.asList(JOHN_HAS_BROWN_HAIR); List<Statement> replaced = Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES); List<String> removed = Arrays.asList(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId()); assertTrue(new StatementUpdateImpl(NO_STATEMENTS, NO_STATEMENTS, NO_IDS).isEmpty()); assertFalse(new StatementUpdateImpl(added, NO_STATEMENTS, NO_IDS).isEmpty()); assertFalse(new StatementUpdateImpl(NO_STATEMENTS, replaced, NO_IDS).isEmpty()); assertFalse(new StatementUpdateImpl(NO_STATEMENTS, NO_STATEMENTS, removed).isEmpty()); } @Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { List<Statement> added = Arrays.asList(JOHN_HAS_BROWN_HAIR); List<Statement> replaced = Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES); List<String> removed = Arrays.asList(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId()); StatementUpdate update = new StatementUpdateImpl(added, replaced, removed); assertFalse(update.equals(null)); assertFalse(update.equals(this)); assertTrue(update.equals(update)); assertTrue(update.equals(new StatementUpdateImpl(added, replaced, removed))); assertFalse(update.equals(new StatementUpdateImpl(NO_STATEMENTS, replaced, removed))); assertFalse(update.equals(new StatementUpdateImpl(added, NO_STATEMENTS, removed))); assertFalse(update.equals(new StatementUpdateImpl(added, replaced, NO_IDS))); } @Test public void testHashCode() { StatementUpdate update1 = new StatementUpdateImpl( Arrays.asList(JOHN_HAS_BROWN_HAIR), Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES), Arrays.asList(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId())); StatementUpdate update2 = new StatementUpdateImpl( Arrays.asList(JOHN_HAS_BROWN_HAIR), Arrays.asList(JOHN_ALREADY_HAS_BLUE_EYES), Arrays.asList(JOHN_ALREADY_HAS_SILVER_HAIR.getStatementId())); assertEquals(update1.hashCode(), update2.hashCode()); } @Test public void testJson() { assertThat(StatementUpdateBuilder.create().build(), producesJson("[]")); assertThat(StatementUpdateBuilder.create().add(JOHN_HAS_BROWN_HAIR).build(), producesJson("[" + toJson(JOHN_HAS_BROWN_HAIR) + "]")); assertThat(StatementUpdateBuilder.create().replace(JOHN_ALREADY_HAS_BLUE_EYES).build(), producesJson("[" + toJson(JOHN_ALREADY_HAS_BLUE_EYES) + "]")); assertThat(StatementUpdateBuilder.create().remove("ID123").build(), producesJson("[{'id':'ID123','remove':''}]")); } }
StringValueImplTest.java000066400000000000000000000041271444772566300374250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import static org.junit.Assert.*; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import java.io.IOException; public class StringValueImplTest { private final ObjectMapper mapper = new ObjectMapper(); private final StringValue s1 = new StringValueImpl("some string"); private final StringValue s2 = new StringValueImpl("some string"); private final String JSON_STRING_VALUE = "{\"type\":\"string\",\"value\":\"some string\"}"; @Test public void stringIsCorrect() { assertEquals(s1.getString(), "some string"); } @Test public void equalityBasedOnContent() { StringValue s3 = new StringValueImpl("another string"); assertEquals(s1, s1); assertEquals(s1, s2); assertNotEquals(s1, s3); assertNotEquals(s1, null); assertNotEquals(s1, this); } @Test public void hashBasedOnContent() { assertEquals(s1.hashCode(), s2.hashCode()); } @Test(expected = NullPointerException.class) public void stringNotNull() { new StringValueImpl(null); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_STRING_VALUE, mapper.writeValueAsString(s1)); } @Test public void testToJava() throws IOException { assertEquals(s1, mapper.readValue(JSON_STRING_VALUE, ValueImpl.class)); } } TermImplTest.java000066400000000000000000000047351444772566300360760ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import java.io.IOException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; public class TermImplTest { private final ObjectMapper mapper = new ObjectMapper(); private final MonolingualTextValue mt1 = new TermImpl("en", "some string"); private final MonolingualTextValue mt2 = new TermImpl("en", "some string"); private final String JSON_TERM = "{\"language\":\"en\",\"value\":\"some string\"}"; @Test public void dataIsCorrect() { assertEquals(mt1.getText(), "some string"); assertEquals(mt1.getLanguageCode(), "en"); } @Test public void equalityBasedOnContent() { /* TermImpl arguments are (languageCode, text), as in mt1 above */ MonolingualTextValue mtDiffString = new TermImpl( "en", "another string"); MonolingualTextValue mtDiffLanguageCode = new TermImpl( "en-GB", "some string"); assertEquals(mt1, mt1); assertEquals(mt1, mt2); assertNotEquals(mt1, mtDiffString); assertNotEquals(mt1, mtDiffLanguageCode); assertNotEquals(mt1, null); assertNotEquals(mt1, this); } @Test public void hashBasedOnContent() { assertEquals(mt1.hashCode(), mt2.hashCode()); } @Test(expected = NullPointerException.class) public void textNotNull() { new TermImpl("en", null); } @Test(expected = NullPointerException.class) public void languageCodeNotNull() { new TermImpl(null, "some text"); } @Test public void testToJava() throws IOException { assertEquals(mt1, mapper.readValue(JSON_TERM, TermImpl.class)); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_TERM, mapper.writeValueAsString(mt1)); } }
TermUpdateImplTest.java000066400000000000000000000126041444772566300372330ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.wikidata.wdtk.datamodel.implementation.JsonTestUtils.producesJson; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; public class TermUpdateImplTest { private static final MonolingualTextValue EN = Datamodel.makeMonolingualTextValue("hello", "en"); private static final MonolingualTextValue EN2 = Datamodel.makeMonolingualTextValue("hi", "en"); private static final MonolingualTextValue SK = Datamodel.makeMonolingualTextValue("ahoj", "sk"); private static final MonolingualTextValue CS = Datamodel.makeMonolingualTextValue("nazdar", "cs"); @Test public void testFields() { TermUpdate update = new TermUpdateImpl(Arrays.asList(EN, SK), Arrays.asList("de", "fr")); assertThat(update.getRemoved(), containsInAnyOrder("de", "fr")); assertThat(update.getModified().keySet(), containsInAnyOrder("sk", "en")); assertEquals(EN, update.getModified().get("en")); assertEquals(SK, update.getModified().get("sk")); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> new TermUpdateImpl(null, Collections.emptyList())); assertThrows(NullPointerException.class, () -> new TermUpdateImpl(Collections.emptyList(), null)); assertThrows(NullPointerException.class, () -> new TermUpdateImpl(Arrays.asList(EN, null), Collections.emptyList())); assertThrows(NullPointerException.class, () -> new TermUpdateImpl(Collections.emptyList(), Arrays.asList("en", null))); assertThrows(IllegalArgumentException.class, () -> new TermUpdateImpl(Arrays.asList(EN, EN2), Collections.emptyList())); assertThrows(IllegalArgumentException.class, () -> new TermUpdateImpl(Arrays.asList(EN, EN), Collections.emptyList())); assertThrows(IllegalArgumentException.class, () -> new TermUpdateImpl(Collections.emptyList(), Arrays.asList("en", ""))); assertThrows(IllegalArgumentException.class, () -> new TermUpdateImpl(Collections.emptyList(), Arrays.asList("en", "en"))); assertThrows(IllegalArgumentException.class, () -> new TermUpdateImpl(Arrays.asList(EN), Arrays.asList("en"))); } @Test public void testImmutability() { List<MonolingualTextValue> modified = new ArrayList<>(); List<String> removed = new ArrayList<>(); modified.add(EN); removed.add("sk"); TermUpdate update = new TermUpdateImpl(modified, removed); assertThrows(UnsupportedOperationException.class, () -> update.getModified().put("cs", CS)); assertThrows(UnsupportedOperationException.class, () -> update.getRemoved().add("fr")); modified.add(CS); removed.add("fr"); assertEquals(1, update.getModified().size()); assertEquals(1, update.getRemoved().size()); } @Test public void testEmpty() { assertTrue(new TermUpdateImpl(Collections.emptyList(), Collections.emptyList()).isEmpty()); assertFalse(new TermUpdateImpl(Arrays.asList(EN), Collections.emptyList()).isEmpty()); assertFalse(new TermUpdateImpl(Collections.emptyList(), Arrays.asList("sk")).isEmpty()); } @Test @SuppressWarnings("unlikely-arg-type") public void testEquality() { List<MonolingualTextValue> modified = Arrays.asList(EN); List<String> removed = Arrays.asList("sk"); TermUpdate update = new TermUpdateImpl(modified, removed); assertFalse(update.equals(null)); assertFalse(update.equals(this)); assertTrue(update.equals(update)); assertTrue(update.equals(new TermUpdateImpl(modified, removed))); assertFalse(update.equals(new TermUpdateImpl(Collections.emptyList(), removed))); assertFalse(update.equals(new TermUpdateImpl(modified, Collections.emptyList()))); } @Test public void testHashCode() { TermUpdate update1 = new TermUpdateImpl(Arrays.asList(EN, SK), Arrays.asList("cs", "fr")); TermUpdate update2 = new TermUpdateImpl(Arrays.asList(EN, SK), Arrays.asList("cs", "fr")); assertEquals(update1.hashCode(), update2.hashCode()); } @Test public void testJson() { assertThat(TermUpdateBuilder.create().build(), producesJson("{}")); assertThat(TermUpdateBuilder.create().put(EN).build(), producesJson("{'en':{'language':'en','value':'hello'}}")); assertThat(TermUpdateBuilder.create().remove("en").build(), producesJson("{'en':{'language':'en','remove':''}}")); } }
TermedDocumentUpdateImplTest.java000066400000000000000000000146761444772566300412540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anEmptyMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.AliasUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.StatementUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocumentUpdate; public class TermedDocumentUpdateImplTest { private static final ItemIdValue JOHN = StatementUpdateImplTest.JOHN; private static final StatementUpdate STATEMENTS = StatementDocumentUpdateImplTest.STATEMENTS; private static final MonolingualTextValue EN = Datamodel.makeMonolingualTextValue("hello", "en"); private static final MonolingualTextValue SK = Datamodel.makeMonolingualTextValue("ahoj", "sk"); private static final TermUpdate LABELS = TermUpdateBuilder.create().remove("de").build(); static final TermUpdate DESCRIPTIONS = TermUpdateBuilder.create().remove("en").build(); static final AliasUpdate ALIAS = AliasUpdateBuilder.create().add(EN).build(); static final Map<String, AliasUpdate> ALIASES = new HashMap<>(); static { ALIASES.put("en", ALIAS); } private static TermedStatementDocumentUpdate create( ItemIdValue entityId, long revisionId, StatementUpdate statements, TermUpdate labels, TermUpdate descriptions, Map<String, AliasUpdate> aliases) { return new ItemUpdateImpl(entityId, revisionId, labels, descriptions, aliases, statements, Collections.emptyList(), Collections.emptyList()); } @Test public void testFields() { TermedStatementDocumentUpdate update = create(JOHN, 123, STATEMENTS, LABELS, DESCRIPTIONS, ALIASES); assertEquals(JOHN, update.getEntityId()); assertEquals(123, update.getBaseRevisionId()); assertSame(STATEMENTS, update.getStatements()); assertSame(LABELS, update.getLabels()); assertSame(DESCRIPTIONS, update.getDescriptions()); assertEquals(ALIASES, update.getAliases()); } @Test public void testValidation() { assertThrows(NullPointerException.class, () -> create(JOHN, 123, StatementUpdate.EMPTY, TermUpdate.EMPTY, null, ALIASES)); assertThrows(NullPointerException.class, () -> create(JOHN, 123, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, null)); Map<String, AliasUpdate> aliases = new HashMap<>(); aliases.put(null, AliasUpdate.EMPTY); assertThrows(NullPointerException.class, () -> create(JOHN, 123, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, aliases)); aliases.clear(); aliases.put("en", null); assertThrows(NullPointerException.class, () -> create(JOHN, 123, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, aliases)); aliases.clear(); aliases.put(" ", AliasUpdate.EMPTY); assertThrows(IllegalArgumentException.class, () -> create(JOHN, 123, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, aliases)); aliases.clear(); aliases.put("de", AliasUpdateBuilder.create().add(EN).build()); assertThrows(IllegalArgumentException.class, () -> create(JOHN, 123, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, aliases)); aliases.clear(); aliases.put("en", AliasUpdate.EMPTY); assertThat( create(JOHN, 123, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, aliases).getAliases(), is(anEmptyMap())); } @Test public void testImmutability() { Map<String, AliasUpdate> aliases = new HashMap<>(); aliases.put("en", AliasUpdateBuilder.create().add(EN).build()); TermedStatementDocumentUpdate update = create( JOHN, 0, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, aliases); assertThrows(UnsupportedOperationException.class, () -> update.getAliases().remove("en")); aliases.put("sk", AliasUpdateBuilder.create().add(SK).build()); assertEquals(1, update.getAliases().size()); } @Test public void testEmpty() { assertTrue(create(JOHN, 0, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, Collections.emptyMap()) .isEmpty()); assertFalse(create(JOHN, 0, StatementUpdate.EMPTY, LABELS, TermUpdate.EMPTY, Collections.emptyMap()).isEmpty()); assertFalse(create(JOHN, 0, StatementUpdate.EMPTY, TermUpdate.EMPTY, DESCRIPTIONS, Collections.emptyMap()) .isEmpty()); assertFalse(create(JOHN, 0, StatementUpdate.EMPTY, TermUpdate.EMPTY, TermUpdate.EMPTY, ALIASES).isEmpty()); } @Test public void testEquality() { TermedStatementDocumentUpdate update = create(JOHN, 0, STATEMENTS, LABELS, DESCRIPTIONS, ALIASES); assertTrue(update.equals(update)); assertTrue(update.equals(create(JOHN, 0, STATEMENTS, LABELS, DESCRIPTIONS, ALIASES))); assertFalse(update.equals(create(JOHN, 0, STATEMENTS, TermUpdate.EMPTY, DESCRIPTIONS, ALIASES))); assertFalse(update.equals(create(JOHN, 0, STATEMENTS, LABELS, TermUpdate.EMPTY, ALIASES))); assertFalse(update.equals(create(JOHN, 0, STATEMENTS, LABELS, DESCRIPTIONS, Collections.emptyMap()))); } @Test public void testHashCode() { TermedStatementDocumentUpdate update1 = create(JOHN, 123, STATEMENTS, LABELS, DESCRIPTIONS, ALIASES); TermedStatementDocumentUpdate update2 = create(JOHN, 123, STATEMENTS, LABELS, DESCRIPTIONS, ALIASES); assertEquals(update1.hashCode(), update2.hashCode()); } }
TermedStatementDocumentImplTest.java000066400000000000000000000057031444772566300417670ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.*; import static org.junit.Assert.*; public class TermedStatementDocumentImplTest { private Map<String, List<Statement>> initialStatements = null; private Statement statementA = null; private Statement statementEmptyId = null; private String statementIdA = "myIdA"; private String statementIdB = "myIdB"; @Before public void setUp() { ItemIdValue subject = new ItemIdValueImpl("Q42", "http://wikidata.org/entity/"); PropertyIdValue property = new PropertyIdValueImpl("P42", "http://wikidata.org/entity/"); Snak mainSnak = new ValueSnakImpl(property, subject); statementA = new StatementImpl(statementIdA, StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); Statement statementB = new StatementImpl(statementIdB, StatementRank.PREFERRED, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); statementEmptyId = new StatementImpl("", StatementRank.NORMAL, mainSnak, Collections.emptyList(), Collections.emptyList(), subject); List<Statement> statements = Arrays.asList(statementA, statementB); initialStatements = new HashMap<>(); initialStatements.put(property.getId(), statements); } @Test public void removeNoStatements() { Map<String, List<Statement>> removed = TermedStatementDocumentImpl.removeStatements( Collections.emptySet(), initialStatements); assertEquals(removed, initialStatements); } @Test public void removeAllStatements() { Set<String> toRemove = new HashSet<>(); toRemove.add(statementIdA); toRemove.add(statementIdB); Map<String, List<Statement>> removed = TermedStatementDocumentImpl.removeStatements(toRemove, initialStatements); assertTrue(removed.isEmpty()); } @Test public void addExistingStatement() { Map<String, List<Statement>> added = TermedStatementDocumentImpl.addStatementToGroups(statementA, initialStatements); assertEquals(initialStatements, added); } @Test public void addSameStatementWithoutId() { Map<String, List<Statement>> added = TermedStatementDocumentImpl.addStatementToGroups(statementEmptyId, initialStatements); assertNotEquals(initialStatements, added); } }
TimeValueImplTest.java000066400000000000000000000152721444772566300370600ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import java.io.IOException; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public class TimeValueImplTest { private final ObjectMapper mapper = new ObjectMapper(); private final TimeValue t1 = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); private final TimeValue t2 = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); private final TimeValue t3 = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, "foo"); private final String JSON_TIME_VALUE = "{\"value\":{\"time\":\"+2007-05-12T10:45:00Z\",\"timezone\":60,\"before\":0,\"after\":1,\"precision\":14,\"calendarmodel\":\"http://www.wikidata.org/entity/Q1985727\"},\"type\":\"time\"}"; @Test public void storedValuesCorrect() { assertEquals(t1.getYear(), 2007); assertEquals(t1.getMonth(), 5); assertEquals(t1.getDay(), 12); assertEquals(t1.getHour(), 10); assertEquals(t1.getMinute(), 45); assertEquals(t1.getSecond(), 0); assertEquals(t1.getPrecision(), TimeValue.PREC_SECOND); assertEquals(t1.getBeforeTolerance(), 0); assertEquals(t1.getAfterTolerance(), 1); assertEquals(t1.getTimezoneOffset(), 60); assertEquals(t1.getPreferredCalendarModel(), TimeValue.CM_GREGORIAN_PRO); } @Test public void getPreferredCalendarModelItemId() { assertEquals(new ItemIdValueImpl("Q1985727", "http://www.wikidata.org/entity/"), t1.getPreferredCalendarModelItemId()); } @Test(expected = IllegalArgumentException.class) public void getPreferredCalendarModelItemIdInvalidIri() { t3.getPreferredCalendarModelItemId(); } @Test public void equalityBasedOnContent() { TimeValue tdYear = new TimeValueImpl(2013, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdMonth = new TimeValueImpl(2007, (byte) 6, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdDay = new TimeValueImpl(2007, (byte) 5, (byte) 13, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdHour = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 11, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdMinute = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 47, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdSecond = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 1, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdTimezone = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 120, TimeValue.CM_GREGORIAN_PRO); TimeValue tdBefore = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 1, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdAfter = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 2, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdPrecision = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, 
(byte) 45, (byte) 0, TimeValue.PREC_DAY, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); TimeValue tdCalendar = new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_JULIAN_PRO); assertEquals(t1, t1); assertEquals(t1, t2); assertNotEquals(t1, tdYear); assertNotEquals(t1, tdMonth); assertNotEquals(t1, tdDay); assertNotEquals(t1, tdHour); assertNotEquals(t1, tdMinute); assertNotEquals(t1, tdSecond); assertNotEquals(t1, tdTimezone); assertNotEquals(t1, tdBefore); assertNotEquals(t1, tdAfter); assertNotEquals(t1, tdPrecision); assertNotEquals(t1, tdCalendar); assertNotEquals(t1, null); assertNotEquals(t1, this); } @Test public void hashBasedOnContent() { assertEquals(t1.hashCode(), t2.hashCode()); } @Test(expected = NullPointerException.class) public void calendarModelNotNull() { new TimeValueImpl(2007, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, null); } @Test public void largeYearValues() { // May 12 in the first year after the beginning of the universe: TimeValue t = new TimeValueImpl(-13800000000L, (byte) 5, (byte) 12, (byte) 10, (byte) 45, (byte) 0, TimeValue.PREC_SECOND, 0, 1, 60, TimeValue.CM_GREGORIAN_PRO); assertEquals(t.getYear(), -13800000000L); } @Test public void testToJson() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_TIME_VALUE, mapper.writeValueAsString(t1)); } @Test public void testToJava() throws IOException { assertEquals(t1, mapper.readValue(JSON_TIME_VALUE, ValueImpl.class)); } @Test public void testJulianToGregorian() { final TimeValue tJulian = new TimeValueImpl(1143, (byte)10, (byte) 5, (byte) 1, (byte) 2, (byte) 0, TimeValue.PREC_MINUTE, 0, 1, 0, TimeValue.CM_JULIAN_PRO); final TimeValue gregorian = tJulian.toGregorian(); assertEquals(1143, gregorian.getYear()); assertEquals(10, gregorian.getMonth()); assertEquals(12, gregorian.getDay()); assertEquals(1, gregorian.getHour()); assertEquals(2, gregorian.getMinute()); assertEquals(0, gregorian.getSecond()); assertEquals(TimeValue.PREC_MINUTE, gregorian.getPrecision()); assertEquals(0, gregorian.getBeforeTolerance()); assertEquals(1, gregorian.getAfterTolerance()); assertEquals(0, gregorian.getTimezoneOffset()); assertEquals(TimeValue.CM_GREGORIAN_PRO, gregorian.getPreferredCalendarModel()); } } UnsupportedEntityIdValueTest.java000066400000000000000000000111011444772566300413250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2019 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.implementation; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import java.io.IOException; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.UnsupportedEntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public class UnsupportedEntityIdValueTest { private final ObjectMapper mapper = new DatamodelMapper("http://www.wikidata.org/entity/"); private final String JSON_UNSUPPORTED_VALUE_1 = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"funky\",\"id\":\"Z343\"}}"; private final String JSON_UNSUPPORTED_VALUE_2 = "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"shiny\",\"id\":\"R8989\",\"foo\":\"bar\"}}"; private final String JSON_UNSUPPORTED_VALUE_NO_TYPE = "{\"type\":\"wikibase-entityid\",\"value\":{\"id\":\"Z343\"}}"; private UnsupportedEntityIdValue firstValue, secondValue, noType; @Before public void deserializeValues() throws IOException { firstValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_1, UnsupportedEntityIdValueImpl.class); secondValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_2, UnsupportedEntityIdValueImpl.class); noType = mapper.readValue(JSON_UNSUPPORTED_VALUE_NO_TYPE, UnsupportedEntityIdValueImpl.class); } @Test public void testEquals() throws IOException { Value otherValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_1, ValueImpl.class); assertEquals(firstValue, otherValue); assertNotEquals(secondValue, otherValue); assertNotEquals(firstValue, noType); assertNotEquals(noType, secondValue); } @Test public void testHash() throws IOException { Value otherValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_2, ValueImpl.class); assertEquals(secondValue.hashCode(), otherValue.hashCode()); } @Test public void testSerialize() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_UNSUPPORTED_VALUE_1, mapper.writeValueAsString(firstValue)); JsonComparator.compareJsonStrings(JSON_UNSUPPORTED_VALUE_2, mapper.writeValueAsString(secondValue)); JsonComparator.compareJsonStrings(JSON_UNSUPPORTED_VALUE_NO_TYPE, mapper.writeValueAsString(noType)); } @Test public void testToString() { assertEquals(ToString.toString(firstValue), firstValue.toString()); assertEquals(ToString.toString(secondValue), secondValue.toString()); } @Test public void testGetTypeString() { assertEquals("funky", firstValue.getEntityTypeJsonString()); assertEquals("shiny", secondValue.getEntityTypeJsonString()); } @Test public void testGetIri() { assertEquals("http://www.wikidata.org/entity/Z343", firstValue.getIri()); assertEquals("http://www.wikidata.org/entity/R8989", secondValue.getIri()); assertEquals("http://www.wikidata.org/entity/Z343", noType.getIri()); } @Test public void testGetId() { assertEquals("Z343", firstValue.getId()); assertEquals("R8989", secondValue.getId()); assertEquals("Z343", noType.getId()); } @Test public void testGetEntityType() { assertEquals("http://www.wikidata.org/ontology#Funky", firstValue.getEntityType()); assertEquals("http://www.wikidata.org/ontology#Shiny", secondValue.getEntityType()); assertEquals(EntityIdValue.ET_UNSUPPORTED, 
noType.getEntityType()); } @Test public void testGetEntityTypeString() { assertEquals("funky", firstValue.getEntityTypeJsonString()); assertEquals("shiny", secondValue.getEntityTypeJsonString()); assertNull(noType.getEntityTypeJsonString()); } @Test public void testIsPlaceholder() { assertFalse(firstValue.isPlaceholder()); } } UnsupportedValueImplTest.java000066400000000000000000000053121444772566300405040ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /*- * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2019 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.helpers.ToString; import org.wikidata.wdtk.datamodel.interfaces.UnsupportedValue; import org.wikidata.wdtk.datamodel.interfaces.Value; import static org.junit.Assert.*; import java.io.IOException; import org.junit.Before; import org.junit.Test; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public class UnsupportedValueImplTest { private final ObjectMapper mapper = new ObjectMapper(); private final String JSON_UNSUPPORTED_VALUE_1 = "{\"type\":\"funky\",\"value\":\"groovy\"}"; private final String JSON_UNSUPPORTED_VALUE_2 = "{\"type\":\"shiny\",\"number\":42}"; private UnsupportedValue firstValue, secondValue; @Before public void deserializeFirstValue() throws IOException { firstValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_1, UnsupportedValueImpl.class); secondValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_2, UnsupportedValueImpl.class); } @Test public void testEquals() throws IOException { Value otherValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_1, ValueImpl.class); assertEquals(firstValue, otherValue); assertNotEquals(secondValue, otherValue); } @Test public void testHash() throws IOException { Value otherValue = mapper.readValue(JSON_UNSUPPORTED_VALUE_2, ValueImpl.class); assertEquals(secondValue.hashCode(), otherValue.hashCode()); } @Test public void testSerialize() throws JsonProcessingException { JsonComparator.compareJsonStrings(JSON_UNSUPPORTED_VALUE_1, mapper.writeValueAsString(firstValue)); JsonComparator.compareJsonStrings(JSON_UNSUPPORTED_VALUE_2, mapper.writeValueAsString(secondValue)); } @Test public void testToString() { assertEquals(ToString.toString(firstValue), firstValue.toString()); assertEquals(ToString.toString(secondValue), secondValue.toString()); } @Test public void testGetTypeString() { assertEquals("funky", firstValue.getTypeJsonString()); assertEquals("shiny", secondValue.getTypeJsonString()); } } WikimediaLanguageCodesTest.java000066400000000000000000000031041444772566300406570ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementationpackage org.wikidata.wdtk.datamodel.implementation; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * 
%% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import org.junit.Test; import org.wikidata.wdtk.datamodel.interfaces.WikimediaLanguageCodes; public class WikimediaLanguageCodesTest { @Test public void getSomeLanguageCodes() { assertEquals("gsw", WikimediaLanguageCodes.getLanguageCode("als")); assertEquals("en", WikimediaLanguageCodes.getLanguageCode("en")); } @Test(expected = IllegalArgumentException.class) public void getUnknownLanguageCode() { WikimediaLanguageCodes.getLanguageCode("unknown"); } @Test public void fixDeprecatedLanguageCode() { assertEquals("nb", WikimediaLanguageCodes.fixLanguageCodeIfDeprecated("no")); assertEquals("en", WikimediaLanguageCodes.fixLanguageCodeIfDeprecated("en")); } @Test public void fixUnknownLanguageCode() { assertEquals("unknown", WikimediaLanguageCodes.fixLanguageCodeIfDeprecated("unknown")); } } Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/interfaces/000077500000000000000000000000001444772566300320065ustar00rootroot00000000000000NullEntityIdsTest.java000066400000000000000000000104541444772566300362050ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/interfaces/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.datamodel.interfaces; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; public class NullEntityIdsTest { static class TestValueVisitor implements ValueVisitor<String> { @Override public String visit(EntityIdValue value) { return value.getId(); } @Override public String visit(GlobeCoordinatesValue value) { return null; } @Override public String visit(MonolingualTextValue value) { return null; } @Override public String visit(QuantityValue value) { return null; } @Override public String visit(StringValue value) { return null; } @Override public String visit(TimeValue value) { return null; } @Override public String visit(UnsupportedValue value) { return null; } } @Test public void testNullItemId() { TestValueVisitor tvv = new TestValueVisitor(); assertEquals("Q0", ItemIdValue.NULL.accept(tvv)); assertEquals("Q0", ItemIdValue.NULL.getId()); assertEquals("http://localhost/entity/", ItemIdValue.NULL.getSiteIri()); assertEquals(EntityIdValue.ET_ITEM, ItemIdValue.NULL.getEntityType()); assertEquals("http://localhost/entity/Q0", ItemIdValue.NULL.getIri()); assertTrue(ItemIdValue.NULL.isPlaceholder()); } @Test public void testNullPropertyId() { TestValueVisitor tvv = new TestValueVisitor(); assertEquals("P0", PropertyIdValue.NULL.accept(tvv)); assertEquals("P0", PropertyIdValue.NULL.getId()); assertEquals("http://localhost/entity/", PropertyIdValue.NULL.getSiteIri()); assertEquals(EntityIdValue.ET_PROPERTY, PropertyIdValue.NULL.getEntityType()); assertEquals("http://localhost/entity/P0", PropertyIdValue.NULL.getIri()); assertTrue(PropertyIdValue.NULL.isPlaceholder()); } @Test public void testNullMediaInfoId() { TestValueVisitor tvv = new TestValueVisitor(); assertEquals("M0", MediaInfoIdValue.NULL.accept(tvv)); assertEquals("M0", MediaInfoIdValue.NULL.getId()); assertEquals("http://localhost/entity/", MediaInfoIdValue.NULL.getSiteIri()); assertEquals(EntityIdValue.ET_MEDIA_INFO, MediaInfoIdValue.NULL.getEntityType()); assertEquals("http://localhost/entity/M0", MediaInfoIdValue.NULL.getIri()); assertTrue(MediaInfoIdValue.NULL.isPlaceholder()); } @Test public void testNullLexemeId() { TestValueVisitor tvv = new TestValueVisitor(); assertEquals("L0", LexemeIdValue.NULL.accept(tvv)); assertEquals("L0", LexemeIdValue.NULL.getId()); assertEquals("http://localhost/entity/", LexemeIdValue.NULL.getSiteIri()); assertEquals(EntityIdValue.ET_LEXEME, LexemeIdValue.NULL.getEntityType()); assertEquals("http://localhost/entity/L0", LexemeIdValue.NULL.getIri()); assertTrue(LexemeIdValue.NULL.isPlaceholder()); } @Test public void testNullSenseId() { TestValueVisitor tvv = new TestValueVisitor(); assertEquals("L0-S0", SenseIdValue.NULL.accept(tvv)); assertEquals("L0-S0", SenseIdValue.NULL.getId()); assertEquals("http://localhost/entity/", SenseIdValue.NULL.getSiteIri()); assertEquals(EntityIdValue.ET_SENSE, SenseIdValue.NULL.getEntityType()); assertEquals("http://localhost/entity/L0-S0", SenseIdValue.NULL.getIri()); assertTrue(SenseIdValue.NULL.isPlaceholder()); } @Test public void testNullFormId() { TestValueVisitor tvv = new TestValueVisitor(); assertEquals("L0-F0", FormIdValue.NULL.accept(tvv)); assertEquals("L0-F0", FormIdValue.NULL.getId()); assertEquals("http://localhost/entity/", FormIdValue.NULL.getSiteIri()); assertEquals(EntityIdValue.ET_FORM, FormIdValue.NULL.getEntityType()); assertEquals("http://localhost/entity/L0-F0", FormIdValue.NULL.getIri()); assertTrue(FormIdValue.NULL.isPlaceholder()); } }
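/*
 * Editor's sketch (not part of the original sources): the NULL constants
 * verified above are placeholder IDs such as "Q0" under
 * http://localhost/entity/, and isPlaceholder() is how callers can detect and
 * skip them before writing data out. A minimal, hypothetical usage, relying
 * only on the interfaces exercised in NullEntityIdsTest:
 *
 *   EntityIdValue subject = ItemIdValue.NULL;
 *   if (!subject.isPlaceholder()) {
 *       // only real entity IDs reach this branch
 *   }
 */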
Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/resources/000077500000000000000000000000001444772566300234655ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/resources/JsonDeserializer/000077500000000000000000000000001444772566300267415ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/resources/JsonDeserializer/item.json000066400000000000000000000507061444772566300306020ustar00rootroot00000000000000{ "pageid": 37687, "ns": 0, "title": "Q34987", "lastrevid": 1062625956, "modified": "2019-11-26T12:42:18Z", "type": "item", "id": "Q34987", "labels": { "sw": { "language": "sw", "value": "Kibile" }, "de": { "language": "de", "value": "Bile" }, "pms": { "language": "pms", "value": "Lenga Bile" }, "en": { "language": "en", "value": "Bile" }, "hr": { "language": "hr", "value": "Bile jezik" }, "ru": { "language": "ru", "value": "Биле" }, "en-gb": { "language": "en-gb", "value": "Bile" }, "ce": { "language": "ce", "value": "Биле" } }, "descriptions": { "de": { "language": "de", "value": "Sprache" }, "en": { "language": "en", "value": "language" }, "he": { "language": "he", "value": "שפה" }, "br": { "language": "br", "value": "yezh" }, "fr": { "language": "fr", "value": "langue" }, "es": { "language": "es", "value": "lengua" }, "it": { "language": "it", "value": "lingua" }, "nl": { "language": "nl", "value": "taal" }, "la": { "language": "la", "value": "lingua" }, "cy": { "language": "cy", "value": "iaith" } }, "aliases": { "en": [ { "language": "en", "value": "Bile language" } ] }, "claims": { "P220": [ { "mainsnak": { "snaktype": "value", "property": "P220", "hash": "77ed58adb4323efb034906aeb3b7d8cc8936c56c", "datavalue": { "value": "bil", "type": "string" }, "datatype": "external-id" }, "type": "statement", "id": "Q34987$A5EE01BE-0FD2-478C-9405-5ECF86B0DF9B", "rank": "normal", "references": [ { "hash": "56b370ad342ff9d47a4119b3f53f894995cea4b7", "snaks": { "P248": [ { "snaktype": "value", "property": "P248", "hash": "27fbd18e13a0a2c1e2ae172bfbfc66577ffb38a2", "datavalue": { "value": { "entity-type": "item", "numeric-id": 75488338, "id": "Q75488338" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ], "P854": [ { "snaktype": "value", "property": "P854", "hash": "68a06f5f5ab362597f2d12e45856619795420364", "datavalue": { "value": "https://op.europa.eu/web/eu-vocabularies/at-dataset/-/resource/dataset/language", "type": "string" }, "datatype": "url" } ] }, "snaks-order": [ "P248", "P854" ] }, { "hash": "fa278ebfc458360e5aed63d5058cca83c46134f1", "snaks": { "P143": [ { "snaktype": "value", "property": "P143", "hash": "e4f6d9441d0600513c4533c672b5ab472dc73694", "datavalue": { "value": { "entity-type": "item", "numeric-id": 328, "id": "Q328" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ] }, "snaks-order": [ "P143" ] } ] } ], "P31": [ { "mainsnak": { "snaktype": "value", "property": "P31", "hash": "ac8ce0eb2aee500f93443c85f173bdbfece859a4", "datavalue": { "value": { "entity-type": "item", "numeric-id": 34770, "id": "Q34770" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "Q34987$8B2CFC26-08CD-4366-8ABC-FD28B39392C2", "rank": "normal", "references": [ { "hash": "b096ce736246deae93782819465859680c66015c", "snaks": { "P248": [ { "snaktype": "value", "property": "P248", "hash": "375610daaf9b20dda81cd83553d79636108b64cb", "datavalue": { "value": { "entity-type": "item", "numeric-id": 14790, "id": "Q14790" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ] }, 
"snaks-order": [ "P248" ] } ] }, { "mainsnak": { "snaktype": "value", "property": "P31", "hash": "61edb75e9440807d73635f592270b56ef4b18020", "datavalue": { "value": { "entity-type": "item", "numeric-id": 1288568, "id": "Q1288568" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "Q34987$E9A36CAC-E888-4F3C-9298-83468DFE4E8F", "rank": "normal", "references": [ { "hash": "ca9fe08a869c1c95f210c9ac9e0fd47b086a8379", "snaks": { "P854": [ { "snaktype": "value", "property": "P854", "hash": "fed318b7dbb903a7b459b51df9c56dde40e8dce7", "datavalue": { "value": "https://iso639-3.sil.org/code/bil", "type": "string" }, "datatype": "url" } ] }, "snaks-order": [ "P854" ] } ] } ], "P646": [ { "mainsnak": { "snaktype": "value", "property": "P646", "hash": "3a20650bf2b9eeac4cf2f81e9bccfbd9698c5c5f", "datavalue": { "value": "/m/0h94lsr", "type": "string" }, "datatype": "external-id" }, "type": "statement", "id": "Q34987$4E0156BD-51F3-442C-A7D3-D393A8620DD0", "rank": "normal", "references": [ { "hash": "2b00cb481cddcac7623114367489b5c194901c4a", "snaks": { "P248": [ { "snaktype": "value", "property": "P248", "hash": "a94b740202b097dd33355e0e6c00e54b9395e5e0", "datavalue": { "value": { "entity-type": "item", "numeric-id": 15241312, "id": "Q15241312" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ], "P577": [ { "snaktype": "value", "property": "P577", "hash": "fde79ecb015112d2f29229ccc1ec514ed3e71fa2", "datavalue": { "value": { "time": "+2013-10-28T00:00:00Z", "timezone": 0, "before": 0, "after": 0, "precision": 11, "calendarmodel": "http://www.wikidata.org/entity/Q1985727" }, "type": "time" }, "datatype": "time" } ] }, "snaks-order": [ "P248", "P577" ] } ] } ], "P1394": [ { "mainsnak": { "snaktype": "value", "property": "P1394", "hash": "110e1758d4edc808f5618943d8f7ea2eb492bb39", "datavalue": { "value": "bile1244", "type": "string" }, "datatype": "external-id" }, "type": "statement", "id": "Q34987$9353E43A-0D74-4791-812D-087D4087ADAB", "rank": "normal", "references": [ { "hash": "fa278ebfc458360e5aed63d5058cca83c46134f1", "snaks": { "P143": [ { "snaktype": "value", "property": "P143", "hash": "e4f6d9441d0600513c4533c672b5ab472dc73694", "datavalue": { "value": { "entity-type": "item", "numeric-id": 328, "id": "Q328" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ] }, "snaks-order": [ "P143" ] } ] } ], "P1014": [ { "mainsnak": { "snaktype": "value", "property": "P1014", "hash": "e4e5eea85ea1a5e5004c5571d77682bdbd53b06d", "datavalue": { "value": "300264027", "type": "string" }, "datatype": "external-id" }, "type": "statement", "id": "Q34987$F921B970-E45E-4D03-A201-1DAAB8889C02", "rank": "normal" } ], "P305": [ { "mainsnak": { "snaktype": "value", "property": "P305", "hash": "f76b15a9f6825d800d7ba1244f50303c27d6ae41", "datavalue": { "value": "bil", "type": "string" }, "datatype": "external-id" }, "type": "statement", "qualifiers": { "P580": [ { "snaktype": "value", "property": "P580", "hash": "47d0d9e39591d66e42099639ca9b282d03d1b039", "datavalue": { "value": { "time": "+2009-07-29T00:00:00Z", "timezone": 0, "before": 0, "after": 0, "precision": 11, "calendarmodel": "http://www.wikidata.org/entity/Q1985727" }, "type": "time" }, "datatype": "time" } ] }, "qualifiers-order": [ "P580" ], "id": "Q34987$98A36E34-18B7-4FDF-B6F9-AE1A6065ABAE", "rank": "normal", "references": [ { "hash": "4aa3c46831638dff4cf3c42ae0f0acf3b3e1234a", "snaks": { "P248": [ { "snaktype": "value", "property": "P248", "hash": "14411bc8b193205898cff7811c162db62ecec994", 
"datavalue": { "value": { "entity-type": "item", "numeric-id": 57271947, "id": "Q57271947" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ], "P813": [ { "snaktype": "value", "property": "P813", "hash": "f0f70081902969e9832f43d66e5825e99355508e", "datavalue": { "value": { "time": "+2019-02-08T00:00:00Z", "timezone": 0, "before": 0, "after": 0, "precision": 11, "calendarmodel": "http://www.wikidata.org/entity/Q1985727" }, "type": "time" }, "datatype": "time" } ], "P577": [ { "snaktype": "value", "property": "P577", "hash": "11cec956c1dfec04779f742dbf48c43a5cd07719", "datavalue": { "value": { "time": "+2009-07-29T00:00:00Z", "timezone": 0, "before": 0, "after": 0, "precision": 11, "calendarmodel": "http://www.wikidata.org/entity/Q1985727" }, "type": "time" }, "datatype": "time" } ], "P1476": [ { "snaktype": "value", "property": "P1476", "hash": "61c3b0f256fd7c592d87366c8539a8a4e53b3b14", "datavalue": { "value": { "text": "Bile", "language": "en" }, "type": "monolingualtext" }, "datatype": "monolingualtext" } ] }, "snaks-order": [ "P248", "P813", "P577", "P1476" ] } ] } ], "P1627": [ { "mainsnak": { "snaktype": "value", "property": "P1627", "hash": "081219177c4d274bfbe05fd6ccb06ac375f1a06d", "datavalue": { "value": "bil", "type": "string" }, "datatype": "external-id" }, "type": "statement", "id": "Q34987$862FDABC-75DB-4ECD-9ABB-3CE8FDA59CA9", "rank": "normal" } ], "P3823": [ { "mainsnak": { "snaktype": "value", "property": "P3823", "hash": "5e5fdd6a244fe705c817046992d272db8e66f02e", "datavalue": { "value": { "entity-type": "item", "numeric-id": 29051555, "id": "Q29051555" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "Q34987$1D3FF6BD-4B52-4687-8499-619F3B973DEC", "rank": "normal", "references": [ { "hash": "816f6cabdb11971f95dc3faca697f5c2f6f02248", "snaks": { "P248": [ { "snaktype": "value", "property": "P248", "hash": "375610daaf9b20dda81cd83553d79636108b64cb", "datavalue": { "value": { "entity-type": "item", "numeric-id": 14790, "id": "Q14790" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ], "P813": [ { "snaktype": "value", "property": "P813", "hash": "02f847ace2885e56ef89186fe9e65776dfd4eb44", "datavalue": { "value": { "time": "+2019-05-10T00:00:00Z", "timezone": 0, "before": 0, "after": 0, "precision": 11, "calendarmodel": "http://www.wikidata.org/entity/Q1985727" }, "type": "time" }, "datatype": "time" } ], "P854": [ { "snaktype": "value", "property": "P854", "hash": "304d2db566e4f943c5b027dce032d0bff1ef0efd", "datavalue": { "value": "https://www.ethnologue.com/language/bil", "type": "string" }, "datatype": "url" } ] }, "snaks-order": [ "P248", "P813", "P854" ] } ] } ], "P17": [ { "mainsnak": { "snaktype": "value", "property": "P17", "hash": "8116beb62ea32777b0b68313efb65d9b2713adc7", "datavalue": { "value": { "entity-type": "item", "numeric-id": 1033, "id": "Q1033" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "Q34987$D5976F14-7A23-4505-BEEA-EC4664CBAD2F", "rank": "normal" } ], "P2341": [ { "mainsnak": { "snaktype": "value", "property": "P2341", "hash": "7d0912c4f9e250997b8c2bc48b5716aa5d07a309", "datavalue": { "value": { "entity-type": "item", "numeric-id": 337514, "id": "Q337514" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "Q34987$49FFB340-8004-4E83-80F1-633480A009B2", "rank": "normal" }, { "mainsnak": { "snaktype": "value", "property": "P2341", "hash": "1c831bf6c99e4eee1187c79f94909cab30cefa1c", "datavalue": { 
"value": { "entity-type": "item", "numeric-id": 509300, "id": "Q509300" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "Q34987$AAA30F5B-FD53-48CF-9579-8C24E1F7C573", "rank": "normal" } ], "P2888": [ { "mainsnak": { "snaktype": "value", "property": "P2888", "hash": "b73d33b1e40b47d117a1a4bf521d8f4f91184474", "datavalue": { "value": "http://publications.europa.eu/resource/authority/language/BIL", "type": "string" }, "datatype": "url" }, "type": "statement", "id": "Q34987$46734317-5E4C-4CE3-A43E-514BA7D702ED", "rank": "normal", "references": [ { "hash": "56b370ad342ff9d47a4119b3f53f894995cea4b7", "snaks": { "P248": [ { "snaktype": "value", "property": "P248", "hash": "27fbd18e13a0a2c1e2ae172bfbfc66577ffb38a2", "datavalue": { "value": { "entity-type": "item", "numeric-id": 75488338, "id": "Q75488338" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" } ], "P854": [ { "snaktype": "value", "property": "P854", "hash": "68a06f5f5ab362597f2d12e45856619795420364", "datavalue": { "value": "https://op.europa.eu/web/eu-vocabularies/at-dataset/-/resource/dataset/language", "type": "string" }, "datatype": "url" } ] }, "snaks-order": [ "P248", "P854" ] } ] } ] }, "sitelinks": { "dewiki": { "site": "dewiki", "title": "Bile (Sprache)", "badges": [] }, "enwiki": { "site": "enwiki", "title": "Bile language", "badges": [] }, "hrwiki": { "site": "hrwiki", "title": "Bile jezik", "badges": [] }, "pmswiki": { "site": "pmswiki", "title": "Lenga Bile", "badges": [] }, "ruwiki": { "site": "ruwiki", "title": "Биле", "badges": [] }, "swwiki": { "site": "swwiki", "title": "Kibile", "badges": [] } } } Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/resources/JsonDeserializer/lexeme.json000066400000000000000000000031621444772566300311150ustar00rootroot00000000000000{ "pageid": 55199558, "ns": 146, "title": "Lexeme:L3872", "lastrevid": 1080404095, "modified": "2019-12-20T14:47:45Z", "type": "lexeme", "id": "L3872", "lemmas": { "en": { "language": "en", "value": "business" } }, "lexicalCategory": "Q1084", "language": "Q1860", "claims": {}, "forms": [ { "id": "L3872-F1", "representations": { "en": { "language": "en", "value": "business" } }, "grammaticalFeatures": [ "Q110786" ], "claims": [] }, { "id": "L3872-F2", "representations": { "en": { "language": "en", "value": "businesses" } }, "grammaticalFeatures": [ "Q146786" ], "claims": [] } ], "senses": [ { "id": "L3872-S1", "glosses": { "en": { "language": "en", "value": "economic activity done by a businessperson" } }, "claims": { "P5137": [ { "mainsnak": { "snaktype": "value", "property": "P5137", "hash": "e4c2933a6e50281693686282fb92e64b41fd80b1", "datavalue": { "value": { "entity-type": "item", "numeric-id": 19862406, "id": "Q19862406" }, "type": "wikibase-entityid" } }, "type": "statement", "id": "L3872-S1$14106EF8-9525-41FA-A358-285A314276EA", "rank": "normal" } ] } } ] } Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/resources/JsonDeserializer/mediainfo.json000066400000000000000000000024161444772566300315720ustar00rootroot00000000000000{ "pageid": 74698470, "ns": 6, "title": "File:Chick Corea & Stanley Clarke.jpg", "lastrevid": 363818804, "modified": "2019-08-30T13:07:37Z", "type": "mediainfo", "id": "M74698470", "labels": {}, "descriptions": {}, "statements": { "P180": [ { "mainsnak": { "snaktype": "value", "property": "P180", "hash": "a83bcd35f5bd70a205d9eabf429841a6a091d973", "datavalue": { "value": { "entity-type": "item", "numeric-id": 192465, "id": "Q192465" }, "type": "wikibase-entityid" } }, "type": 
"statement", "id": "M74698470$c598e90e-44b9-6214-64aa-367e4b2415b6", "rank": "normal" }, { "mainsnak": { "snaktype": "value", "property": "P180", "hash": "988edf83d80d66fd97714b6c977f5f3097ee194d", "datavalue": { "value": { "entity-type": "item", "numeric-id": 453406, "id": "Q453406" }, "type": "wikibase-entityid" } }, "type": "statement", "id": "M74698470$209cb38c-4471-7fc6-5a0e-8232ab7a506c", "rank": "normal" } ] } } Wikidata-Toolkit-0.14.6/wdtk-datamodel/src/test/resources/JsonDeserializer/property.json000066400000000000000000000173651444772566300315340ustar00rootroot00000000000000{ "pageid": 30034564, "ns": 120, "title": "Property:P3467", "lastrevid": 830819467, "modified": "2019-01-09T10:01:52Z", "type": "property", "datatype": "external-id", "id": "P3467", "labels": { "en": { "language": "en", "value": "Inventario Sculture - Polo Museale Fiorentino" }, "fr": { "language": "fr", "value": "identifiant Inventario Sculture" }, "de": { "language": "de", "value": "Inventario Sculture - Polo Museale Fiorentino" }, "nl": { "language": "nl", "value": "Inventario Sculture - Polo Museale Fiorentino-identificatiecode" }, "it": { "language": "it", "value": "Inventario Sculture - Polo Museale Fiorentino" }, "uk": { "language": "uk", "value": "ідентифікатор Inventario Sculture - Polo Museale Fiorentino" }, "ar": { "language": "ar", "value": "مخزون تماثيل متاحف فلورنسا" } }, "descriptions": { "en": { "language": "en", "value": "identifier of an artwork in the inventory of sculptures of Florentine museums" }, "fr": { "language": "fr", "value": "identifiant d'une œuvre d'art dans l'inventaire des musée de Florence" }, "de": { "language": "de", "value": "Identifikator im Skulpturenverzeichnis der Florentiner Museen" }, "uk": { "language": "uk", "value": "ідентифікатор твору в реєстрі скульптур флорентійських музеїв" } }, "aliases": { "fr": [ { "language": "fr", "value": "identifiant Musées florentins dans l'inventaire des sculptures" } ], "ar": [ { "language": "ar", "value": "تماثيل متاحف فلورنسا" }, { "language": "ar", "value": "متاحف فلورنسا" } ] }, "claims": { "P1630": [ { "mainsnak": { "snaktype": "value", "property": "P1630", "hash": "9b277739598a6d628ba53d6f60715f9710a02da0", "datavalue": { "value": "http://www.polomuseale.firenze.it/invSculture/scheda.asp?position=1&ninv=$1", "type": "string" }, "datatype": "string" }, "type": "statement", "id": "P3467$ae93c71e-4136-76e7-fe65-8a9b7b56fc91", "rank": "normal" } ], "P1855": [ { "mainsnak": { "snaktype": "value", "property": "P1855", "hash": "97a73a7c139f349a76231c5a859b8135751d9da0", "datavalue": { "value": { "entity-type": "item", "numeric-id": 179900, "id": "Q179900" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "qualifiers": { "P3467": [ { "snaktype": "value", "property": "P3467", "hash": "78e7e897edc03f2a240745407bb7a72c908b90b2", "datavalue": { "value": "1076", "type": "string" }, "datatype": "external-id" } ] }, "qualifiers-order": [ "P3467" ], "id": "P3467$47877943-4a51-e3cb-ebf3-994a93462c16", "rank": "normal" } ], "P1659": [ { "mainsnak": { "snaktype": "value", "property": "P1659", "hash": "995c8df1850c9b9ba0b64e046e87589f6b2166b8", "datavalue": { "value": { "entity-type": "property", "numeric-id": 1726, "id": "P1726" }, "type": "wikibase-entityid" }, "datatype": "wikibase-property" }, "type": "statement", "id": "P3467$3207cb44-4f32-1d3f-a528-26263d9c23b2", "rank": "normal" }, { "mainsnak": { "snaktype": "value", "property": "P1659", "hash": "5350e2f34085bd8efe0fc409f6fafc8e2a19ab66", "datavalue": { 
"value": { "entity-type": "property", "numeric-id": 3504, "id": "P3504" }, "type": "wikibase-entityid" }, "datatype": "wikibase-property" }, "type": "statement", "id": "P3467$f4e38de1-4e20-d61a-df96-2712a2d06968", "rank": "normal" } ], "P31": [ { "mainsnak": { "snaktype": "value", "property": "P31", "hash": "f4decee31e9752960d9623ea58d1dfd672b31341", "datavalue": { "value": { "entity-type": "item", "numeric-id": 19847637, "id": "Q19847637" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "P3467$20910560-30AC-45BA-A59A-3D360FFCD8F1", "rank": "normal" }, { "mainsnak": { "snaktype": "value", "property": "P31", "hash": "1f57f44f72b149c4f7fca8eedaf8f07f7ca3324f", "datavalue": { "value": { "entity-type": "item", "numeric-id": 44847669, "id": "Q44847669" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "P3467$861b3b1c-4456-a89d-609a-920d583accde", "rank": "normal" }, { "mainsnak": { "snaktype": "value", "property": "P31", "hash": "c65038907f055c45440c54a515d4b0d2962edf25", "datavalue": { "value": { "entity-type": "item", "numeric-id": 45312863, "id": "Q45312863" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "P3467$e3c68b14-41b3-68fb-8311-6360b1f2a90b", "rank": "normal" } ], "P3254": [ { "mainsnak": { "snaktype": "value", "property": "P3254", "hash": "edbfa6b14cc828f6026a70112475cb16704a5a1a", "datavalue": { "value": "https://www.wikidata.org/wiki/Wikidata:Property_proposal/Inventario_Sculture_-_Polo_Museale_Fiorentino", "type": "string" }, "datatype": "url" }, "type": "statement", "id": "P3467$FE8D4091-AABB-4445-98C6-DF894971F5C4", "rank": "normal" } ], "P17": [ { "mainsnak": { "snaktype": "value", "property": "P17", "hash": "b48529e9f7e0898ab1ddaefe8547cdb863e0167c", "datavalue": { "value": { "entity-type": "item", "numeric-id": 38, "id": "Q38" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "P3467$e4c3c067-48ba-cdae-aae7-3cf6295abf67", "rank": "normal" } ], "P2875": [ { "mainsnak": { "snaktype": "value", "property": "P2875", "hash": "dbf395280e2e27dbd093e482bb2915baae993e77", "datavalue": { "value": { "entity-type": "item", "numeric-id": 45312584, "id": "Q45312584" }, "type": "wikibase-entityid" }, "datatype": "wikibase-item" }, "type": "statement", "id": "P3467$91df8651-4147-8940-be42-22d7e30dd1b4", "rank": "normal" } ] } } Wikidata-Toolkit-0.14.6/wdtk-distribution/000077500000000000000000000000001444772566300204725ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-distribution/LICENSE.txt000066400000000000000000000261351444772566300223240ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
Wikidata-Toolkit-0.14.6/wdtk-distribution/pom.xml000066400000000000000000000127411444772566300220140ustar00rootroot00000000000000 4.0.0 org.wikidata.wdtk wdtk-parent 0.14.6 wdtk-distribution jar Wikidata Toolkit Distribution Module to package Wikidata Toolkit for releases ${project.groupId} wdtk-dumpfiles ${project.version} ${project.groupId} wdtk-rdf ${project.version} ${project.groupId} wdtk-wikibaseapi ${project.version} library org.apache.maven.plugins maven-shade-plugin 3.5.0 wdtk-library package shade true org.wikidata.wdtk wikidata-toolkit-${project.version} false wdtk-library-deps package shade false wikidata-toolkit-with-dependencies-${project.version} false sign ${project.groupId} wdtk-datamodel ${project.version} sources ${project.groupId} wdtk-dumpfiles ${project.version} sources ${project.groupId} wdtk-rdf ${project.version} sources ${project.groupId} wdtk-storage ${project.version} sources ${project.groupId} wdtk-util ${project.version} sources ${project.groupId} wdtk-wikibaseapi ${project.version} sources ${project.groupId} wdtk-parent ${project.version} javadoc org.apache.maven.plugins maven-assembly-plugin 3.6.0 create-source-jar package single wikidata-toolkit-${project.version} ${basedir}/src/main/assembly/sources.xml create-javadoc-jar package single wikidata-toolkit-${project.version} ${basedir}/src/main/assembly/javadoc.xml Wikidata-Toolkit-0.14.6/wdtk-distribution/src/000077500000000000000000000000001444772566300212615ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-distribution/src/main/000077500000000000000000000000001444772566300222055ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-distribution/src/main/assembly/000077500000000000000000000000001444772566300240245ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-distribution/src/main/assembly/javadoc.xml000066400000000000000000000015471444772566300261640ustar00rootroot00000000000000 javadoc jar . org.wikidata.wdtk:wdtk-parent:*:javadoc . true false ${basedir} LICENSE.txt . Wikidata-Toolkit-0.14.6/wdtk-distribution/src/main/assembly/release.xml000066400000000000000000000017651444772566300261770ustar00rootroot00000000000000 release jar . org.wikidata.wdtk:* org.wikidata.wdtk:*:*:sources org.wikidata.wdtk:*:*:javadoc . true runtime false ${basedir} LICENSE.txt Wikidata-Toolkit-0.14.6/wdtk-distribution/src/main/assembly/sources.xml000066400000000000000000000015361444772566300262360ustar00rootroot00000000000000 sources jar . org.wikidata.wdtk:*:*:sources . true false ${basedir} LICENSE.txt . Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/000077500000000000000000000000001444772566300177435ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/LICENSE.txt000066400000000000000000000261351444772566300215750ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/pom.xml000066400000000000000000000025621444772566300212650ustar00rootroot00000000000000 4.0.0 org.wikidata.wdtk wdtk-parent 0.14.6 wdtk-dumpfiles jar Wikidata Toolkit Dump File Handling WDTK support for processing Wikibase dump files ${project.groupId} wdtk-datamodel ${project.version} ${project.groupId} wdtk-util ${project.version} ${project.groupId} wdtk-storage ${project.version} ${project.groupId} wdtk-testing ${project.version} test src/test/resources Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/000077500000000000000000000000001444772566300205325ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/000077500000000000000000000000001444772566300214565ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/000077500000000000000000000000001444772566300223775ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/000077500000000000000000000000001444772566300231665ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/000077500000000000000000000000001444772566300247635ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300257345ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/000077500000000000000000000000001444772566300277245ustar00rootroot00000000000000DumpContentType.java000066400000000000000000000016321444772566300336140ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Enum to refer to the various kinds of dumps that are supported by this * implementation. * * @author Markus Kroetzsch * */ public enum DumpContentType { DAILY, CURRENT, FULL, SITES, JSON }DumpProcessingController.java000066400000000000000000000574401444772566300355300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import java.io.InputStream; import java.nio.file.FileAlreadyExistsException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.DocumentDataFilter; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessorBroker; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessorFilter; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Sites; import org.wikidata.wdtk.dumpfiles.wmf.WmfDumpFileManager; import org.wikidata.wdtk.util.DirectoryManager; import org.wikidata.wdtk.util.DirectoryManagerFactory; import org.wikidata.wdtk.util.WebResourceFetcher; import org.wikidata.wdtk.util.WebResourceFetcherImpl; /** * A class for controlling the processing of dump files through a unified * interface. The settings of the controller specify how dump files should be * fetched and processed. *

* The methods for registering listeners to process dump files that contain * revisions are * {@link #registerMwRevisionProcessor(MwRevisionProcessor, String, boolean)} * and * {@link #registerEntityDocumentProcessor(EntityDocumentProcessor, String, boolean)}. *

* For processing the content of wiki pages, there are two modes of operation: * revision-based and entity-document-based. The former is used when processing * dump files that contain revisions. These hold detailed information about each * revision (revision number, author, time, etc.) that could be used by revision * processors. *

* The entity-document-based operation is used when processing simplified dumps * that contain only the content of the current (entity) pages of a wiki. In * this case, no additional information is available and only the entity * document processors are called (since we have no revisions). Both modes use * the same entity document processors. In revision-based runs, it is possible * to restrict some entity document processors to certain content models only * (e.g., to process only properties). In entity-document-based runs, this is * ignored and all entity document processors get to see all the data. *

 * The methods for revision-based processing of selected dump files (and * downloading them first, finding out which ones are relevant) are * {@link #processAllRecentRevisionDumps()}, * {@link #processMostRecentMainDump()}, and * {@link #processMostRecentJsonDump()}. *
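 * <p>
 * For orientation, a minimal usage sketch (here {@code myEntityProcessor}
 * stands for any {@link EntityDocumentProcessor} implementation and is
 * purely illustrative):
 *
 * <pre>{@code
 * DumpProcessingController controller = new DumpProcessingController("wikidatawiki");
 * // receive the current revisions of all content models:
 * controller.registerEntityDocumentProcessor(myEntityProcessor, null, true);
 * // download (if necessary) and process the most recent JSON dump:
 * controller.processMostRecentJsonDump();
 * }</pre>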

* To extract the most recent sitelinks information, the method * {@link #getSitesInformation()} can be used. To get information about the * revision dump files that the main methods will process, one can use * {@link #getWmfDumpFileManager()} to get access to the underlying dump file * manager, which can be used to get access to dump file data. *
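 * <p>
 * For example, a sketch of fetching the sites information before processing
 * (the {@code getPageUrl} lookup is shown as an assumed accessor on
 * {@link Sites}; {@code controller} is set up as in the example above):
 *
 * <pre>{@code
 * Sites sites = controller.getSitesInformation(); // null if no sites dump is available
 * if (sites != null) {
 *     System.out.println(sites.getPageUrl("enwiki", "Douglas Adams"));
 * }
 * }</pre>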

 * The controller will also catch exceptions that may occur when trying to * download and read dump files. They will be turned into logged errors. * * @author Markus Kroetzsch * */ public class DumpProcessingController { static final Logger logger = LoggerFactory .getLogger(DumpProcessingController.class); /** * Helper value class to store the registration settings of one listener. * * @author Markus Kroetzsch * */ static class ListenerRegistration { final String model; final boolean onlyCurrentRevisions; ListenerRegistration(String model, boolean onlyCurrentRevisions) { this.model = model; this.onlyCurrentRevisions = onlyCurrentRevisions; } @Override public int hashCode() { if (this.model == null) { return (this.onlyCurrentRevisions ? 1 : 0); } else { return 2 * this.model.hashCode() + (this.onlyCurrentRevisions ? 1 : 0); } } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof ListenerRegistration)) { return false; } ListenerRegistration other = (ListenerRegistration) obj; if (this.model == null) { return other.model == null && this.onlyCurrentRevisions == other.onlyCurrentRevisions; } else { return this.model.equals(other.model) && this.onlyCurrentRevisions == other.onlyCurrentRevisions; } } } /** * Map of all {@link EntityDocumentProcessor} objects registered so far, * based on the model and revision (current or not) they are registered for. */ final HashMap<ListenerRegistration, List<EntityDocumentProcessor>> entityDocumentProcessors; /** * Map of all {@link MwRevisionProcessor} objects registered so far, based on * the model and revision (current or not) they are registered for. */ final HashMap<ListenerRegistration, List<MwRevisionProcessor>> mwRevisionProcessors; /** * The name of the project whose dumps are processed here. */ final String projectName; /** * Should only current dumps be considered? This is changed automatically if * some registered listener is interested in non-current dumps. */ boolean preferCurrent = true; /** * The object used to access the Web, or null if Web access is disabled. This * is stored permanently here so that tests in this package can set the * value to a mock object. This class should not need to be tested outside * this package. */ WebResourceFetcher webResourceFetcher; /** * The object used to access the download directory where dump files are * stored. This is stored permanently here so that tests in this package can * set the value to a mock object. This class should not need to be tested * outside this package. */ DirectoryManager downloadDirectoryManager; final DocumentDataFilter filter = new DocumentDataFilter(); /** * Creates a new DumpProcessingController for the project of the given * name. By default, the dump file directory will be assumed to be in the * current directory and the object will access the Web to fetch the most * recent files. * * @param projectName * Wikimedia project name, e.g., "wikidatawiki" or "enwiki" */ public DumpProcessingController(String projectName) { this.projectName = projectName; this.entityDocumentProcessors = new HashMap<>(); this.mwRevisionProcessors = new HashMap<>(); try { setDownloadDirectory(System.getProperty("user.dir")); } catch (IOException e) { // The user.dir should always exist, so this is highly unusual. throw new RuntimeException(e.toString(), e); } setOfflineMode(false); } /** * Sets the directory where dump files are stored locally. If it does not * exist yet, this directory will be created. Dump files will later be stored * in a subdirectory "dumpfiles", but this will only be created when needed.
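 * <p>
 * For example (the path is illustrative):
 *
 * <pre>{@code
 * controller.setDownloadDirectory("/var/data/wdtk");
 * }</pre>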
 * * @param downloadDirectory * the download base directory * @throws IOException * if the existence of the directory could not be checked or if * it did not exist and could not be created either */ public void setDownloadDirectory(String downloadDirectory) throws IOException { this.downloadDirectoryManager = DirectoryManagerFactory .createDirectoryManager(downloadDirectory, false); } /** * Disables or enables Web access. * * @param offlineModeEnabled * if true, all Web access is disabled and only local files will * be processed */ public void setOfflineMode(boolean offlineModeEnabled) { if (offlineModeEnabled) { this.webResourceFetcher = null; } else { this.webResourceFetcher = new WebResourceFetcherImpl(); } } /** * Sets a property filter. If given, all data will be preprocessed to * contain only statements for the given (main) properties. * * @see DocumentDataFilter#setPropertyFilter(Set) * @param propertyFilter * set of properties that should be retained (can be empty) */ public void setPropertyFilter(Set<PropertyIdValue> propertyFilter) { this.filter.setPropertyFilter(propertyFilter); } /** * Sets a site link filter. If given, all data will be preprocessed to * contain only data for the given site keys. * * @see DocumentDataFilter#setSiteLinkFilter(Set) * @param siteLinkFilter * set of site link keys that should be retained (can be empty) */ public void setSiteLinkFilter(Set<String> siteLinkFilter) { this.filter.setSiteLinkFilter(siteLinkFilter); } /** * Sets a language filter. If given, all data will be preprocessed to * contain only data for the given languages. * * @see DocumentDataFilter#setLanguageFilter(Set) * @param languageFilter * set of language codes that should be retained (can be empty) */ public void setLanguageFilter(Set<String> languageFilter) { this.filter.setLanguageFilter(languageFilter); } /** * Registers an MwRevisionProcessor, which will henceforth be notified of * all revisions that are encountered in the dump. *

 * This is only used when processing dumps that contain revisions. In * particular, plain JSON dumps contain no revision information. *
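 * <p>
 * A registration sketch ({@code myRevisionProcessor} is illustrative; the
 * content model string {@code "wikibase-item"} is the one commonly used for
 * item pages):
 *
 * <pre>{@code
 * // only the current revisions of item pages:
 * controller.registerMwRevisionProcessor(myRevisionProcessor, "wikibase-item", true);
 * // all revisions, regardless of content model:
 * controller.registerMwRevisionProcessor(myRevisionProcessor, null, false);
 * }</pre>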

* Importantly, the {@link MwRevision} that the registered processors will * receive is valid only during the execution of * {@link MwRevisionProcessor#processRevision(MwRevision)}, but it will not * be permanent. If the data is to be retained permanently, the revision * processor needs to make its own copy. * * @param mwRevisionProcessor * the revision processor to register * @param model * the content model that the processor is registered for; it * will only be notified of revisions in that model; if null is * given, all revisions will be processed whatever their model * @param onlyCurrentRevisions * if true, then the subscriber is only notified of the most * current revisions; if false, then it will receive all * revisions, current or not */ public void registerMwRevisionProcessor( MwRevisionProcessor mwRevisionProcessor, String model, boolean onlyCurrentRevisions) { registerProcessor(mwRevisionProcessor, model, onlyCurrentRevisions, this.mwRevisionProcessors); } /** * Registers an EntityDocumentProcessor, which will henceforth be notified * of all entity documents that are encountered in the dump. *
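 * <p>
 * For instance, a registration sketch (the model string
 * {@code "wikibase-property"} is illustrative):
 *
 * <pre>{@code
 * // only the current revisions of property pages:
 * controller.registerEntityDocumentProcessor(myEntityProcessor, "wikibase-property", true);
 * }</pre>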

 * It is possible to register processors for specific content types and to * use either all revisions or only the most current ones. This * functionality is only available when processing dumps that contain this * information. In particular, plain JSON dumps do not specify content * models at all and have only one (current) revision of each entity. * * @param entityDocumentProcessor * the entity document processor to register * @param model * the content model that the processor is registered for; it * will only be notified of revisions in that model; if null is * given, all revisions will be processed whatever their model * @param onlyCurrentRevisions * if true, then the subscriber is only notified of the most * current revisions; if false, then it will receive all * revisions, current or not */ public void registerEntityDocumentProcessor( EntityDocumentProcessor entityDocumentProcessor, String model, boolean onlyCurrentRevisions) { registerProcessor(entityDocumentProcessor, model, onlyCurrentRevisions, this.entityDocumentProcessors); } /** * Processes the most recent dump of the sites table to extract information * about registered sites. * * @return a Sites object that contains the extracted information, or null * if no sites dump was available (typically in offline mode without * having any previously downloaded sites dumps) * @throws IOException * if there was a problem accessing the sites table dump or the * dump download directory */ public Sites getSitesInformation() throws IOException { MwDumpFile sitesTableDump = getMostRecentDump(DumpContentType.SITES); if (sitesTableDump == null) { return null; } // Create a suitable processor for such dumps and process the file: MwSitesDumpFileProcessor sitesDumpFileProcessor = new MwSitesDumpFileProcessor(); sitesDumpFileProcessor.processDumpFileContents( sitesTableDump.getDumpFileStream(), sitesTableDump); return sitesDumpFileProcessor.getSites(); } /** * Processes all relevant page revision dumps in order. The registered * listeners (MwRevisionProcessor or EntityDocumentProcessor objects) will * be notified of all data they registered for. *

 * Note that this method may not always provide reliable results since * single incremental dump files are sometimes missing, even if earlier and * later incremental dumps are available. In such a case, processing all * recent dumps will miss some (random) revisions, thus reflecting a state * that the wiki has never really been in. It might thus be preferable to * process only a single (main) dump file without any incremental dumps. * * @see DumpProcessingController#processMostRecentMainDump() * @see DumpProcessingController#processDump(MwDumpFile) * @see DumpProcessingController#getMostRecentDump(DumpContentType) */ public void processAllRecentRevisionDumps() { WmfDumpFileManager wmfDumpFileManager = getWmfDumpFileManager(); if (wmfDumpFileManager == null) { return; } MwDumpFileProcessor dumpFileProcessor = getRevisionDumpFileProcessor(); for (MwDumpFile dumpFile : wmfDumpFileManager .findAllRelevantRevisionDumps(this.preferCurrent)) { processDumpFile(dumpFile, dumpFileProcessor); } } /** * Processes the most recent main (complete) dump that is available. * Convenience method: same as retrieving a dump with * {@link #getMostRecentDump(DumpContentType)} with * {@link DumpContentType#CURRENT} or {@link DumpContentType#FULL}, and * processing it with {@link #processDump(MwDumpFile)}. The individual * methods should be used for better control and error handling. * * @see DumpProcessingController#processAllRecentRevisionDumps() */ public void processMostRecentMainDump() { DumpContentType dumpContentType; if (this.preferCurrent) { dumpContentType = DumpContentType.CURRENT; } else { dumpContentType = DumpContentType.FULL; } processDump(getMostRecentDump(dumpContentType)); } /** * Processes the most recent main (complete) dump in JSON form that is * available. Convenience method: same as retrieving a dump with * {@link #getMostRecentDump(DumpContentType)} with * {@link DumpContentType#JSON}, and processing it with * {@link #processDump(MwDumpFile)}. The individual methods should be used * for better control and error handling. * * @see DumpProcessingController#processAllRecentRevisionDumps() */ public void processMostRecentJsonDump() { processDump(getMostRecentDump(DumpContentType.JSON)); } /** * Processes the contents of the given dump file. All registered processor * objects will be notified of all data. Note that JSON dumps do not * contain any revision information, so that registered * {@link MwRevisionProcessor} objects will not be notified in this case. * Dumps of type {@link DumpContentType#SITES} cannot be processed with this * method; use {@link #getSitesInformation()} to process these dumps. * * @param dumpFile * the dump to process */ public void processDump(MwDumpFile dumpFile) { if (dumpFile == null) { return; } MwDumpFileProcessor dumpFileProcessor; switch (dumpFile.getDumpContentType()) { case CURRENT: case DAILY: case FULL: dumpFileProcessor = getRevisionDumpFileProcessor(); break; case JSON: dumpFileProcessor = getJsonDumpFileProcessor(); break; case SITES: default: logger.error("Dumps of type " + dumpFile.getDumpContentType() + " cannot be processed as entity-document dumps."); return; } processDumpFile(dumpFile, dumpFileProcessor); } /** * Returns a handler for the most recent dump file of the given type that is * available (under the current settings), or null if no dump file of this * type could be retrieved.
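 * <p>
 * A typical combination with {@link #processDump(MwDumpFile)} (sketch):
 *
 * <pre>{@code
 * MwDumpFile dumpFile = controller.getMostRecentDump(DumpContentType.JSON);
 * if (dumpFile != null) {
 *     controller.processDump(dumpFile);
 * }
 * }</pre>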
* * @param dumpContentType * the type of the dump, e.g., {@link DumpContentType#JSON} * @return the most recent dump, or null if none was found */ public MwDumpFile getMostRecentDump(DumpContentType dumpContentType) { WmfDumpFileManager wmfDumpFileManager = getWmfDumpFileManager(); if (wmfDumpFileManager == null) { return null; } else { MwDumpFile result = wmfDumpFileManager .findMostRecentDump(dumpContentType); if (result == null) { logger.warn("Could not find any dump of type " + dumpContentType.toString() + "."); } return result; } } /** * Processes one dump file with the given dump file processor, handling * exceptions appropriately. * * @param dumpFile * the dump file to process * @param dumpFileProcessor * the dump file processor to use */ void processDumpFile(MwDumpFile dumpFile, MwDumpFileProcessor dumpFileProcessor) { try (InputStream inputStream = dumpFile.getDumpFileStream()) { dumpFileProcessor.processDumpFileContents(inputStream, dumpFile); } catch (FileAlreadyExistsException e) { logger.error("Dump file " + dumpFile.toString() + " could not be processed since file " + e.getFile() + " already exists. Try deleting the file or dumpfile directory to attempt a new download."); } catch (IOException e) { logger.error("Dump file " + dumpFile.toString() + " could not be processed: " + e.toString()); } } /** * Returns a WmfDumpFileManager based on the current settings. This object * can be used to get direct access to dump files, e.g., to gather more * information. Most basic operations can also be performed using the * interface of the {@link DumpProcessingController} and this is often * preferable. *
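 * <p>
 * A sketch of inspecting dump files directly through the manager:
 *
 * <pre>{@code
 * WmfDumpFileManager manager = controller.getWmfDumpFileManager();
 * if (manager != null) {
 *     for (MwDumpFile dumpFile : manager.findAllRelevantRevisionDumps(true)) {
 *         System.out.println(dumpFile.toString());
 *     }
 * }
 * }</pre>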
<p>
* This dump file manager will not be updated if the settings change later. * * @return a WmfDumpFileManager for the current settings or null if there * was a problem (e.g., since the current dump file directory could * not be accessed) */ public WmfDumpFileManager getWmfDumpFileManager() { try { return new WmfDumpFileManager(this.projectName, this.downloadDirectoryManager, this.webResourceFetcher); } catch (IOException e) { logger.error("Could not create dump file manager: " + e.toString()); return null; } } /** * Return the main dump file processor that should be used to process * revisions. * * @return the main MwDumpFileProcessor for revisions */ MwDumpFileProcessor getRevisionDumpFileProcessor() { return new MwRevisionDumpFileProcessor(getMasterMwRevisionProcessor()); } /** * Return the main dump file processor that should be used to process the * content of JSON dumps. * * @return the main MwDumpFileProcessor for JSON */ MwDumpFileProcessor getJsonDumpFileProcessor() { return new JsonDumpFileProcessor(getMasterEntityDocumentProcessor(), Datamodel.SITE_WIKIDATA); } /** * Stores a registered processor object in a map of processors. Used * internally to keep {@link EntityDocumentProcessor} and * {@link MwRevisionProcessor} objects. * * @param processor * the processor object to register * @param model * the content model that the processor is registered for; it * will only be notified of revisions in that model; if null is * given, all revisions will be processed whatever their model * @param onlyCurrentRevisions * if true, then the subscriber is only notified of the most * current revisions; if false, then it will receive all * revisions, current or not * @param processors * the map of lists of processors to store the processor in */ private <T> void registerProcessor(T processor, String model, boolean onlyCurrentRevisions, Map<ListenerRegistration, List<T>> processors) { this.preferCurrent = this.preferCurrent && onlyCurrentRevisions; ListenerRegistration listenerRegistration = new ListenerRegistration( model, onlyCurrentRevisions); if (!processors.containsKey(listenerRegistration)) { processors.put(listenerRegistration, new ArrayList<>()); } processors.get(listenerRegistration).add(processor); } /** * Returns an {@link EntityDocumentProcessor} object that calls all * registered processors and that takes filters into account if needed. * * @return the master processor */ private EntityDocumentProcessor getMasterEntityDocumentProcessor() { EntityDocumentProcessor result = null; EntityDocumentProcessorBroker broker = null; for (Map.Entry<ListenerRegistration, List<EntityDocumentProcessor>> entry : this.entityDocumentProcessors .entrySet()) { for (EntityDocumentProcessor edp : entry.getValue()) { if (result == null) { result = edp; } else { if (broker == null) { broker = new EntityDocumentProcessorBroker(); broker.registerEntityDocumentProcessor(result); result = broker; } broker.registerEntityDocumentProcessor(edp); } } } return filterEntityDocumentProcessor(result); } /** * Wraps the given processor into an {@link EntityDocumentProcessorFilter} if * global filters are configured; otherwise just returns the processor * unchanged.
* * @param processor * the processor to wrap */ private EntityDocumentProcessor filterEntityDocumentProcessor( EntityDocumentProcessor processor) { if (this.filter.getPropertyFilter() == null && this.filter.getSiteLinkFilter() == null && this.filter.getLanguageFilter() == null) { return processor; } else { return new EntityDocumentProcessorFilter( processor, this.filter); } } /** * Returns an {@link MwRevisionProcessor} object that calls all registered * processors and that takes filters into account if needed. * * @return the master processor */ private MwRevisionProcessor getMasterMwRevisionProcessor() { MwRevisionProcessorBroker result = new MwRevisionProcessorBroker(); for (Entry<ListenerRegistration, List<MwRevisionProcessor>> entry : this.mwRevisionProcessors .entrySet()) { for (MwRevisionProcessor mrp : entry.getValue()) { result.registerMwRevisionProcessor(mrp, entry.getKey().model, entry.getKey().onlyCurrentRevisions); } } for (Map.Entry<ListenerRegistration, List<EntityDocumentProcessor>> edpEntry : this.entityDocumentProcessors .entrySet()) { EntityDocumentProcessor resultEdp; if (edpEntry.getValue().size() == 1) { resultEdp = edpEntry.getValue().get(0); } else { EntityDocumentProcessorBroker edpb = new EntityDocumentProcessorBroker(); for (EntityDocumentProcessor edp : edpEntry.getValue()) { edpb.registerEntityDocumentProcessor(edp); } resultEdp = edpb; } result.registerMwRevisionProcessor(new WikibaseRevisionProcessor( filterEntityDocumentProcessor(resultEdp), Datamodel.SITE_WIKIDATA), edpEntry.getKey().model, edpEntry .getKey().onlyCurrentRevisions); } return result; } } EntityTimerProcessor.java000066400000000000000000000114341444772566300346700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 - 2016 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentDumpProcessor; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.util.Timer; /** * Simple {@link EntityDocumentProcessor} for basic counting and time keeping. * It will print statistics on elapsed time and processed entities once in a * while. The class also supports a timeout mechanism: if a timeout time (in * seconds) is given, then a {@link EntityTimerProcessor.TimeoutException} * (unchecked) will be thrown soon after this many seconds have passed. This can * be used to abort processing in a relatively clean way by catching this * exception at a higher level.
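* <p>
* A typical (illustrative) use as a safeguard around dump processing; the
* actual processing loop is only sketched here:
* <pre>{@code
* EntityTimerProcessor timerProcessor = new EntityTimerProcessor(60); // abort after ~60 seconds
* timerProcessor.open();
* try {
*     // ... forward entity documents to timerProcessor here ...
* } catch (EntityTimerProcessor.TimeoutException e) {
*     // the timeout was exceeded and processing was aborted
* } finally {
*     timerProcessor.close();
* }
* }</pre>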
* * @author Markus Kroetzsch * */ public class EntityTimerProcessor implements EntityDocumentDumpProcessor { static final Logger logger = LoggerFactory .getLogger(EntityTimerProcessor.class); final Timer timer = Timer.getNamedTimer("EntityTimerProcessor"); final int timeout; int entityCount = 0; int lastSeconds = 0; /** * Number of seconds after which a progress report is printed. If a timeout * is configured, it will only be checked at a report. */ int reportInterval = 10; /** * Constructor. * * @param timeout * the timeout in seconds or 0 if no timeout should be used */ public EntityTimerProcessor(int timeout) { this.timeout = timeout; } /** * Sets the interval after which the timer should report progress. By * default, this is ten seconds. When using a timeout, the timeout condition * will only be checked at this interval, too, so using a very large value * would lead to increasing imprecision with the timeout. The timer does not * use a separate thread, and reports will only be generated after an entity * was fully processed. Thus, very long processing times would also affect * the accuracy of the interval. * * @param seconds * time after which progress should be reported. */ public void setReportInterval(int seconds) { if (seconds <= 0) { throw new IllegalArgumentException( "The report interval must be a non-zero, positive number of seconds."); } this.reportInterval = seconds; } @Override public void processItemDocument(ItemDocument itemDocument) { countEntity(); } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { countEntity(); } @Override public void open() { // Nothing to do. We only start the timer when the first entity is // really processed. } /** * Stops the processing and prints the final time. */ @Override public void close() { logger.info("Finished processing."); this.timer.stop(); this.lastSeconds = (int) (timer.getTotalWallTime() / 1000000000); printStatus(); } /** * Counts one entity. Every once in a while, the current time is checked so * as to print an intermediate report roughly every ten seconds. */ private void countEntity() { if (!this.timer.isRunning()) { startTimer(); } this.entityCount++; if (this.entityCount % 100 == 0) { timer.stop(); int seconds = (int) (timer.getTotalWallTime() / 1000000000); if (seconds >= this.lastSeconds + this.reportInterval) { this.lastSeconds = seconds; printStatus(); if (this.timeout > 0 && seconds > this.timeout) { logger.info("Timeout. Aborting processing."); throw new TimeoutException(); } } timer.start(); } } /** * Prints the current status, time and entity count. */ private void printStatus() { logger.info("Processed " + this.entityCount + " entities in " + this.lastSeconds + " sec" + (this.lastSeconds > 0 ? " (" + (this.entityCount / this.lastSeconds) + " per second)" : "")); } private void startTimer() { logger.info("Starting processing."); this.timer.start(); } public static class TimeoutException extends RuntimeException { private static final long serialVersionUID = -1083533602730765194L; } } JsonDumpFileProcessor.java000066400000000000000000000141151444772566300347510ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import com.fasterxml.jackson.databind.DeserializationFeature; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.implementation.EntityDocumentImpl; import org.wikidata.wdtk.datamodel.interfaces.*; import com.fasterxml.jackson.core.JsonParser.Feature; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.MappingIterator; import com.fasterxml.jackson.databind.ObjectReader; /** * Processor for JSON dumpfiles. * * @author Markus Kroetzsch * */ public class JsonDumpFileProcessor implements MwDumpFileProcessor { static final Logger logger = LoggerFactory .getLogger(JsonDumpFileProcessor.class); private final ObjectReader documentReader; private final EntityDocumentProcessor entityDocumentProcessor; public JsonDumpFileProcessor( EntityDocumentProcessor entityDocumentProcessor, String siteIri) { this.entityDocumentProcessor = entityDocumentProcessor; this.documentReader = new DatamodelMapper(siteIri) .readerFor(EntityDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); } /** * Process dump file data from the given input stream. This method uses the * efficient Jackson {@link MappingIterator}, which, however, cannot recover * from processing errors. If an error occurs in one entity, the * (presumably) less efficient processing method * {@link #processDumpFileContentsRecovery(InputStream)} is used for the * remaining input instead. * * @see MwDumpFileProcessor#processDumpFileContents(InputStream, MwDumpFile) */ @Override public void processDumpFileContents(InputStream inputStream, MwDumpFile dumpFile) { logger.info("Processing JSON dump file " + dumpFile.toString()); try { try { MappingIterator<EntityDocument> documentIterator = documentReader.readValues(inputStream); documentIterator.getParser().disable(Feature.AUTO_CLOSE_SOURCE); while (documentIterator.hasNextValue()) { EntityDocument document = documentIterator.nextValue(); handleDocument(document); } documentIterator.close(); } catch (JsonProcessingException e) { logJsonProcessingException(e); processDumpFileContentsRecovery(inputStream); } } catch (IOException e) { throw new RuntimeException("Cannot read JSON input: " + e.getMessage(), e); } } /** * Reports the error of a JSON processing exception that was caught when * trying to read an entity. * * @param exception * the exception to log */ private void logJsonProcessingException(JsonProcessingException exception) { JsonDumpFileProcessor.logger .error("Error when reading JSON for entity: " + exception.getMessage()); } /** * Handles an {@link EntityDocument} that was retrieved by * parsing the JSON input. It will call the appropriate processing method * depending on the type of document.
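* <p>
* For example (an illustrative sketch, not part of this class), a processor
* given to this dump file processor could count items by implementing only
* the callbacks it cares about:
* <pre>{@code
* EntityDocumentProcessor itemCounter = new EntityDocumentProcessor() {
*     long count = 0;
*     public void processItemDocument(ItemDocument itemDocument) {
*         count++; // called by handleDocument for every item in the dump
*     }
*     public void processPropertyDocument(PropertyDocument propertyDocument) {
*         // properties are ignored in this sketch
*     }
* };
* }</pre>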
* * @param document * the document to process */ private void handleDocument(EntityDocument document) { if (document instanceof ItemDocument) { this.entityDocumentProcessor .processItemDocument((ItemDocument) document); } else if (document instanceof PropertyDocument) { this.entityDocumentProcessor .processPropertyDocument((PropertyDocument) document); } else if (document instanceof LexemeDocument) { this.entityDocumentProcessor .processLexemeDocument((LexemeDocument) document); } else if (document instanceof MediaInfoDocument) { this.entityDocumentProcessor .processMediaInfoDocument((MediaInfoDocument) document); } } /** * Process dump file data from the given input stream. The method can * recover from errors that occurred while processing the input stream, * which is assumed to contain the JSON serialization of a list of * entities, with each entity serialized on a line of its own. To recover * from the previous error, the first line is skipped. * * @param inputStream * the stream to read from * @throws IOException * if there is a problem reading the stream */ private void processDumpFileContentsRecovery(InputStream inputStream) throws IOException { JsonDumpFileProcessor.logger .warn("Entering recovery mode to parse rest of file. This might be slightly slower."); BufferedReader br = new BufferedReader(new InputStreamReader( inputStream, StandardCharsets.UTF_8)); String line = br.readLine(); if (line == null) { // can happen if iterator already has consumed all // the stream return; } if (line.length() >= 100) { line = line.substring(0, 100) + "[...]" + line.substring(line.length() - 50); } JsonDumpFileProcessor.logger.warn("Skipping rest of current line: " + line); line = br.readLine(); while (line != null && line.length() > 1) { try { EntityDocument document; if (line.charAt(line.length() - 1) == ',') { document = documentReader.readValue(line.substring(0, line.length() - 1)); } else { document = documentReader.readValue(line); } handleDocument(document); } catch (JsonProcessingException e) { logJsonProcessingException(e); JsonDumpFileProcessor.logger.error("Problematic line was: " + line.substring(0, Math.min(50, line.length())) + "..."); } line = br.readLine(); } } } Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/MwDumpFile.java000066400000000000000000000072121444772566300326020ustar00rootroot00000000000000package org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.util.Comparator; /** * Representation of MediaWiki dump files, which provides access to important * basic properties of dumps, and to the content of the dump itself. * * @author Markus Kroetzsch * */ public interface MwDumpFile { /** * Comparator to sort dumps by date.
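* <p>
* Illustrative use for ordering dumps chronologically (the list contents
* are an assumption of this sketch):
* <pre>{@code
* List<MwDumpFile> dumps = new ArrayList<>(); // fill, e.g., from a dump file manager
* dumps.sort(new MwDumpFile.DateComparator());
* }</pre>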
* * @author Markus Kroetzsch * */ class DateComparator implements Comparator<MwDumpFile> { @Override public int compare(MwDumpFile a, MwDumpFile b) { return a.getDateStamp().compareTo(b.getDateStamp()); } } /** * Checks if the dump is actually available. Should be called before * {@link #getDumpFileReader()}. Depending on the type of dumpfile, this * will trigger one or more checks to make sure that all relevant data can * be accessed for this dump file. This is still no definite guarantee that * the download will succeed, since there can always be IO errors anyway, * but it helps to detect cases where the dump is clearly not in a usable * state. * * @return true if the dump file is likely to be available */ boolean isAvailable(); /** * Returns the project name for this dump. Together with the dump content * type and date stamp, this identifies the dump, and it is therefore always * available. * * @return a project name string */ String getProjectName(); /** * Returns the date stamp for this dump. Together with the project name and * dump content type, this identifies the dump, and it is therefore always * available. * * @return a string that represents a date in format YYYYMMDD */ String getDateStamp(); /** * Returns information about the content of the dump. Together with the * project name and date stamp, this identifies the dump, and it is * therefore always available. * * @return the content type of this dump */ DumpContentType getDumpContentType(); /** * Returns an input stream that provides access to the (uncompressed) text * content of the dump file. *
<p>
* It is important to close the stream after use. * * @return an input stream to read the dump file * @throws IOException * if the dump file contents could not be accessed */ InputStream getDumpFileStream() throws IOException; /** * Returns a buffered reader that provides access to the (uncompressed) text * content of the dump file. *
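<p>
* A short illustrative sketch; the local file path is a placeholder and
* exception handling is omitted:
* <pre>{@code
* MwDumpFile dump = new MwLocalDumpFile("dumpfiles/local-dump.json.gz");
* try (BufferedReader reader = dump.getDumpFileReader()) {
*     String firstLine = reader.readLine();
*     // ... process the remaining lines ...
* }
* }</pre>
*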
<p>
* It is important to close the reader after use. * * @return a buffered reader to read the dump file * @throws IOException * if the dump file contents could not be accessed */ BufferedReader getDumpFileReader() throws IOException; /** * Prepares the dump file for access via {@link #getDumpFileStream()} or * {@link #getDumpFileReader()}. In particular, this will download any * remote files. * * @throws IOException * if there was a problem preparing the files */ void prepareDumpFile() throws IOException; } MwDumpFileProcessor.java000066400000000000000000000025431444772566300344250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.InputStream; /** * General interface for classes that process dump files, typically for parsing * them. * * @author Markus Kroetzsch * */ public interface MwDumpFileProcessor { /** * Process dump file data from the given input stream. *
<p>
* The input stream is obtained from the given dump file via * {@link MwDumpFile#getDumpFileStream()}. It will be closed by the * caller. * * @param inputStream * to access the contents of the dump * @param dumpFile * to access further information about this dump */ void processDumpFileContents(InputStream inputStream, MwDumpFile dumpFile); } MwDumpFormatException.java000066400000000000000000000022071444772566300347520ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Exception class to report errors in the format of a MediaWiki dump file. * * @author Markus Kroetzsch * */ public class MwDumpFormatException extends Exception { private static final long serialVersionUID = 8281842207514453147L; /** * Constructs a new exception with the given message. * * @param message * the message string */ public MwDumpFormatException(String message) { super(message); } } MwLocalDumpFile.java000066400000000000000000000170101444772566300334730ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.util.DirectoryManager; import org.wikidata.wdtk.util.DirectoryManagerFactory; import org.wikidata.wdtk.dumpfiles.wmf.WmfDumpFile; /** * Class for representing dump files that are found at arbitrary (local) file * paths. The meta-data for the dump file (content type, time stamp, etc.) can * be set explicitly, or be guessed from the file name (to the extent possible). 
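* <p>
* Illustrative sketches (file paths and dates are placeholders of this
* example):
* <pre>{@code
* // content type and date stamp guessed from a typical dump file name:
* MwLocalDumpFile jsonDump = new MwLocalDumpFile("dumpfiles/wikidata-20230109-all.json.gz");
*
* // meta-data stated explicitly when the file name is not informative:
* MwLocalDumpFile fullDump = new MwLocalDumpFile("dumpfiles/data.xml.bz2",
*         DumpContentType.FULL, "20230109", "wikidatawiki");
* }</pre>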
* * @author Markus Damm * @author Markus Kroetzsch */ public class MwLocalDumpFile implements MwDumpFile { static final Logger logger = LoggerFactory.getLogger(MwLocalDumpFile.class); /** * Date stamp when the dump file was created. If there is no date stamp * given or found, it is set to "YYYYMMDD" */ final String dateStamp; /** * Project name of the dump file */ final String projectName; /** * Name of the dump file in the file system */ final String dumpFileName; /** * Absolute path to the dump file */ final Path dumpFilePath; /** * Type of this dumpfile */ final DumpContentType dumpContentType; /** * DirectoryManager for accessing the dumpfile */ DirectoryManager directoryManager; /** * True if the given file is available (exists). */ final boolean isAvailable; /** * Hash map defining the compression type of each type of dump. */ static final Map COMPRESSION_TYPE = new HashMap<>(); static { MwLocalDumpFile.COMPRESSION_TYPE.put(DumpContentType.DAILY, CompressionType.BZ2); MwLocalDumpFile.COMPRESSION_TYPE.put(DumpContentType.CURRENT, CompressionType.BZ2); MwLocalDumpFile.COMPRESSION_TYPE.put(DumpContentType.FULL, CompressionType.BZ2); MwLocalDumpFile.COMPRESSION_TYPE.put(DumpContentType.SITES, CompressionType.GZIP); MwLocalDumpFile.COMPRESSION_TYPE.put(DumpContentType.JSON, CompressionType.GZIP); } /** * Constructor. The DumpContentType will be inferred by the name of the * file, if possible. If it is not possible, it will be set to JSON by * default. * * @param filepath * Path to the dump file in the file system */ public MwLocalDumpFile(String filepath) { this(filepath, null, null, null); } /** * Constructor. * * @param filePath * Path to the dump file in the file system * @param dumpContentType * DumpContentType of the dump file, or null if not known to * guess it from file name; this information is essential to * invoke the correct processing code to read the dump file * @param dateStamp * dump date in format YYYYMMDD, or null if not known to guess it * from file name; this is mainly used for logs and messages * @param projectName * project name string, or null to use a default string; this is * mainly used for logs and messages */ public MwLocalDumpFile(String filePath, DumpContentType dumpContentType, String dateStamp, String projectName) { this.dumpFilePath = Paths.get(filePath).toAbsolutePath(); this.dumpFileName = this.dumpFilePath.getFileName().toString(); try { this.directoryManager = DirectoryManagerFactory .createDirectoryManager(this.dumpFilePath.getParent(), true); } catch (IOException e) { this.directoryManager = null; logger.error("Could not access local dump file: " + e.toString()); } if (dumpContentType == null) { this.dumpContentType = guessDumpContentType(this.dumpFileName); } else { this.dumpContentType = dumpContentType; } if (dateStamp == null) { this.dateStamp = guessDumpDate(this.dumpFileName); } else { this.dateStamp = dateStamp; } if (projectName == null) { this.projectName = "LOCAL"; } else { this.projectName = projectName; } this.isAvailable = this.directoryManager != null && this.directoryManager.hasFile(this.dumpFileName); } /** * Returns the absolute path to this dump file. 
* * @return path */ public Path getPath() { return this.dumpFilePath; } @Override public boolean isAvailable() { return this.isAvailable; } @Override public String getProjectName() { return this.projectName; } @Override public String getDateStamp() { return this.dateStamp; } @Override public DumpContentType getDumpContentType() { return this.dumpContentType; } @Override public InputStream getDumpFileStream() throws IOException { if (!isAvailable()) { throw new IOException("Local dump file \"" + this.dumpFilePath.toString() + "\" is not available for reading."); } return this.directoryManager.getInputStreamForFile(this.dumpFileName, WmfDumpFile.getDumpFileCompressionType(dumpFileName)); } @Override public BufferedReader getDumpFileReader() throws IOException { return new BufferedReader(new InputStreamReader(getDumpFileStream(), StandardCharsets.UTF_8)); } @Override public void prepareDumpFile() { // nothing to do } @Override public String toString() { return this.dumpFilePath.toString() + " (" + this.projectName + "/" + getDumpContentType().toString().toLowerCase() + "/" + this.dateStamp + ")"; } /** * Guess the type of the given dump from its filename. * * @param fileName * @return dump type, defaulting to JSON if no type was found */ private static DumpContentType guessDumpContentType(String fileName) { String lcDumpName = fileName.toLowerCase(); if (lcDumpName.contains(".json.gz")) { return DumpContentType.JSON; } else if (lcDumpName.contains(".json.bz2")) { return DumpContentType.JSON; } else if (lcDumpName.contains(".sql.gz")) { return DumpContentType.SITES; } else if (lcDumpName.contains(".xml.bz2")) { if (lcDumpName.contains("daily")) { return DumpContentType.DAILY; } else if (lcDumpName.contains("current")) { return DumpContentType.CURRENT; } else { return DumpContentType.FULL; } } else { logger.warn("Could not guess type of the dump file \"" + fileName + "\". Defaulting to json.gz."); return DumpContentType.JSON; } } /** * Guess the date of the dump from the given dump file name. * * @param fileName * @return 8-digit date stamp or YYYYMMDD if none was found */ private static String guessDumpDate(String fileName) { Pattern p = Pattern.compile("([0-9]{8})"); Matcher m = p.matcher(fileName); if (m.find()) { return m.group(1); } else { logger.info("Could not guess date of the dump file \"" + fileName + "\". Defaulting to YYYYMMDD."); return "YYYYMMDD"; } } } Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/MwRevision.java000066400000000000000000000163241444772566300326770ustar00rootroot00000000000000package org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Representation of one revision of a page in MediaWiki. * * @author Markus Kroetzsch * */ public interface MwRevision { /** * The model used for MediaWiki revisions in traditional Wikitext. Revisions * with this format should always use "text/x-wiki" as their format. 
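* <p>
* A small illustrative sketch of how the model constants are typically used
* when filtering revisions; the mwRevision variable is an assumption:
* <pre>{@code
* if (MwRevision.MODEL_WIKITEXT.equals(mwRevision.getModel())) {
*     // the revision text is ordinary wikitext
* }
* }</pre>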
*/ String MODEL_WIKITEXT = "wikitext"; /** * The model used for MediaWiki revisions representing Wikibase items. * Revisions of this model should always use "application/json" as their * format. */ String MODEL_WIKIBASE_ITEM = "wikibase-item"; /** * The model used for MediaWiki revisions representing Wikibase properties. * Revisions of this model should always use "application/json" as their * format. */ String MODEL_WIKIBASE_PROPERTY = "wikibase-property"; /** * The model used for MediaWiki revisions representing Wikibase lexemes. * Revisions of this model should always use "application/json" as their * format. */ String MODEL_WIKIBASE_LEXEME = "wikibase-lexeme"; /** * The revision id set when the revision does not exist. */ long NO_REVISION_ID = -1; /** * Returns the title string of the revised page, including namespace * prefixes and subpages, if any. The string is formatted as it would be on * an HTML page and not as in the URL used by MediaWiki for the page. For * example, spaces are represented as spaces and not as underscores. For * example, a prefixed title could be "User talk:John Doe" rather than * "User_talk:John_Doe". *
<p>
* On a single MediaWiki site, the prefixed page title is a key for a page * at any given moment. However, users may change the title and namespace by * moving pages. The page id provides a better clue to identify pages across * history. * * @return title string */ String getPrefixedTitle(); /** * Returns the title string of the revised page without any namespace * prefixes. The string is formatted as it would be on an HTML page and not * as in the URL used by MediaWiki for the page. For example, spaces are * represented as spaces and not as underscores. For example, the plain * title could be "John Doe" rather than "John_Doe". *
<p>
* On a single MediaWiki site, the combination of page title and page * namespace is a key for a page at any given moment. However, users may * change the title and namespace by moving pages. The page id provides a * better clue to identify pages across history. * * @return title string */ String getTitle(); /** * Returns the id of the MediaWiki namespace of the revised page. The * meaning of this id depends on the configuration of the site that the page * is from. Usually, 0 is the main namespace. Even ids usually refer to * normal article pages while their odd successors represent the * corresponding talk namespace. *
<p>
* On a single MediaWiki site, the combination of page title and page * namespace is a key for a page at any given moment. However, users may * change the title and namespace by moving pages. The page id provides a * better clue to identify pages across history. * * @return integer namespace id */ int getNamespace(); /** * Returns the numeric page id of the revised page. For any given MediaWiki * site, pages are uniquely identified by their page id. MediaWiki will try * to preserve the page id even across title changes (moves). * * @return integer page id */ int getPageId(); /** * Returns the numeric id of the current revision. For any given MediaWiki * site, revisions are uniquely identified by their revision id. In * particular, two distinct revisions can never have the same id, even if * they belong to different pages. * * @return long revision id */ long getRevisionId(); /** * Returns the numeric id of the parent revision. * It is the id of the revision the current revision is based on. * It returns -1 if there is no parent revision, i.e., if the page * has just been created. * * @return revision id */ long getParentRevisionId(); /** * Returns the time stamp at which the current revision was made. The time * stamp is a string that is formatted according to ISO 8601, such as * "2014-02-19T23:34:16Z". * * @return time stamp string */ String getTimeStamp(); /** * Returns the text content of the current revision. Traditionally, this is * a wiki text that is edited by users. More recently, however, other * formats, such as JSON, have been introduced by extensions like Wikibase. * The format of the text is specified by {@link #getFormat()}. To interpret * it properly, one should also know the content model, obtained from * {@link #getModel()}. * * @return text content of the revision */ String getText(); /** * Returns the content model of the revision. This specifies how the text * content should be interpreted. Content models are usually configured for * namespaces and thus remain rather stable across the history of a page. * However, a page could in principle change its content model over time and * every revision therefore specifies its own content model. All known * models require a single format, obtained from {@link #getFormat()}. * * @return content model as a string */ String getModel(); /** * Returns the format of the revision text. This string should be formatted * as a MIME media type. Typical examples are "application/json" (JSON) and * "text/x-wiki" (MediaWiki wikitext). To interpret the meaning of this * format, one should also consider the content model obtained by * {@link #getModel()}. Like the content model, the format might change * between revisions of a page, but this is very rare in practice. * * @return MIME type for revision text */ String getFormat(); /** * Returns the comment string that was used for making the edit that led to * this revision. * * @return comment string */ String getComment(); /** * Returns the name of the contributor that made the edit that led to this * revision. This might be a user name or an IP address. This can be checked * using {@link #hasRegisteredContributor()}. * * @return contributor name or IP address */ String getContributor(); /** * Returns the user id of the contributor who made the edit that led to this * revision, or -1 if the edit was not made by a registered user.
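* <p>
* Illustrative sketch of combining this method with
* {@link #hasRegisteredContributor()}; the mwRevision variable is an
* assumption of this example:
* <pre>{@code
* if (mwRevision.hasRegisteredContributor()) {
*     int userId = mwRevision.getContributorId(); // id of the registered user
* } else {
*     String ipAddress = mwRevision.getContributor(); // anonymous edit
* }
* }</pre>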
* * @return user id or -1 for anonymous users */ int getContributorId(); /** * Returns true if the contributor who made the edit that led to this * revision was logged in with a user account. False is returned if the * contributor was not logged in (in which case there is only an IP * address). * * @return true if the contributor was logged in */ boolean hasRegisteredContributor(); } MwRevisionDumpFileProcessor.java000066400000000000000000000401171444772566300361430ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.InputStream; import java.util.HashMap; import java.util.Map; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class processes MediaWiki dumpfiles that contain lists of page revisions * in the specific XML format used by MediaWiki for exporting pages. It extracts * all revisions and forwards them to any registered revision processor. The * class also keeps track of whether or not a certain article or revision has * already been encountered. Therefore, no revision is processed twice and the * registered revision processors can be informed whether the revision is the * first of the given article or not. The first revision of an article that is * encountered in a MediaWiki dump file is usually the most recent one. If * multiple dump files are processed in reverse chronological order, the first * revision that is encountered is also the most recent one overall.
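* <p>
* A minimal wiring sketch; myRevisionProcessor and dumpFile are placeholders
* for any {@link MwRevisionProcessor} implementation and any
* {@link MwDumpFile}, respectively, and exception handling is omitted:
* <pre>{@code
* MwRevisionProcessorBroker broker = new MwRevisionProcessorBroker();
* broker.registerMwRevisionProcessor(myRevisionProcessor, null, true);
* MwDumpFileProcessor xmlProcessor = new MwRevisionDumpFileProcessor(broker);
* xmlProcessor.processDumpFileContents(dumpFile.getDumpFileStream(), dumpFile);
* }</pre>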
* * @author Markus Kroetzsch * */ public class MwRevisionDumpFileProcessor implements MwDumpFileProcessor { static final String E_MEDIAWIKI = "mediawiki"; static final String E_SITEINFO = "siteinfo"; static final String E_SITENAME = "sitename"; static final String E_BASEURL = "base"; static final String E_NAMESPACE = "namespace"; static final String A_NSKEY = "key"; static final String E_PAGE = "page"; static final String E_PAGE_TITLE = "title"; static final String E_PAGE_ID = "id"; static final String E_PAGE_NAMESPACE = "ns"; static final String E_PAGE_REVISION = "revision"; static final String E_PAGE_REDIRECT = "redirect"; static final String E_REV_ID = "id"; static final String E_REV_PARENT_ID = "parentid"; static final String E_REV_TIMESTAMP = "timestamp"; static final String E_REV_COMMENT = "comment"; static final String E_REV_MODEL = "model"; static final String E_REV_TEXT = "text"; static final String E_REV_CONTRIBUTOR = "contributor"; static final String E_REV_FORMAT = "format"; static final String E_REV_SHA1 = "sha1"; static final String E_REV_MINOR = "minor"; static final String E_CONTRIBUTOR_NAME = "username"; static final String E_CONTRIBUTOR_ID = "id"; static final String E_CONTRIBUTOR_IP = "ip"; static final Logger logger = LoggerFactory .getLogger(MwRevisionDumpFileProcessor.class); final XMLInputFactory xmlFactory; XMLStreamReader xmlReader; /** * Map from integer namespace ids to namespace prefixes. Namespace strings * do not include the final ":" used in MediaWiki to separate namespace * prefixes from article titles. Moreover, the prefixes use spaces, not * underscores as in MediaWiki URLs. */ final Map namespaces; /** * Name of the site as set in the dump file. */ String sitename = ""; /** * Base URL of the site as set in the dump file. */ String baseUrl = ""; /** * Object used to store data about the current revision. */ final MwRevisionImpl mwRevision; /** * Object used to report all revisions to. */ final MwRevisionProcessor mwRevisionProcessor; /** * Constructor. * * @param mwRevisionProcessor * the revision processor to which all revisions will be reported */ public MwRevisionDumpFileProcessor(MwRevisionProcessor mwRevisionProcessor) { this.xmlFactory = XMLInputFactory.newInstance(); this.namespaces = new HashMap<>(); this.mwRevision = new MwRevisionImpl(); this.mwRevisionProcessor = mwRevisionProcessor; reset(); } /** * Resets the internal state of the object. All information gathered from * previously processed dumps and all related statistics will be forgotten. * If this method is not called, then consecutive invocations of * {@link #processDumpFileContents(InputStream, MwDumpFile)} will continue * to add to the internal state. This is useful for processing dumps that * are split into several parts. *
<p>
* This will not unregister any MwRevisionProcessors. */ public void reset() { this.namespaces.clear(); } @Override public void processDumpFileContents(InputStream inputStream, MwDumpFile dumpFile) { logger.info("Processing revision dump file " + dumpFile.toString()); this.namespaces.clear(); this.sitename = ""; this.baseUrl = ""; this.xmlReader = null; try { this.xmlReader = this.xmlFactory.createXMLStreamReader(inputStream); processXmlMediawiki(); } catch (XMLStreamException | MwDumpFormatException e) { MwRevisionDumpFileProcessor.logger.error(e.toString()); } finally { // unfortunately, xmlReader does not implement AutoCloseable if (this.xmlReader != null) { try { this.xmlReader.close(); } catch (XMLStreamException e) { throw new RuntimeException( "Problem closing XML Reader. This hides an earlier exception.", e); } } } this.mwRevisionProcessor.finishRevisionProcessing(); } /** * Processes current XML starting from a <mediawiki> start tag up to * the corresponding end tag. This method uses the current state of * {@link #xmlReader} and stores its results in the corresponding member * fields. When the method has finished, {@link #xmlReader} will be at the * next element after the closing tag of this block. * * @throws XMLStreamException * if there was a problem reading the XML or if the XML is * malformed * @throws MwDumpFormatException * if the contents of the XML file did not match our * expectations of a MediaWiki XML dump */ void processXmlMediawiki() throws XMLStreamException, MwDumpFormatException { while (this.xmlReader.hasNext()) { switch (this.xmlReader.getEventType()) { case XMLStreamConstants.START_ELEMENT: switch (this.xmlReader.getLocalName()) { case MwRevisionDumpFileProcessor.E_MEDIAWIKI: break; case MwRevisionDumpFileProcessor.E_SITEINFO: processXmlSiteinfo(); this.mwRevisionProcessor.startRevisionProcessing( this.sitename, this.baseUrl, this.namespaces); break; case MwRevisionDumpFileProcessor.E_PAGE: tryProcessXmlPage(); break; } break; case XMLStreamConstants.END_ELEMENT: if (!"mediawiki".equals(this.xmlReader.getLocalName())) { throw new MwDumpFormatException("Unexpected end element </" + this.xmlReader.getLocalName() + ">."); } break; } this.xmlReader.next(); } } /** * Processes current XML starting from a <siteinfo> start tag up to * the corresponding end tag. This method uses the current state of * {@link #xmlReader} and stores its results in the corresponding member * fields. When the method has finished, {@link #xmlReader} will be at the * next element after the closing tag of this block.
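* <p>
* For orientation, the shape of the XML block consumed here (the values are
* purely illustrative):
* <pre>
* <siteinfo>
*   <sitename>Wikidata</sitename>
*   <base>https://www.wikidata.org/wiki/Wikidata:Main_Page</base>
*   <namespaces>
*     <namespace key="0" />
*     <namespace key="1">Talk</namespace>
*   </namespaces>
* </siteinfo>
* </pre>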
* * @throws XMLStreamException * if there was a problem reading the XML or if the XML is * malformed */ void processXmlSiteinfo() throws XMLStreamException { this.xmlReader.next(); // skip current start tag while (this.xmlReader.hasNext()) { switch (this.xmlReader.getEventType()) { case XMLStreamConstants.START_ELEMENT: switch (xmlReader.getLocalName()) { case MwRevisionDumpFileProcessor.E_SITENAME: this.sitename = this.xmlReader.getElementText(); break; case MwRevisionDumpFileProcessor.E_NAMESPACE: Integer namespaceKey = Integer.parseInt( this.xmlReader.getAttributeValue(null, MwRevisionDumpFileProcessor.A_NSKEY)); this.namespaces.put(namespaceKey, this.xmlReader.getElementText()); break; case MwRevisionDumpFileProcessor.E_BASEURL: this.baseUrl = this.xmlReader.getElementText(); break; } break; case XMLStreamConstants.END_ELEMENT: if (MwRevisionDumpFileProcessor.E_SITEINFO .equals(this.xmlReader.getLocalName())) { return; } break; } this.xmlReader.next(); } } /** * Tries to process current XML starting from a <page> start tag up * to the corresponding end tag using {@link #processXmlPage()}. If this * fails for some reason, it tries to recover so that the remaining page * blocks can still be read. * * @throws XMLStreamException * if there was a problem reading the XML */ void tryProcessXmlPage() throws XMLStreamException { try { processXmlPage(); } catch (MwDumpFormatException e) { MwRevisionDumpFileProcessor.logger .error("Error when trying to process revision block for page \"" + this.mwRevision.getPrefixedTitle() + "\" (namespace " + this.mwRevision.getNamespace() + ", id " + this.mwRevision.getPageId() + "): " + e.toString()); MwRevisionDumpFileProcessor.logger.info("Trying to recover ..."); while (this.xmlReader.hasNext()) { this.xmlReader.next(); if (this.xmlReader.getEventType() == XMLStreamConstants.END_ELEMENT && this.xmlReader.getLocalName().equals(MwRevisionDumpFileProcessor.E_PAGE)) { MwRevisionDumpFileProcessor.logger .info("... recovery successful. Continuing processing."); return; } } MwRevisionDumpFileProcessor.logger .error("Recovery failed. Could not process remaining XML."); } } /** * Processes current XML starting from a <page> start tag up to the * corresponding end tag. This method uses the current state of * {@link #xmlReader} and stores its results in the corresponding member * fields. When the method has finished, {@link #xmlReader} will be at the * next element after the closing tag of this block.
* * @throws XMLStreamException * if there was a problem reading the XML or if the XML is * malformed * @throws MwDumpFormatException * if the contents of the XML file did not match our * expectations of a MediaWiki XML dump */ void processXmlPage() throws XMLStreamException, MwDumpFormatException { this.mwRevision.resetCurrentPageData(); this.xmlReader.next(); // skip current start tag while (this.xmlReader.hasNext()) { switch (this.xmlReader.getEventType()) { case XMLStreamConstants.START_ELEMENT: switch (this.xmlReader.getLocalName()) { case MwRevisionDumpFileProcessor.E_PAGE_TITLE: this.mwRevision.prefixedTitle = this.xmlReader.getElementText(); break; case MwRevisionDumpFileProcessor.E_PAGE_NAMESPACE: this.mwRevision.namespace = Integer.parseInt(this.xmlReader.getElementText()); break; case MwRevisionDumpFileProcessor.E_PAGE_ID: this.mwRevision.pageId = Integer.parseInt(this.xmlReader.getElementText()); break; case MwRevisionDumpFileProcessor.E_PAGE_REVISION: processXmlRevision(); break; case MwRevisionDumpFileProcessor.E_PAGE_REDIRECT: break; default: throw new MwDumpFormatException("Unexpected element \"" + this.xmlReader.getLocalName() + "\" in page."); } break; case XMLStreamConstants.END_ELEMENT: if (MwRevisionDumpFileProcessor.E_PAGE.equals(xmlReader .getLocalName())) { return; } break; } this.xmlReader.next(); } } /** * Processes current XML starting from a <revision> start tag up to * the corresponding end tag. This method uses the current state of * {@link #xmlReader} and stores its results in according member fields. * When the method has finished, {@link #xmlReader} will be at the next * element after the closing tag of this block. * * @throws XMLStreamException * if there was a problem reading the XML or if the XML is * malformed * @throws MwDumpFormatException * if the contents of the XML file did not match our * expectations of a MediaWiki XML dump */ void processXmlRevision() throws XMLStreamException, MwDumpFormatException { this.mwRevision.resetCurrentRevisionData(); this.xmlReader.next(); // skip current start tag while (this.xmlReader.hasNext()) { switch (this.xmlReader.getEventType()) { case XMLStreamConstants.START_ELEMENT: switch (this.xmlReader.getLocalName()) { case MwRevisionDumpFileProcessor.E_REV_COMMENT: this.mwRevision.comment = this.xmlReader.getElementText(); break; case MwRevisionDumpFileProcessor.E_REV_TEXT: this.mwRevision.text = this.xmlReader.getElementText(); break; case MwRevisionDumpFileProcessor.E_REV_TIMESTAMP: this.mwRevision.timeStamp = this.xmlReader.getElementText(); break; case MwRevisionDumpFileProcessor.E_REV_FORMAT: this.mwRevision.format = this.xmlReader.getElementText(); break; case MwRevisionDumpFileProcessor.E_REV_MODEL: this.mwRevision.model = this.xmlReader.getElementText(); break; case MwRevisionDumpFileProcessor.E_REV_CONTRIBUTOR: processXmlContributor(); break; case MwRevisionDumpFileProcessor.E_REV_ID: this.mwRevision.revisionId = Long.parseLong(this.xmlReader.getElementText()); break; case MwRevisionDumpFileProcessor.E_REV_PARENT_ID: this.mwRevision.parentRevisionId = Long.parseLong(this.xmlReader.getElementText()); break; case MwRevisionDumpFileProcessor.E_REV_SHA1: case MwRevisionDumpFileProcessor.E_REV_MINOR: break; default: throw new MwDumpFormatException("Unexpected element \"" + this.xmlReader.getLocalName() + "\" in revision."); } break; case XMLStreamConstants.END_ELEMENT: if (MwRevisionDumpFileProcessor.E_PAGE_REVISION .equals(this.xmlReader.getLocalName())) { this.mwRevisionProcessor.processRevision(this.mwRevision); 
return; } break; } this.xmlReader.next(); } } /** * Processes current XML starting from a <contributor> start tag up to * the corresponding end tag. This method uses the current state of * {@link #xmlReader} and stores its results in according member fields. * When the method has finished, {@link #xmlReader} will be at the next * element after the closing tag of this block. * * @throws XMLStreamException * if there was a problem reading the XML or if the XML is * malformed * @throws MwDumpFormatException * if the contents of the XML file did not match our * expectations of a MediaWiki XML dump */ void processXmlContributor() throws XMLStreamException, MwDumpFormatException { this.xmlReader.next(); // skip current start tag while (this.xmlReader.hasNext()) { switch (this.xmlReader.getEventType()) { case XMLStreamConstants.START_ELEMENT: switch (this.xmlReader.getLocalName()) { case MwRevisionDumpFileProcessor.E_CONTRIBUTOR_NAME: this.mwRevision.contributor = this.xmlReader .getElementText(); break; case MwRevisionDumpFileProcessor.E_CONTRIBUTOR_ID: this.mwRevision.contributorId = Integer.parseInt(this.xmlReader.getElementText()); break; case MwRevisionDumpFileProcessor.E_CONTRIBUTOR_IP: this.mwRevision.contributor = this.xmlReader .getElementText(); this.mwRevision.contributorId = -1; break; default: throw new MwDumpFormatException("Unexpected element \"" + this.xmlReader.getLocalName() + "\" in contributor."); } break; case XMLStreamConstants.END_ELEMENT: if (MwRevisionDumpFileProcessor.E_REV_CONTRIBUTOR .equals(this.xmlReader.getLocalName())) { return; } break; } this.xmlReader.next(); } } } Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/MwRevisionImpl.java000066400000000000000000000112111444772566300335070ustar00rootroot00000000000000package org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Implementation of {@link MwRevision}. The implementation is meant to be used * as a lightweight container that is reusable and thus mutable, but only using * package-private access. Even without the re-use (which might be reconsidered) * the package-private mutability makes sense during the stateful XML parsing * process. * * @author Markus Kroetzsch * */ public class MwRevisionImpl implements MwRevision { // TODO It should be evaluated later on if there is any notable // penalty when not reusing this object and creating a 100 million // additional objects when parsing a Wikidata dump. String prefixedTitle; String timeStamp; String text; String model; String format; String comment; String contributor; int contributorId; int namespace; int pageId; long revisionId; long parentRevisionId; /** * Constructor. */ public MwRevisionImpl() { resetCurrentPageData(); resetCurrentRevisionData(); } /** * Copy constructor. 
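*
* @param mwRevision
*            the revision whose data is copied into the new object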
*/ public MwRevisionImpl(MwRevision mwRevision) { this.prefixedTitle = mwRevision.getPrefixedTitle(); this.timeStamp = mwRevision.getTimeStamp(); this.text = mwRevision.getText(); this.model = mwRevision.getModel(); this.format = mwRevision.getFormat(); this.comment = mwRevision.getComment(); this.contributor = mwRevision.getContributor(); this.contributorId = mwRevision.getContributorId(); this.namespace = mwRevision.getNamespace(); this.pageId = mwRevision.getPageId(); this.revisionId = mwRevision.getRevisionId(); this.parentRevisionId = mwRevision.getParentRevisionId(); } @Override public String getPrefixedTitle() { return this.prefixedTitle; } @Override public String getTitle() { // We assume that 0 is always the main namespace, which has no prefix. // Without this assumption, the method would need the namespace map. if (this.namespace == 0) { return this.prefixedTitle; } else { return this.prefixedTitle .substring(this.prefixedTitle.indexOf(':') + 1); } } @Override public int getNamespace() { return this.namespace; } @Override public int getPageId() { return this.pageId; } @Override public long getRevisionId() { return this.revisionId; } @Override public long getParentRevisionId() { return this.parentRevisionId; } @Override public String getTimeStamp() { return this.timeStamp; } @Override public String getText() { return this.text; } @Override public String getModel() { return this.model; } @Override public String getFormat() { return this.format; } @Override public String getComment() { return this.comment; } @Override public String getContributor() { return this.contributor; } @Override public int getContributorId() { return this.contributorId; } @Override public boolean hasRegisteredContributor() { return (this.contributorId >= 0); } /** * Resets all member fields that hold information about the page that is * currently being processed. */ void resetCurrentPageData() { this.prefixedTitle = null; this.pageId = -1; // impossible as an id in MediaWiki this.namespace = 0; // possible value, but better than undefined } /** * Resets all member fields that hold information about the revision that is * currently being processed. */ void resetCurrentRevisionData() { this.revisionId = NO_REVISION_ID; // impossible as an id in MediaWiki this.parentRevisionId = NO_REVISION_ID; this.text = null; this.comment = null; this.format = null; this.timeStamp = null; this.model = null; } @Override public String toString() { return "Revision " + this.revisionId + " of page " + this.prefixedTitle + " (ns " + this.namespace + ", id " + this.pageId + "). Created at " + this.timeStamp + " by " + this.contributor + " (" + this.contributorId + ") with comment \"" + this.comment + "\". Model " + this.model + " (" + this.format + "). Text length: " + this.text.length() + " Parent revision id: " + this.parentRevisionId; } } MwRevisionProcessor.java000066400000000000000000000047661444772566300345270ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Map; /** * General interface for classes that process revisions of MediaWiki pages. * * @author Markus Kroetzsch * */ public interface MwRevisionProcessor { /** * Initialises the revision processor for processing revisions. General * information about the configuration of the site for which revisions are * being processed is provided. * * @param siteName * the name of the site * @param baseUrl * the base URL of the site * @param namespaces * map from integer namespace ids to namespace prefixes; * namespace strings do not include the final ":" used in * MediaWiki to separate namespace prefixes from article titles, * and the prefixes use spaces, not underscores as in MediaWiki * URLs. */ void startRevisionProcessing(String siteName, String baseUrl, Map<Integer, String> namespaces); /** * Process the given MediaWiki revision. * * @param mwRevision * the revision to process */ void processRevision(MwRevision mwRevision); /** * Performs final actions that should be done after all revisions in a batch * of revisions have been processed. This is usually called after a whole * dumpfile is completely processed. *
<p>
 * It is important to understand that this method might be called many times * during one processing run. Its main purpose is to signal the completion * of one file, not of the whole processing. This is used only to manage the * control flow of revision processing (e.g., to be sure that the most * recent revision of a page has certainly been found). This method must not * be used to do things that should happen at the very end of a run, such as * writing a file with results. */ void finishRevisionProcessing(); } MwRevisionProcessorBroker.java000066400000000000000000000155031444772566300356630ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.List; import java.util.Map; import org.wikidata.wdtk.storage.datastructures.BitVector; import org.wikidata.wdtk.storage.datastructures.BitVectorImpl; /** * This MwRevisionProcessor distributes revisions to subscribers that register * their interest in some type of message (revision). Duplicate revisions are * filtered. * * The broker also allows subscribers to receive only the most current revision * of a page rather than all revisions. To compute this efficiently, the broker * assumes that blocks of revisions are processed in inverse chronological * order, as is the case when processing MediaWiki dump files in inverse * chronological order. Revisions within a single block of revisions for one * page do not need to be ordered in any specific way. * * @author Markus Kroetzsch * */ public class MwRevisionProcessorBroker implements MwRevisionProcessor { /** * Simple data container to store information about the registered * subscribers. * * @author Markus Kroetzsch * */ static class RevisionSubscription { MwRevisionProcessor mwRevisionProcessor; String model; boolean onlyCurrentRevisions; @Override public String toString() { return "Subscription of " + this.mwRevisionProcessor.getClass().toString() + " to model " + this.model + " (current: " + this.onlyCurrentRevisions + ")"; } } final List<RevisionSubscription> revisionSubscriptions; /** * Holds the most current revision found in the block of revisions that is * currently being processed. If the current page block is not the first for * that page, this will not be stored and the value is null. */ MwRevisionImpl mostCurrentRevision; /** * Page id of the currently processed block of page revisions. Used to * detect when the block changes.
*/ int currentPageId; final BitVector encounteredPages; final BitVector encounteredRevisions; public MwRevisionProcessorBroker() { this.revisionSubscriptions = new ArrayList<>(); this.mostCurrentRevision = null; this.currentPageId = -1; // TODO these initial sizes need to be configurable encounteredPages = new BitVectorImpl(20000000); encounteredRevisions = new BitVectorImpl(200000000); } /** * Registers an MwRevisionProcessor, which will henceforth be notified of * all revisions that are encountered in the dump. *
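<p>
 * For example, an illustrative registration (where {@code itemProcessor} and
 * {@code statsProcessor} stand for arbitrary processors defined elsewhere)
 * might look like this:
 * <pre>{@code
 * MwRevisionProcessorBroker broker = new MwRevisionProcessorBroker();
 * // only the most current revision of each page in the item content model:
 * broker.registerMwRevisionProcessor(itemProcessor,
 *         MwRevision.MODEL_WIKIBASE_ITEM, true);
 * // all revisions, whatever their content model:
 * broker.registerMwRevisionProcessor(statsProcessor, null, false);
 * }</pre>
 *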
<p>
 * Importantly, the {@link MwRevision} that the registered processors will * receive is owned by this {@link MwRevisionProcessorBroker}. Its data is * valid only during the execution of * {@link MwRevisionProcessor#processRevision(MwRevision)}, but it * will not be permanent. If the data is to be retained permanently, the * revision processor needs to make its own copy. * * @param mwRevisionProcessor * the revision processor to register * @param model * the content model that the processor is registered for; it * will only be notified of revisions in that model; if null is * given, all revisions will be processed whatever their model * @param onlyCurrentRevisions * if true, then the subscriber is only notified of the most * current revisions; if false, then it will receive all * revisions, current or not */ public void registerMwRevisionProcessor( MwRevisionProcessor mwRevisionProcessor, String model, boolean onlyCurrentRevisions) { MwRevisionProcessorBroker.RevisionSubscription rs = new RevisionSubscription(); rs.mwRevisionProcessor = mwRevisionProcessor; rs.model = model; rs.onlyCurrentRevisions = onlyCurrentRevisions; this.revisionSubscriptions.add(rs); } @Override public void startRevisionProcessing(String siteName, String baseUrl, Map<Integer, String> namespaces) { for (MwRevisionProcessorBroker.RevisionSubscription rs : this.revisionSubscriptions) { rs.mwRevisionProcessor.startRevisionProcessing(siteName, baseUrl, namespaces); } } @Override public void processRevision(MwRevision mwRevision) { boolean revisionIsNew = !this.encounteredRevisions.getBit(mwRevision .getRevisionId()); if (revisionIsNew) { this.encounteredRevisions.setBit(mwRevision.getRevisionId(), true); } else { return; } if (mwRevision.getPageId() != this.currentPageId) { notifyMwRevisionProcessors(this.mostCurrentRevision, true); this.currentPageId = mwRevision.getPageId(); boolean currentPageIsNew = !this.encounteredPages .getBit(this.currentPageId); if (currentPageIsNew) { this.encounteredPages.setBit(this.currentPageId, true); this.mostCurrentRevision = new MwRevisionImpl(mwRevision); } else { this.mostCurrentRevision = null; } } else if (this.mostCurrentRevision != null && mwRevision.getRevisionId() > this.mostCurrentRevision .getRevisionId()) { this.mostCurrentRevision = new MwRevisionImpl(mwRevision); } notifyMwRevisionProcessors(mwRevision, false); } /** * Notifies all interested subscribers of the given revision. * * @param mwRevision * the given revision * @param isCurrent * true if this is guaranteed to be the most current revision */ void notifyMwRevisionProcessors(MwRevision mwRevision, boolean isCurrent) { if (mwRevision == null || mwRevision.getPageId() <= 0) { return; } for (MwRevisionProcessorBroker.RevisionSubscription rs : this.revisionSubscriptions) { if (rs.onlyCurrentRevisions == isCurrent && (rs.model == null || mwRevision.getModel().equals( rs.model))) { rs.mwRevisionProcessor.processRevision(mwRevision); } } } /** * Finalises the processing of one dump file (and hence of the current block * of pages). In particular, this means that the most current revision found * up to this point is really the most current one, so that subscribers * should be notified.
*/ @Override public void finishRevisionProcessing() { notifyMwRevisionProcessors(this.mostCurrentRevision, true); this.mostCurrentRevision = null; for (MwRevisionProcessorBroker.RevisionSubscription rs : this.revisionSubscriptions) { rs.mwRevisionProcessor.finishRevisionProcessing(); } } } MwSitesDumpFileProcessor.java000066400000000000000000000134441444772566300354370ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.implementation.SitesImpl; import org.wikidata.wdtk.datamodel.interfaces.Sites; /** * This class processes dump files that contain the SQL dump of the MediaWiki sites table. *
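<p>
 * A typical usage sketch (assuming an input stream and a dump file object
 * obtained elsewhere, e.g., from a dump file manager):
 * <pre>{@code
 * MwSitesDumpFileProcessor sitesProcessor = new MwSitesDumpFileProcessor();
 * sitesProcessor.processDumpFileContents(inputStream, dumpFile);
 * Sites sites = sitesProcessor.getSites(); // extracted sites information
 * }</pre>
 *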
<p>
 * The class expects all URLs in the dump to be protocol-relative (i.e., * starting with "//" rather than with "http://" or "https://") and it will * prepend "http:". * * @author Markus Kroetzsch * */ public class MwSitesDumpFileProcessor implements MwDumpFileProcessor { static final Logger logger = LoggerFactory .getLogger(MwSitesDumpFileProcessor.class); final SitesImpl sites = new SitesImpl(); /** * Returns the information about sites that has been extracted from the dump * file(s) processed earlier. * * @return the sites information */ public Sites getSites() { return this.sites; } @Override public void processDumpFileContents(InputStream inputStream, MwDumpFile dumpFile) { logger.info("Processing sites dump file " + dumpFile.toString()); BufferedReader bufferedReader = new BufferedReader( new InputStreamReader(inputStream)); try { String line; while ((line = bufferedReader.readLine()) != null) { if (line.startsWith("INSERT INTO `sites` VALUES")) { Matcher matcher = Pattern.compile("[(][^)]*[)]").matcher( line.substring(27, line.length() - 1)); while (matcher.find()) { processSiteRow(matcher.group()); } break; // stop after finding rows } } } catch (IOException e) { MwSitesDumpFileProcessor.logger .error("IO Error when processing dump of sites table: " + e.toString()); } } /** * Processes a row of the sites table and stores the site information found * therein. * * @param siteRow * string serialisation of a sites table row as found in the SQL * dump */ void processSiteRow(String siteRow) { String[] row = getSiteRowFields(siteRow); String filePath = ""; String pagePath = ""; String dataArray = row[8].substring(row[8].indexOf('{'), row[8].length() - 2); // Explanation for the regular expression below: // "'{' or ';'" followed by either // "NOT: ';', '{', or '}'" repeated one or more times; or // "a single '}'" // The first case matches ";s:5:\"paths\"" // but also ";a:2:" in "{s:5:\"paths\";a:2:{s:9:\ ...". // The second case matches ";}" which terminates (sub)arrays. Matcher matcher = Pattern.compile("[{;](([^;}{][^;}{]*)|[}])").matcher( dataArray); String prevString = ""; String curString = ""; String path = ""; boolean valuePosition = false; while (matcher.find()) { String match = matcher.group().substring(1); if (match.length() == 0) { valuePosition = false; continue; } if (match.charAt(0) == 's') { valuePosition = !valuePosition && !"".equals(prevString); curString = match.substring(match.indexOf('"') + 1, match.length() - 2); } else if (match.charAt(0) == 'a') { valuePosition = false; path = path + "/" + prevString; } else if ("}".equals(match)) { valuePosition = false; path = path.substring(0, path.lastIndexOf('/')); } if (valuePosition && "file_path".equals(prevString) && "/paths".equals(path)) { filePath = curString; } else if (valuePosition && "page_path".equals(prevString) && "/paths".equals(path)) { pagePath = curString; } prevString = curString; curString = ""; } MwSitesDumpFileProcessor.logger.debug("Found site data \"" + row[1] + "\" (group \"" + row[3] + "\", language \"" + row[5] + "\", type \"" + row[2] + "\")"); this.sites.setSiteInformation(row[1], row[3], row[5], row[2], filePath, pagePath); } /** * Extracts the individual fields for one row in the sites table. The entries * are encoded by position, with the following meaning: 0: site_id, 1: * site_global_key, 2: site_type, 3: site_group, 4: site_source, 5: * site_language, 6: site_protocol, 7: site_domain, 8: site_data, 9: * site_forward, 10: site_config.
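For example, in an invented, abridged row
 * ('1','enwiki','mediawiki','wikipedia','local','en',...), position 1 holds
 * the global key "enwiki", position 2 the type "mediawiki", position 3 the
 * group "wikipedia", and position 5 the language "en". *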
The method assumes that this is the layout * of the table, which is the case in MediaWiki 1.21 and above. * * @param siteRow * the string representation of a row in the sites table, with * the surrounding parentheses * @return an array with the individual entries */ String[] getSiteRowFields(String siteRow) { String[] siteRowFields = new String[11]; Matcher matcher = Pattern.compile("[(,](['][^']*[']|[^'][^),]*)") .matcher(siteRow); int columnIndex = 0; while (matcher.find()) { String field = matcher.group().substring(1); if (field.charAt(0) == '\'') { field = field.substring(1, field.length() - 1); } siteRowFields[columnIndex] = field; // ... will throw an exception if there are more fields than // expected; this is fine. columnIndex++; } return siteRowFields; } } StatisticsMwRevisionProcessor.java000066400000000000000000000106151444772566300365700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.util.Timer; /** * A simple revision processor that counts some basic figures and logs the * result. * * @author Markus Kroetzsch * */ public class StatisticsMwRevisionProcessor implements MwRevisionProcessor { static final Logger logger = LoggerFactory .getLogger(StatisticsMwRevisionProcessor.class); final String name; final int logFrequency; long totalRevisionCount = 0; long currentRevisionCount = 0; final Timer totalTimer; final Timer currentTimer; /** * Constructor. * * @param name * a string name used in log messages to refer to this processor * @param logFrequency * the number of revisions after which an intermediate status * report should be logged; or -1 if no such reports should be * logged */ public StatisticsMwRevisionProcessor(String name, int logFrequency) { this.name = name; this.logFrequency = logFrequency; this.totalTimer = Timer.getNamedTimer(name + "-totalTimer", Timer.RECORD_ALL); this.currentTimer = Timer.getNamedTimer(name + "-currentTimer", Timer.RECORD_ALL); } /** * Returns the total number of revisions processed so far. * * @return the number of revisions */ public long getTotalRevisionCount() { return this.totalRevisionCount; } /** * Returns the number of revisions processed in the current run. 
 * * @return the number of revisions */ public long getCurrentRevisionCount() { return this.currentRevisionCount; } @Override public void startRevisionProcessing(String siteName, String baseUrl, Map<Integer, String> namespaces) { this.currentRevisionCount = 0; if (!this.totalTimer.isRunning()) { this.totalTimer.reset(); this.totalTimer.start(); } this.currentTimer.reset(); this.currentTimer.start(); StatisticsMwRevisionProcessor.logger.info("[" + this.name + "] Starting processing run for \"" + siteName + "\" (" + baseUrl + ")."); StatisticsMwRevisionProcessor.logger.info("[" + this.name + "] Namespaces: " + namespaces.toString()); } @Override public void processRevision(MwRevision mwRevision) { this.currentRevisionCount++; this.totalRevisionCount++; if (this.logFrequency > 0 && this.totalRevisionCount % this.logFrequency == 0) { logProgress(); } } @Override public void finishRevisionProcessing() { logProgress(); StatisticsMwRevisionProcessor.logger.info("[" + this.name + "] Finished processing run."); } void logProgress() { this.currentTimer.stop(); this.totalTimer.stop(); if (this.totalRevisionCount > 0) { StatisticsMwRevisionProcessor.logger.info("[" + this.name + "] Processed " + this.totalRevisionCount + " revisions (total) in " + this.totalTimer.getTotalWallTime() / 1000000000 + "s (wall)/" + this.totalTimer.getTotalCpuTime() / 1000000000 + "s (cpu). " + "Time per revision (mics): " + this.totalTimer.getTotalWallTime() / this.totalRevisionCount / 1000 + "/" + this.totalTimer.getTotalCpuTime() / this.totalRevisionCount / 1000); } if (this.currentRevisionCount > 0) { StatisticsMwRevisionProcessor.logger.info("[" + this.name + "] Processed " + this.currentRevisionCount + " revisions (current run) in " + this.currentTimer.getTotalWallTime() / 1000000000 + "s (wall)/" + this.currentTimer.getTotalCpuTime() / 1000000000 + "s (cpu)." + " Time per revision (mics): " + this.currentTimer.getTotalWallTime() / this.currentRevisionCount / 1000 + "/" + this.currentTimer.getTotalCpuTime() / this.currentRevisionCount / 1000); } this.currentTimer.start(); this.totalTimer.start(); } } WikibaseRevisionProcessor.java000066400000000000000000000141701444772566300356700ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.helpers.JsonDeserializer; import org.wikidata.wdtk.datamodel.interfaces.*; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; /** * A revision processor that processes Wikibase entity content from a dump file. * Revisions are parsed to obtain EntityDocument objects.
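 * <p>
 * An illustrative wiring (where {@code myEntityProcessor} is any
 * {@link EntityDocumentProcessor} and {@code broker} is an
 * {@link MwRevisionProcessorBroker} created elsewhere):
 * <pre>{@code
 * WikibaseRevisionProcessor wrp = new WikibaseRevisionProcessor(
 *         myEntityProcessor, "http://www.wikidata.org/entity/");
 * broker.registerMwRevisionProcessor(wrp, MwRevision.MODEL_WIKIBASE_ITEM, true);
 * }</pre>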
 * * @author Markus Kroetzsch * */ public class WikibaseRevisionProcessor implements MwRevisionProcessor { static final Logger logger = LoggerFactory .getLogger(WikibaseRevisionProcessor.class); /** * Processor to which all entity documents parsed from revisions are * forwarded. */ private final EntityDocumentProcessor entityDocumentProcessor; private final JsonDeserializer jsonDeserializer; /** * Constructor. * * @param entityDocumentProcessor * the object that entity documents will be forwarded to * @param siteIri * the IRI of the site that the data comes from, as used in * {@link ItemIdValue#getSiteIri()}; it cannot be extracted * from individual revisions */ public WikibaseRevisionProcessor( EntityDocumentProcessor entityDocumentProcessor, String siteIri) { this.entityDocumentProcessor = entityDocumentProcessor; this.jsonDeserializer = new JsonDeserializer(siteIri); } @Override public void startRevisionProcessing(String siteName, String baseUrl, Map<Integer, String> namespaces) { // FIXME the baseUrl from the dump is not the baseIri we need here // Compute this properly. // this.jsonConverter = new JsonConverter( // "http://www.wikidata.org/entity/", this.dataObjectFactory); } @Override public void processRevision(MwRevision mwRevision) { if (MwRevision.MODEL_WIKIBASE_ITEM.equals(mwRevision.getModel())) { processItemRevision(mwRevision); } else if (MwRevision.MODEL_WIKIBASE_PROPERTY.equals(mwRevision .getModel())) { processPropertyRevision(mwRevision); } else if (MwRevision.MODEL_WIKIBASE_LEXEME.equals(mwRevision .getModel())) { processLexemeRevision(mwRevision); } // else: ignore this revision } public void processItemRevision(MwRevision mwRevision) { if(isWikibaseRedirection(mwRevision)) { processEntityRedirectRevision(mwRevision); return; } try { ItemDocument document = jsonDeserializer.deserializeItemDocument(mwRevision.getText()); entityDocumentProcessor.processItemDocument(document); } catch (JsonParseException e1) { logger.error("Failed to parse JSON for item " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); } catch (JsonMappingException e1) { logger.error("Failed to map JSON for item " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); e1.printStackTrace(); System.out.print(mwRevision.getText()); } catch (IOException e1) { logger.error("Failed to read revision: " + e1.getMessage()); } } public void processPropertyRevision(MwRevision mwRevision) { if(isWikibaseRedirection(mwRevision)) { processEntityRedirectRevision(mwRevision); return; } try { PropertyDocument document = jsonDeserializer.deserializePropertyDocument(mwRevision.getText()); entityDocumentProcessor.processPropertyDocument(document); } catch (JsonParseException e1) { logger.error("Failed to parse JSON for property " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); } catch (JsonMappingException e1) { logger.error("Failed to map JSON for property " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); e1.printStackTrace(); System.out.print(mwRevision.getText()); } catch (IOException e1) { logger.error("Failed to read revision: " + e1.getMessage()); } } private void processLexemeRevision(MwRevision mwRevision) { if(isWikibaseRedirection(mwRevision)) { processEntityRedirectRevision(mwRevision); return; } try { LexemeDocument document = jsonDeserializer.deserializeLexemeDocument(mwRevision.getText()); entityDocumentProcessor.processLexemeDocument(document); } catch (JsonParseException e1) { logger.error("Failed to parse JSON for lexeme " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); } catch (JsonMappingException e1) {
logger.error("Failed to map JSON for lexeme " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); e1.printStackTrace(); System.out.print(mwRevision.getText()); } catch (IOException e1) { logger.error("Failed to read revision: " + e1.getMessage()); } } private void processEntityRedirectRevision(MwRevision mwRevision) { try { EntityRedirectDocument document = jsonDeserializer.deserializeEntityRedirectDocument(mwRevision.getText()); entityDocumentProcessor.processEntityRedirectDocument(document); } catch (JsonParseException e1) { logger.error("Failed to parse JSON for redirect " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); } catch (JsonMappingException e1) { logger.error("Failed to map JSON for redirect " + mwRevision.getPrefixedTitle() + ": " + e1.getMessage()); e1.printStackTrace(); System.out.print(mwRevision.getText()); } catch (IOException e1) { logger.error("Failed to read revision: " + e1.getMessage()); } } private boolean isWikibaseRedirection(MwRevision mwRevision) { return mwRevision.getText().contains("\"redirect\":"); //Hacky but fast } @Override public void finishRevisionProcessing() { // nothing to do } } Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/package-info.java000066400000000000000000000014721444772566300331170ustar00rootroot00000000000000/** * Components for downloading and processing Wikibase dump files. * * @author Markus Kroetzsch * */ package org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/wmf/000077500000000000000000000000001444772566300305155ustar00rootroot00000000000000JsonOnlineDumpFile.java000066400000000000000000000077501444772566300350160ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import java.io.InputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.util.DirectoryManager; import org.wikidata.wdtk.util.WebResourceFetcher; public class JsonOnlineDumpFile extends WmfDumpFile { static final Logger logger = LoggerFactory .getLogger(JsonOnlineDumpFile.class); final WebResourceFetcher webResourceFetcher; final DirectoryManager dumpfileDirectoryManager; private boolean isPrepared; /** * Constructor. Currently only "wikidatawiki" is supported as a project * name, since the dumps are placed under a non-systematic directory * structure that must be hard-coded for each project. * * @param dateStamp * dump date in format YYYYMMDD * @param projectName * project name string (e.g. "wikidatawiki") * @param webResourceFetcher * object to use for accessing the web * @param dumpfileDirectoryManager * the directory manager for the directory where dumps should be * downloaded to */ public JsonOnlineDumpFile(String dateStamp, String projectName, WebResourceFetcher webResourceFetcher, DirectoryManager dumpfileDirectoryManager) { super(dateStamp, projectName); this.webResourceFetcher = webResourceFetcher; this.dumpfileDirectoryManager = dumpfileDirectoryManager; } @Override public DumpContentType getDumpContentType() { return DumpContentType.JSON; } @Override public InputStream getDumpFileStream() throws IOException { prepareDumpFile(); String fileName = WmfDumpFile.getDumpFileName(DumpContentType.JSON, this.projectName, this.dateStamp); DirectoryManager dailyDirectoryManager = this.dumpfileDirectoryManager .getSubdirectoryManager(WmfDumpFile.getDumpFileDirectoryName( DumpContentType.JSON, this.dateStamp)); return dailyDirectoryManager.getInputStreamForFile(fileName, WmfDumpFile.getDumpFileCompressionType(fileName)); } @Override public void prepareDumpFile() throws IOException { if (this.isPrepared) { return; } String fileName = WmfDumpFile.getDumpFileName(DumpContentType.JSON, this.projectName, this.dateStamp); String urlString = getBaseUrl() + fileName; logger.info("Downloading JSON dump file " + fileName + " from " + urlString + " ..."); if (!isAvailable()) { throw new IOException( "Dump file not available (yet). Aborting dump retrieval."); } DirectoryManager dailyDirectoryManager = this.dumpfileDirectoryManager .getSubdirectoryManager(WmfDumpFile.getDumpFileDirectoryName( DumpContentType.JSON, this.dateStamp)); try (InputStream inputStream = webResourceFetcher .getInputStreamForUrl(urlString)) { dailyDirectoryManager.createFileAtomic(fileName, inputStream); } this.isPrepared = true; logger.info("... completed download of JSON dump file " + fileName + " from " + urlString); } @Override protected boolean fetchIsDone() { // WMF provides no easy way to check this for these files; // so just assume it is done return true; } /** * Returns the base URL under which the files for this dump are found. 
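 * For the only supported project, "wikidatawiki", this is
 * "https://dumps.wikimedia.org/other/wikidata/".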
 * * @return base URL */ String getBaseUrl() { return WmfDumpFile.getDumpFileWebDirectory(DumpContentType.JSON, this.projectName); } } WmfDumpFile.java000066400000000000000000000217651444772566300334710ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.dumpfiles.MwDumpFile; import org.wikidata.wdtk.util.CompressionType; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Abstract base class for dump files provided by the Wikimedia Foundation. * * @author Markus Kroetzsch * */ public abstract class WmfDumpFile implements MwDumpFile { /** * The default URL of the website to obtain the dump files from. */ protected static final String DUMP_SITE_BASE_URL = "https://dumps.wikimedia.org/"; /** * Hash map defining the relative Web directory of each type of dump. */ static final Map<DumpContentType, String> WEB_DIRECTORY = new HashMap<>(); static { WmfDumpFile.WEB_DIRECTORY.put(DumpContentType.DAILY, "other/incr/"); WmfDumpFile.WEB_DIRECTORY.put(DumpContentType.CURRENT, ""); WmfDumpFile.WEB_DIRECTORY.put(DumpContentType.FULL, ""); WmfDumpFile.WEB_DIRECTORY.put(DumpContentType.SITES, ""); WmfDumpFile.WEB_DIRECTORY.put(DumpContentType.JSON, "other/"); } /** * Hash map defining the file name ending of each type of dump. */ static final Map<DumpContentType, String> POSTFIXES = new HashMap<>(); static { WmfDumpFile.POSTFIXES.put(DumpContentType.DAILY, "-pages-meta-hist-incr.xml.bz2"); WmfDumpFile.POSTFIXES.put(DumpContentType.CURRENT, "-pages-meta-current.xml.bz2"); WmfDumpFile.POSTFIXES.put(DumpContentType.FULL, "-pages-meta-history.xml.bz2"); WmfDumpFile.POSTFIXES.put(DumpContentType.SITES, "-sites.sql.gz"); WmfDumpFile.POSTFIXES.put(DumpContentType.JSON, ".json.gz"); } /** * Hash map defining whether a certain type of dump is a dump of page * revisions or not. Dumps with page revisions have a maximal revision id, * while other dump files do not.
 */ static final Map<DumpContentType, Boolean> REVISION_DUMP = new HashMap<>(); static { WmfDumpFile.REVISION_DUMP.put(DumpContentType.DAILY, true); WmfDumpFile.REVISION_DUMP.put(DumpContentType.CURRENT, true); WmfDumpFile.REVISION_DUMP.put(DumpContentType.FULL, true); WmfDumpFile.REVISION_DUMP.put(DumpContentType.SITES, false); WmfDumpFile.REVISION_DUMP.put(DumpContentType.JSON, false); } protected final String dateStamp; protected final String projectName; Boolean isDone; public WmfDumpFile(String dateStamp, String projectName) { this.dateStamp = dateStamp; this.projectName = projectName; } @Override public String getProjectName() { return this.projectName; } @Override public String getDateStamp() { return this.dateStamp; } @Override public boolean isAvailable() { if (isDone == null) { isDone = fetchIsDone(); } return isDone; } @Override public String toString() { return this.projectName + "-" + getDumpContentType().toString().toLowerCase() + "-" + this.dateStamp; } @Override public BufferedReader getDumpFileReader() throws IOException { return new BufferedReader(new InputStreamReader(getDumpFileStream(), StandardCharsets.UTF_8)); } /** * Finds out if the dump is ready. For online dumps, this should return true * if the file can be fetched from the Web. For local dumps, this should * return true if the file is complete and not corrupted. For some types of * dumps, there are ways of checking this easily (i.e., without reading the * full file). If this is not possible, then the method should just return * "true". * * @return true if the dump is done */ protected abstract boolean fetchIsDone(); /** * Returns the ending used by the Wikimedia-provided dumpfile names of the * given type. * * @param dumpContentType * the type of dump * @return postfix of the dumpfile name * @throws IllegalArgumentException * if the given dump file type is not known */ public static String getDumpFilePostfix(DumpContentType dumpContentType) { if (WmfDumpFile.POSTFIXES.containsKey(dumpContentType)) { return WmfDumpFile.POSTFIXES.get(dumpContentType); } else { throw new IllegalArgumentException("Unsupported dump type " + dumpContentType); } } /** * Returns the absolute directory on the Web site where dumpfiles of the * given type can be found. * * @param dumpContentType * the type of dump * @param projectName * the project name, e.g. "wikidatawiki" * @return absolute web directory (URL) for the dumpfiles of the given type * @throws IllegalArgumentException * if the given dump file type is not known */ public static String getDumpFileWebDirectory( DumpContentType dumpContentType, String projectName) { if (dumpContentType == DumpContentType.JSON) { if ("wikidatawiki".equals(projectName)) { return WmfDumpFile.DUMP_SITE_BASE_URL + WmfDumpFile.WEB_DIRECTORY.get(dumpContentType) + "wikidata" + "/"; } else { throw new RuntimeException( "Wikimedia Foundation uses non-systematic directory names for this type of dump file."
+ " I don't know where to find dumps of project " + projectName); } } else if (WmfDumpFile.WEB_DIRECTORY.containsKey(dumpContentType)) { return WmfDumpFile.DUMP_SITE_BASE_URL + WmfDumpFile.WEB_DIRECTORY.get(dumpContentType) + projectName + "/"; } else { throw new IllegalArgumentException("Unsupported dump type " + dumpContentType); } } /** * Returns the compression type of this kind of dump file using file suffixes * * @param fileName the name of the file * @return compression type * @throws IllegalArgumentException * if the given dump file type is not known */ public static CompressionType getDumpFileCompressionType(String fileName) { if (fileName.endsWith(".gz")) { return CompressionType.GZIP; } else if (fileName.endsWith(".bz2")) { return CompressionType.BZ2; } else { return CompressionType.NONE; } } /** * Returns the name of the directory where the dumpfile of the given type * and date should be stored. * * @param dumpContentType * the type of the dump * @param dateStamp * the date of the dump in format YYYYMMDD * @return the local directory name for the dumpfile */ public static String getDumpFileDirectoryName( DumpContentType dumpContentType, String dateStamp) { return dumpContentType.toString().toLowerCase() + "-" + dateStamp; } /** * Extracts the date stamp from a dumpfile directory name in the form that * is created by {@link #getDumpFileDirectoryName(DumpContentType, String)}. * It is not checked that the given directory name has the right format; if * it has not, the result will not be a date stamp but some other string. * * @param dumpContentType * @param directoryName * @return the date stamp */ public static String getDateStampFromDumpFileDirectoryName( DumpContentType dumpContentType, String directoryName) { int prefixLength = dumpContentType.toString().length() + 1; return directoryName.substring(prefixLength); } /** * Returns the name under which this dump file. This is the name used online * and also locally when downloading the file. * * @param dumpContentType * the type of the dump * @param projectName * the project name, e.g. "wikidatawiki" * @param dateStamp * the date of the dump in format YYYYMMDD * @return file name string */ public static String getDumpFileName(DumpContentType dumpContentType, String projectName, String dateStamp) { if (dumpContentType == DumpContentType.JSON) { return dateStamp + WmfDumpFile.getDumpFilePostfix(dumpContentType); } else { return projectName + "-" + dateStamp + WmfDumpFile.getDumpFilePostfix(dumpContentType); } } /** * Returns true if the given dump file type contains page revisions and * false if it does not. Dumps that do not contain pages are for auxiliary * information such as linked sites. 
* * @param dumpContentType * the type of dump * @return true if the dumpfile contains revisions * @throws IllegalArgumentException * if the given dump file type is not known */ public static boolean isRevisionDumpFile(DumpContentType dumpContentType) { if (WmfDumpFile.REVISION_DUMP.containsKey(dumpContentType)) { return WmfDumpFile.REVISION_DUMP.get(dumpContentType); } else { throw new IllegalArgumentException("Unsupported dump type " + dumpContentType); } } } WmfDumpFileManager.java000066400000000000000000000314051444772566300347560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/main/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.dumpfiles.DumpProcessingController; import org.wikidata.wdtk.dumpfiles.MwDumpFile; import org.wikidata.wdtk.util.DirectoryManager; import org.wikidata.wdtk.util.WebResourceFetcher; import org.wikidata.wdtk.util.WebResourceFetcherImpl; /** * Class for providing access to available dumpfiles provided by the Wikimedia * Foundation. The preferred access point for this class is * {@link DumpProcessingController#processAllRecentRevisionDumps()}, since this * method takes care of freeing resources and might also provide parallelized * downloading/processing in the future. *
<p>
* Typically, the Web will be accessed to find information about dumps available * online. This Web access is mediated by a {@link WebResourceFetcherImpl} * object, provided upon construction. If null is given instead, the class will * operate in offline mode, using only previously downloaded files. *
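<p>
 * A minimal usage sketch (illustrative only; {@code directoryManager} stands
 * for a {@link DirectoryManager} of the download directory, and the fetcher is
 * assumed to have a parameterless constructor):
 * <pre>{@code
 * WmfDumpFileManager manager = new WmfDumpFileManager("wikidatawiki",
 *         directoryManager, new WebResourceFetcherImpl());
 * for (MwDumpFile dumpFile : manager.findAllRelevantRevisionDumps(true)) {
 *     System.out.println(dumpFile.getDateStamp()); // most recent first
 * }
 * }</pre>
 *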
<p>
* The location of the Wikimedia download site is currently hardwired, since the * extraction methods used to get the data are highly specific to the format of * files on this site. Other sites (if any) would most likely need different * methods. * * @author Markus Kroetzsch * */ public class WmfDumpFileManager { static final Logger logger = LoggerFactory .getLogger(WmfDumpFileManager.class); /** * The regular expression that a date stamp should match. */ static final String DATE_STAMP_PATTERN = "\\d\\d\\d\\d\\d\\d\\d\\d"; /** * The name of the directory where downloaded dump files are stored. */ public static final String DOWNLOAD_DIRECTORY_NAME = "dumpfiles"; final String projectName; final DirectoryManager dumpfileDirectoryManager; final WebResourceFetcher webResourceFetcher; /** * Constructor. * * @param projectName * name of the project to obtain dumps for as used in the folder * structure of the dump site, e.g., "wikidatawiki" * @param downloadDirectoryManager * the directory manager for the directory where the download * directory for dump files should be; it will be created if * needed * @param webResourceFetcher * the web resource fetcher to access web resources or null if no * web access should happen * @throws IOException * if it was not possible to access the directory for managing * dumpfiles */ public WmfDumpFileManager(String projectName, DirectoryManager downloadDirectoryManager, WebResourceFetcher webResourceFetcher) throws IOException { this.projectName = projectName; this.dumpfileDirectoryManager = downloadDirectoryManager .getSubdirectoryManager( WmfDumpFileManager.DOWNLOAD_DIRECTORY_NAME) .getSubdirectoryManager(projectName); this.webResourceFetcher = webResourceFetcher; WmfDumpFileManager.logger.info("Using download directory " + this.dumpfileDirectoryManager.toString()); } /** * Finds all page revision dump files, online or locally, that are relevant * to obtain the most current state of the data. Revision dump files are * dumps that contain page revisions in MediaWiki's XML format. *
<p>
* If the parameter preferCurrent is true, then dumps that contain * only the current versions of all files will be preferred if available * anywhere, even over previously downloaded dump files that contain all * versions. However, dump files may still contain non-current revisions, * and when processing multiple dumps there might even be overlaps (one * revision occurring in multiple dumps). *
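<p>
 * For example, with a current-revisions main dump dated 20140210 and daily
 * dumps available up to 20140220, the result lists the daily dumps newer than
 * 20140210 (most recent first), followed by the main dump.
 *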
<p>
 * The result is ordered with the most recent dump first. If a dump file A * contains revisions of a page P, and Rmax is the maximal revision of P in * A, then every dump file that comes after A should contain only revisions * of P that are smaller than or equal to Rmax. In other words, the maximal * revision found in the first file that contains P at all should also be * the maximal revision overall. * * @param preferCurrent * should dumps with current revisions be preferred? * @return an ordered list of all dump files that match the given criteria */ public List<MwDumpFile> findAllRelevantRevisionDumps(boolean preferCurrent) { MwDumpFile mainDump; if (preferCurrent) { mainDump = findMostRecentDump(DumpContentType.CURRENT); } else { mainDump = findMostRecentDump(DumpContentType.FULL); } if (mainDump == null) { return findAllDumps(DumpContentType.DAILY); } List<MwDumpFile> result = new ArrayList<>(); for (MwDumpFile dumpFile : findAllDumps(DumpContentType.DAILY)) { if (dumpFile.getDateStamp().compareTo(mainDump.getDateStamp()) > 0) { result.add(dumpFile); } } result.add(mainDump); if (logger.isInfoEnabled()) { StringBuilder logMessage = new StringBuilder(); logMessage.append("Found ") .append(result.size()) .append(" relevant dumps to process:"); for (MwDumpFile dumpFile : result) { logMessage.append("\n * ").append(dumpFile.toString()); } logger.info(logMessage.toString()); } return result; } /** * Finds the most recent dump of the given type that is actually available. * * @param dumpContentType * the type of the dump to look for * @return most recent main dump or null if no such dump exists */ public MwDumpFile findMostRecentDump(DumpContentType dumpContentType) { List<MwDumpFile> dumps = findAllDumps(dumpContentType); for (MwDumpFile dump : dumps) { if (dump.isAvailable()) { return dump; } } return null; } /** * Returns a list of all dump files of the given type available either * online or locally. For dumps available both online and locally, the local * version is included. The list is ordered with most recent dump date * first. Online dumps found by this method might not be available yet (if * their directory has been created online but the file was not uploaded or * completely written yet). * * @param dumpContentType * the type of dump to look for * @return a list of dump files of the given type */ public List<MwDumpFile> findAllDumps(DumpContentType dumpContentType) { List<MwDumpFile> localDumps = findDumpsLocally(dumpContentType); if (this.webResourceFetcher != null) { List<MwDumpFile> onlineDumps = findDumpsOnline(dumpContentType); return mergeDumpLists(localDumps, onlineDumps); } else { return localDumps; } } /** * Merges a list of local and online dumps. For dumps available both online * and locally, only the local version is included. The list is ordered with * most recent dump date first. * * @param localDumps * the list of local dumps * @param onlineDumps * the list of online dumps * @return a merged list of dump files */ List<MwDumpFile> mergeDumpLists(List<MwDumpFile> localDumps, List<MwDumpFile> onlineDumps) { List<MwDumpFile> result = new ArrayList<>(localDumps); HashSet<String> localDateStamps = new HashSet<>(); for (MwDumpFile dumpFile : localDumps) { localDateStamps.add(dumpFile.getDateStamp()); } for (MwDumpFile dumpFile : onlineDumps) { if (!localDateStamps.contains(dumpFile.getDateStamp())) { result.add(dumpFile); } } result.sort(Collections.reverseOrder(new MwDumpFile.DateComparator())); return result; } /** * Finds out which dump files of the given type have been downloaded * already. The result is a list of objects that describe the available dump * files, in descending order by their date. Not all of the dumps included * might be actually available.
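 * Local dumps are expected in subdirectories named as produced by
 * {@link WmfDumpFile#getDumpFileDirectoryName(DumpContentType, String)},
 * for example "daily-20140220".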
 * * @param dumpContentType * the type of dump to consider * @return list of objects that provide information on available dumps */ List<MwDumpFile> findDumpsLocally(DumpContentType dumpContentType) { String directoryPattern = WmfDumpFile.getDumpFileDirectoryName( dumpContentType, "*"); List<String> dumpFileDirectories; try { dumpFileDirectories = this.dumpfileDirectoryManager .getSubdirectories(directoryPattern); } catch (IOException e) { logger.error("Unable to access dump directory: " + e.toString()); return Collections.emptyList(); } List<MwDumpFile> result = new ArrayList<>(); for (String directory : dumpFileDirectories) { String dateStamp = WmfDumpFile .getDateStampFromDumpFileDirectoryName(dumpContentType, directory); if (dateStamp.matches(WmfDumpFileManager.DATE_STAMP_PATTERN)) { WmfLocalDumpFile dumpFile = new WmfLocalDumpFile(dateStamp, this.projectName, dumpfileDirectoryManager, dumpContentType); if (dumpFile.isAvailable()) { result.add(dumpFile); } else { logger.error("Incomplete local dump file data. Maybe delete " + dumpFile.getDumpfileDirectory() + " to attempt fresh download."); } } // else: silently ignore directories that don't match } result.sort(Collections.reverseOrder(new MwDumpFile.DateComparator())); logger.info("Found " + result.size() + " local dumps of type " + dumpContentType + ": " + result); return result; } /** * Finds out which dump files of the given type are available for download. * The result is a list of objects that describe the available dump files, * in descending order by their date. Not all of the dumps included might be * actually available. * * @param dumpContentType * the type of dump to consider * @return list of objects that provide information on available dumps */ List<MwDumpFile> findDumpsOnline(DumpContentType dumpContentType) { List<String> dumpFileDates = findDumpDatesOnline(dumpContentType); List<MwDumpFile> result = new ArrayList<>(); for (String dateStamp : dumpFileDates) { if (dumpContentType == DumpContentType.DAILY) { result.add(new WmfOnlineDailyDumpFile(dateStamp, this.projectName, this.webResourceFetcher, this.dumpfileDirectoryManager)); } else if (dumpContentType == DumpContentType.JSON) { result.add(new JsonOnlineDumpFile(dateStamp, this.projectName, this.webResourceFetcher, this.dumpfileDirectoryManager)); } else { result.add(new WmfOnlineStandardDumpFile(dateStamp, this.projectName, this.webResourceFetcher, this.dumpfileDirectoryManager, dumpContentType)); } } logger.info("Found " + result.size() + " online dumps of type " + dumpContentType + ": " + result); return result; } /** * Finds out which dump files are available for download in a given * directory. The result is a list of YYYYMMDD date stamps, ordered newest * to oldest. The list is based on the directories or files found at the * target location, without considering whether or not each dump is actually * available. *
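<p>
 * For example, for JSON dumps of project "wikidatawiki", this reads the HTML
 * file list at "https://dumps.wikimedia.org/other/wikidata/" and extracts
 * date stamps such as "20150223".
 *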
<p>
 * The implementation is rather uniform since all cases supported thus far * use directory/file names that start with a date stamp. If the date would * occur elsewhere or in another form, then more work would be needed. * * @param dumpContentType * the type of dump to consider * @return list of date stamps */ List<String> findDumpDatesOnline(DumpContentType dumpContentType) { List<String> result = new ArrayList<>(); try (InputStream in = this.webResourceFetcher .getInputStreamForUrl(WmfDumpFile.getDumpFileWebDirectory( dumpContentType, this.projectName))) { BufferedReader bufferedReader = new BufferedReader( new InputStreamReader(in, StandardCharsets.UTF_8)); String inputLine; while ((inputLine = bufferedReader.readLine()) != null) { String dateStamp = ""; if (inputLine.startsWith("<tr><td class=\"n\">")) { // old format of HTML file lists dateStamp = inputLine.substring(27, 35); } else if (inputLine.startsWith("<a href=")) { // new format of HTML file lists dateStamp = inputLine.substring(9, 17); }
 1000000000) { busywait = false; } timer.start(); } } } } @Test public void testRegularJsonProcessing() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); setLocalJsonDumpFile("mock-dump-for-testing.json", "20150223", dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); EntityTimerProcessor timer = new EntityTimerProcessor(0); dpc.registerEntityDocumentProcessor(timer, null, true); timer.open(); dpc.processMostRecentJsonDump(); timer.close(); assertEquals(3, timer.entityCount); } @Test public void testBuggyJsonProcessing() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); setLocalJsonDumpFile("mock-dump-with-bugs.json", "20150223", dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); EntityTimerProcessor timer = new EntityTimerProcessor(0); dpc.registerEntityDocumentProcessor(timer, null, true); timer.open(); dpc.processMostRecentJsonDump(); timer.close(); assertTrue(timer.entityCount >= 3); } /** * TODO: fix on JDK 9 and enable again */ @Test(expected = EntityTimerProcessor.TimeoutException.class) @Ignore public void testTimeout() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); setLocalJsonDumpFile("mock-dump-for-long-testing.json", "20150223", dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); EntityTimerProcessor timer = new EntityTimerProcessor(1); timer.setReportInterval(1); dpc.registerEntityDocumentProcessor(timer, null, true); dpc.registerEntityDocumentProcessor(new SlowDocumentProcessor(), null, true); timer.open(); dpc.processMostRecentJsonDump(); timer.close(); } @Test public void testNonTimeout() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); setLocalJsonDumpFile("mock-dump-for-long-testing.json", "20150223", dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); EntityTimerProcessor timer = new EntityTimerProcessor(0); dpc.registerEntityDocumentProcessor(timer, null, true); dpc.registerEntityDocumentProcessor(new SlowDocumentProcessor(), null, true);
timer.open(); dpc.processMostRecentJsonDump(); timer.close(); assertEquals(101, timer.entityCount); } private void setLocalJsonDumpFile(String fileName, String dateStamp, MockDirectoryManager dm) throws IOException { DumpContentType dumpContentType = DumpContentType.JSON; URL resourceUrl = MwDumpFileProcessingTest.class.getResource("/" + fileName); Path dmPath = Paths.get(System.getProperty("user.dir")); Path dumpFilePath = dmPath.resolve("dumpfiles").resolve("wikidatawiki"); Path thisDumpPath = dumpFilePath.resolve(dumpContentType.toString() .toLowerCase() + "-" + dateStamp); Path filePath = thisDumpPath.resolve(dateStamp + WmfDumpFile.getDumpFilePostfix(dumpContentType)); dm.setFileContents(filePath, MockStringContentFactory.getStringFromUrl(resourceUrl), WmfDumpFile.getDumpFileCompressionType(filePath.toString())); } } MwDumpFileProcessingTest.java000066400000000000000000000463551444772566300354640ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import java.io.IOException; import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.dumpfiles.wmf.WmfDumpFile; import org.wikidata.wdtk.dumpfiles.wmf.WmfLocalDumpFile; import org.wikidata.wdtk.testing.MockDirectoryManager; import org.wikidata.wdtk.testing.MockStringContentFactory; import org.wikidata.wdtk.util.DirectoryManagerFactory; public class MwDumpFileProcessingTest { /** * Helper class that stores all information passed to it for later testing. * * @author Markus Kroetzsch * */ static class TestMwRevisionProcessor implements MwRevisionProcessor { final List<MwRevision> revisions = new ArrayList<>(); String siteName; String baseUrl; Map<Integer, String> namespaces; @Override public void startRevisionProcessing(String siteName, String baseUrl, Map<Integer, String> namespaces) { this.siteName = siteName; this.baseUrl = baseUrl; this.namespaces = namespaces; } @Override public void processRevision(MwRevision mwRevision) { this.revisions.add(new MwRevisionImpl(mwRevision)); } @Override public void finishRevisionProcessing() { } } /** * Helper class that counts how many items it gets.
* * @author Markus Kroetzsch * */ static class TestEntityDocumentProcessor implements EntityDocumentProcessor { int itemCount = 0; int propCount = 0; @Override public void processItemDocument(ItemDocument itemDocument) { this.itemCount++; } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { this.propCount++; } } @Before public void configureDirectoryManager() { DirectoryManagerFactory .setDirectoryManagerClass(MockDirectoryManager.class); } /** * Generates a simple item revision for testing purposes. * * @param number */ private MwRevision getItemRevision(int number) { MwRevisionImpl result = new MwRevisionImpl(); result.prefixedTitle = "Q1"; result.namespace = 0; result.pageId = 32; result.revisionId = number; result.parentRevisionId = number - 1; result.timeStamp = "2014-02-19T23:34:1" + (number % 10) + "Z"; result.format = "application/json"; result.model = MwRevision.MODEL_WIKIBASE_ITEM; result.comment = "Test comment " + number; result.text = "{\"id\":\"Q1\",\"type\":\"item\",\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"Revision " + number + "\"}}}"; result.contributor = "127.0.0." + (number % 256); result.contributorId = -1; return result; } /** * Generates a simple property revision for testing purposes. * * @param number */ private MwRevision getPropertyRevision(int number) { MwRevisionImpl result = new MwRevisionImpl(); result.prefixedTitle = "Property:P1"; result.namespace = 120; result.pageId = 12345; result.revisionId = number + 10000; result.parentRevisionId = number + 9999; result.timeStamp = "2014-02-19T23:34:1" + (number % 10) + "Z"; result.format = "application/json"; result.model = MwRevision.MODEL_WIKIBASE_PROPERTY; result.comment = "Test comment " + (number + 10000); result.text = "{\"id\":\"P1\",\"type\":\"property\",\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"Revision " + (number + 10000) + "\"}},\"datatype\":\"wikibase-item\"}"; result.contributor = "127.0.0." + (number % 256); result.contributorId = -1; return result; } /** * Generates a simple lexeme revision for testing purposes. * * @param number */ private MwRevision getLexemeRevision(int number) { MwRevisionImpl result = new MwRevisionImpl(); result.prefixedTitle = "Lexeme:L1"; result.namespace = 122; result.pageId = 1235667; result.revisionId = number + 100000; result.parentRevisionId = number + 8888; result.timeStamp = "2014-02-19T23:34:1" + (number % 10) + "Z"; result.format = "application/json"; result.model = MwRevision.MODEL_WIKIBASE_LEXEME; result.comment = "Test comment " + (number + 10000); result.text = "{\"type\":\"lexeme\",\"id\":\"L1\",\"lexicalCategory\":\"Q" + number / 2 + "\",\"language\":\"Q" + number + "\"}"; result.contributor = "127.0.0." + (number % 256); result.contributorId = -1; return result; } /** * Generates a simple page revision for testing purposes. * * @param number */ private MwRevision getPageRevision(int number) { MwRevisionImpl result = new MwRevisionImpl(); result.prefixedTitle = "Wikidata:Contact the development team"; result.namespace = 4; result.pageId = 181; result.revisionId = 110689110 + number; result.parentRevisionId = 110689109 + number; result.timeStamp = "2014-02-20T23:34:1" + number + "Z"; result.format = "text/x-wiki"; result.model = MwRevision.MODEL_WIKITEXT; result.comment = "Test comment " + number; result.text = "Test wikitext " + number + "\nLine 2\nLine 3"; result.contributor = "User " + number; result.contributorId = 1000 + number; return result; } /** * Assert that two revisions are equal. 
Better than using equals() since it * generates more useful error reports. * * @param rev1 * @param rev2 */ private void assertEqualRevisions(MwRevision rev1, MwRevision rev2, String test) { assertEquals("[" + test + "] Revision prefixed titles do not match:", rev1.getPrefixedTitle(), rev2.getPrefixedTitle()); assertEquals("[" + test + "] Revision namespaces do not match:", rev1.getNamespace(), rev2.getNamespace()); assertEquals("[" + test + "] Revision page ids do not match:", rev1.getPageId(), rev2.getPageId()); assertEquals("[" + test + "] Revision ids do not match:", rev1.getRevisionId(), rev2.getRevisionId()); assertEquals("[" + test + "] Revision parent ids do not match:", rev1.getParentRevisionId(), rev2.getParentRevisionId()); assertEquals("[" + test + "] Revision timestamps do not match:", rev1.getTimeStamp(), rev2.getTimeStamp()); assertEquals("[" + test + "] Revision formats do not match:", rev1.getFormat(), rev2.getFormat()); assertEquals("[" + test + "] Revision models do not match:", rev1.getModel(), rev2.getModel()); assertEquals("[" + test + "] Revision comments do not match:", rev1.getComment(), rev2.getComment()); assertEquals("[" + test + "] Revision texts do not match:", rev1.getText(), rev2.getText()); assertEquals("[" + test + "] Revision contributors do not match:", rev1.getContributor(), rev2.getContributor()); assertEquals("[" + test + "] Revision contributor ids do not match:", rev1.getContributorId(), rev2.getContributorId()); } /** * Assert that two lists contain the same revisions in the same order. * * @param list1 * @param list2 */ private void assertEqualRevisionLists(List<MwRevision> list1, List<MwRevision> list2, String test) { assertEquals("[" + test + "] Size of revision lists does not match:", list1.size(), list2.size()); for (int i = 0; i < list1.size(); i++) { assertEqualRevisions(list1.get(i), list2.get(i), test + "-item" + i); } } @Test public void testIncompleteDumpFile() throws IOException { URL resourceUrl = MwDumpFileProcessingTest.class .getResource("/mock-dump-incomplete-revision.xml"); MwDumpFile mockDumpFile = Mockito.mock(WmfLocalDumpFile.class); MwRevisionProcessorBroker mwrpBroker = new MwRevisionProcessorBroker(); TestMwRevisionProcessor tmrpAll = new TestMwRevisionProcessor(); mwrpBroker.registerMwRevisionProcessor(tmrpAll, null, false); MwRevisionDumpFileProcessor mwdfp = new MwRevisionDumpFileProcessor( mwrpBroker); mwdfp.processDumpFileContents(resourceUrl.openStream(), mockDumpFile); List<MwRevision> revisionsAll = new ArrayList<>(); revisionsAll.add(getItemRevision(4)); assertEqualRevisionLists(revisionsAll, tmrpAll.revisions, "all-incomplete"); } @Test public void testBuggyDumpFile() throws IOException { URL resourceUrl = MwDumpFileProcessingTest.class .getResource("/mock-dump-with-bugs.xml"); MwDumpFile mockDumpFile = Mockito.mock(WmfLocalDumpFile.class); MwRevisionProcessorBroker mwrpBroker = new MwRevisionProcessorBroker(); TestMwRevisionProcessor tmrpAll = new TestMwRevisionProcessor(); mwrpBroker.registerMwRevisionProcessor(tmrpAll, null, false); MwRevisionDumpFileProcessor mwdfp = new MwRevisionDumpFileProcessor( mwrpBroker); mwdfp.processDumpFileContents(resourceUrl.openStream(), mockDumpFile); List<MwRevision> revisionsAll = new ArrayList<>(); revisionsAll.add(getItemRevision(4)); revisionsAll.add(getItemRevision(5)); revisionsAll.add(getPageRevision(1)); revisionsAll.add(getPageRevision(2)); assertEqualRevisionLists(revisionsAll, tmrpAll.revisions, "all-incomplete"); } private void setLocalDumpFile(String dateStamp, DumpContentType dumpContentType, MockDirectoryManager
dm) throws IOException { URL resourceUrl = MwDumpFileProcessingTest.class .getResource("/mock-dump-for-testing.xml"); Path dmPath = Paths.get(System.getProperty("user.dir")); Path dumpFilePath = dmPath.resolve("dumpfiles").resolve("wikidatawiki"); Path thisDumpPath = dumpFilePath.resolve(dumpContentType.toString() .toLowerCase() + "-" + dateStamp); Path filePath = thisDumpPath.resolve("wikidatawiki-" + dateStamp + WmfDumpFile.getDumpFilePostfix(dumpContentType)); dm.setFileContents(filePath, MockStringContentFactory.getStringFromUrl(resourceUrl), WmfDumpFile.getDumpFileCompressionType(filePath.toString())); } /** * Creates a mocked local dump file with three pages, each with three * revisions starting from the given baseId (plus some offset per page). * * @param dateStamp * @param baseId * @param dumpContentType * @param dm * @throws IOException */ private void mockLocalDumpFile(String dateStamp, int baseId, DumpContentType dumpContentType, MockDirectoryManager dm) throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); Path dumpFilePath = dmPath.resolve("dumpfiles").resolve("wikidatawiki"); dm.setDirectory(dmPath); dm.setDirectory(dmPath.resolve("dumpfiles")); dm.setDirectory(dumpFilePath); Path thisDumpPath = dumpFilePath.resolve(dumpContentType.toString() .toLowerCase() + "-" + dateStamp); URL resourceUrl = MwDumpFileProcessingTest.class .getResource("/mock-dump-header.xml"); String dumpContents = MockStringContentFactory .getStringFromUrl(resourceUrl); // Append pages in the MediaWiki XML export format expected by the revision parser: for (int pageId = baseId; pageId < baseId + 3; pageId++) { dumpContents += "  <page>\n"; dumpContents += "    <title>Q" + pageId + "</title>\n"; dumpContents += "    <ns>0</ns>\n"; dumpContents += "    <id>" + (pageId + 1000) + "</id>\n"; for (int revId = pageId * 1000 + baseId + 1; revId < pageId * 1000 + baseId + 4; revId++) { dumpContents += "    <revision>\n"; dumpContents += "      <id>" + revId + "</id>\n"; dumpContents += "      <parentid>" + (revId - 1) + "</parentid>\n"; dumpContents += "      <timestamp>2014-02-19T23:34:0" + (revId % 10) + "</timestamp>\n"; dumpContents += "      <contributor>"; dumpContents += "        <ip>127.0.0." + (revId % 256) + "</ip>\n"; dumpContents += "      </contributor>\n"; dumpContents += "      <comment>Test comment " + revId + "</comment>\n"; dumpContents += "      <text>{&quot;label&quot;:{&quot;en&quot;:&quot;Revision " + revId + "&quot;}}</text>\n"; dumpContents += "      <sha1>ignored</sha1>"; dumpContents += "      <model>wikibase-item</model>"; dumpContents += "      <format>application/json</format>"; dumpContents += "    </revision>\n"; } dumpContents += "  </page>\n"; } dumpContents += "</mediawiki>\n"; Path filePath = thisDumpPath.resolve("wikidatawiki-" + dateStamp + WmfDumpFile.getDumpFilePostfix(dumpContentType)); dm.setFileContents(filePath, dumpContents, WmfDumpFile.getDumpFileCompressionType(filePath.toString())); } @Test public void testMwDailyDumpFileProcessing() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); setLocalDumpFile("20140420", DumpContentType.DAILY, dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); StatisticsMwRevisionProcessor mwrpAllStats = new StatisticsMwRevisionProcessor( "all", 2); dpc.registerMwRevisionProcessor(mwrpAllStats, null, false); TestMwRevisionProcessor tmrpAll = new TestMwRevisionProcessor(); dpc.registerMwRevisionProcessor(tmrpAll, null, false); TestMwRevisionProcessor tmrpAllCurrent = new TestMwRevisionProcessor(); dpc.registerMwRevisionProcessor(tmrpAllCurrent, null, true); TestMwRevisionProcessor tmrpAllItems = new TestMwRevisionProcessor(); dpc.registerMwRevisionProcessor(tmrpAllItems, MwRevision.MODEL_WIKIBASE_ITEM, false); TestEntityDocumentProcessor edpCurrentCounter = new TestEntityDocumentProcessor(); dpc.registerEntityDocumentProcessor(edpCurrentCounter, MwRevision.MODEL_WIKIBASE_ITEM, true); dpc.registerEntityDocumentProcessor(edpCurrentCounter, MwRevision.MODEL_WIKIBASE_PROPERTY, true); TestEntityDocumentProcessor edpAllCounter = new TestEntityDocumentProcessor(); dpc.registerEntityDocumentProcessor(edpAllCounter, MwRevision.MODEL_WIKIBASE_ITEM, false); dpc.registerEntityDocumentProcessor(edpAllCounter, MwRevision.MODEL_WIKIBASE_PROPERTY, false); TestMwRevisionProcessor tmrpAllProperties = new TestMwRevisionProcessor(); dpc.registerMwRevisionProcessor(tmrpAllProperties, MwRevision.MODEL_WIKIBASE_PROPERTY, false); TestMwRevisionProcessor tmrpAllLexemes = new TestMwRevisionProcessor(); dpc.registerMwRevisionProcessor(tmrpAllLexemes, MwRevision.MODEL_WIKIBASE_LEXEME, false); dpc.processDump(dpc.getMostRecentDump(DumpContentType.DAILY)); List<MwRevision> revisionsAllItems = new ArrayList<>(); revisionsAllItems.add(getItemRevision(4)); revisionsAllItems.add(getItemRevision(5)); revisionsAllItems.add(getItemRevision(3)); revisionsAllItems.add(getItemRevision(2)); List<MwRevision> revisionsAllProperties = new ArrayList<>(); revisionsAllProperties.add(getPropertyRevision(4)); revisionsAllProperties.add(getPropertyRevision(5)); List<MwRevision> revisionsAllLexemes = new ArrayList<>(); revisionsAllLexemes.add(getLexemeRevision(9)); revisionsAllLexemes.add(getLexemeRevision(10)); List<MwRevision> revisionsAll = new ArrayList<>(revisionsAllItems); revisionsAll.add(getPageRevision(1)); revisionsAll.add(getPageRevision(2)); revisionsAll.addAll(revisionsAllProperties); revisionsAll.addAll(revisionsAllLexemes); List<MwRevision> revisionsAllCurrent = new ArrayList<>(); revisionsAllCurrent.add(getItemRevision(5)); revisionsAllCurrent.add(getPageRevision(2)); revisionsAllCurrent.add(getPropertyRevision(5)); revisionsAllCurrent.add(getLexemeRevision(10)); assertEquals("Wikidata Toolkit Test", tmrpAll.siteName); assertEquals(revisionsAll.size(), mwrpAllStats.getTotalRevisionCount());
assertEquals(revisionsAll.size(), mwrpAllStats.getCurrentRevisionCount()); assertEqualRevisionLists(revisionsAll, tmrpAll.revisions, "all"); assertEqualRevisionLists(revisionsAllItems, tmrpAllItems.revisions, "allitems"); assertEqualRevisionLists(revisionsAllCurrent, tmrpAllCurrent.revisions, "allcurrent"); assertEqualRevisionLists(revisionsAllProperties, tmrpAllProperties.revisions, "allproperties"); assertEqualRevisionLists(revisionsAllLexemes, tmrpAllLexemes.revisions, "alllexemes"); assertEquals(revisionsAllItems.size(), edpAllCounter.itemCount); assertEquals(revisionsAllProperties.size(), edpAllCounter.propCount); assertEquals(1, edpCurrentCounter.itemCount); assertEquals(1, edpCurrentCounter.propCount); } @Test public void testMwRecentCurrentDumpFileProcessing() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); mockLocalDumpFile("20140420", 4, DumpContentType.DAILY, dm); mockLocalDumpFile("20140419", 3, DumpContentType.DAILY, dm); mockLocalDumpFile("20140418", 2, DumpContentType.DAILY, dm); mockLocalDumpFile("20140417", 1, DumpContentType.DAILY, dm); mockLocalDumpFile("20140418", 2, DumpContentType.CURRENT, dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); StatisticsMwRevisionProcessor mwrpStats = new StatisticsMwRevisionProcessor( "stats", 2); dpc.registerMwRevisionProcessor(mwrpStats, null, true); dpc.processAllRecentRevisionDumps(); assertEquals(5, mwrpStats.getTotalRevisionCount()); assertEquals(1, mwrpStats.getCurrentRevisionCount()); } @Test public void testMwRecentFullDumpFileProcessing() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); mockLocalDumpFile("20140420", 4, DumpContentType.DAILY, dm); mockLocalDumpFile("20140419", 3, DumpContentType.DAILY, dm); mockLocalDumpFile("20140418", 2, DumpContentType.DAILY, dm); mockLocalDumpFile("20140417", 1, DumpContentType.DAILY, dm); mockLocalDumpFile("20140418", 2, DumpContentType.FULL, dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); StatisticsMwRevisionProcessor mwrpStats = new StatisticsMwRevisionProcessor( "stats", 2); dpc.registerMwRevisionProcessor(mwrpStats, null, false); dpc.processAllRecentRevisionDumps(); assertEquals(19, mwrpStats.getTotalRevisionCount()); assertEquals(5, mwrpStats.getCurrentRevisionCount()); } @Test public void testMwMostRecentFullDumpFileProcessing() throws IOException { Path dmPath = Paths.get(System.getProperty("user.dir")); MockDirectoryManager dm = new MockDirectoryManager(dmPath, true, true); mockLocalDumpFile("20140418", 2, DumpContentType.FULL, dm); DumpProcessingController dpc = new DumpProcessingController( "wikidatawiki"); dpc.downloadDirectoryManager = dm; dpc.setOfflineMode(true); StatisticsMwRevisionProcessor mwrpStats = new StatisticsMwRevisionProcessor( "stats", 2); dpc.registerMwRevisionProcessor(mwrpStats, null, false); dpc.processMostRecentMainDump(); assertEquals(9, mwrpStats.getTotalRevisionCount()); assertEquals(9, mwrpStats.getCurrentRevisionCount()); } } MwLocalDumpFileTest.java000066400000000000000000000117521444772566300343750ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfilespackage org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit 
Dump File Handling * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.testing.MockDirectoryManager; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.util.DirectoryManagerFactory; import static org.junit.Assert.*; public class MwLocalDumpFileTest { MockDirectoryManager dm; Path dmPath; @Before public void setUp() throws Exception { DirectoryManagerFactory .setDirectoryManagerClass(MockDirectoryManager.class); this.dmPath = Paths.get("/").toAbsolutePath(); this.dm = new MockDirectoryManager(this.dmPath, true, true); } @Test public void missingDumpFile() { MwLocalDumpFile df = new MwLocalDumpFile( "/non-existing-dump-file.json.gz"); assertFalse(df.isAvailable()); } @Test public void missingDumpFileDirectory() { MwLocalDumpFile df = new MwLocalDumpFile( "/nonexisting-directory/non-existing-file.json.gz"); assertFalse(df.isAvailable()); } @Test public void testExplicitGetters() throws IOException { this.dm.setFileContents(this.dmPath .resolve("testdump-20150512.json.gz"), ""); MwLocalDumpFile df = new MwLocalDumpFile( "/testdump-20150512.json.gz", DumpContentType.SITES, "20150815", "wikidatawiki"); assertEquals("20150815", df.getDateStamp()); assertEquals("wikidatawiki", df.getProjectName()); assertEquals(DumpContentType.SITES, df.getDumpContentType()); String toString = df.toString(); assertEquals(this.dmPath.resolve("testdump-20150512.json.gz"), df.getPath()); assertTrue(toString.contains("20150815")); assertTrue(toString.contains("wikidatawiki")); assertTrue(toString.toLowerCase().contains( DumpContentType.SITES.toString().toLowerCase())); } @Test public void testGuessJsonDumpAndDate() throws IOException { this.dm.setFileContents(this.dmPath .resolve("testdump-20150512.json.gz"), ""); MwLocalDumpFile df = new MwLocalDumpFile( "/testdump-20150512.json.gz"); assertTrue(df.isAvailable()); assertEquals("20150512", df.getDateStamp()); assertEquals("LOCAL", df.getProjectName()); assertEquals(df.getDumpContentType(), DumpContentType.JSON); } @Test public void testJsonReader() throws IOException { this.dm.setFileContents(this.dmPath .resolve("testdump-20150512.json.gz"), "Test contents", CompressionType.GZIP); MwLocalDumpFile df = new MwLocalDumpFile( "/testdump-20150512.json.gz"); BufferedReader br = df.getDumpFileReader(); assertEquals("Test contents", br.readLine()); assertNull(br.readLine()); } @Test(expected = IOException.class) public void testUnavailableReader() throws IOException { MwLocalDumpFile df = new MwLocalDumpFile( "/testdump-20150512.json.gz"); df.getDumpFileReader(); } @Test public void testGuessSitesDump() throws IOException { this.dm.setFileContents(this.dmPath.resolve("test.sql.gz"), ""); MwLocalDumpFile df = new MwLocalDumpFile("/test.sql.gz"); assertTrue(df.isAvailable()); assertEquals("YYYYMMDD", df.getDateStamp()); 
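// No date stamp can be guessed from "test.sql.gz", so the generic placeholder "YYYYMMDD" is used; the .sql ending alone determines the SITES dump type checked below.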
assertEquals(df.getDumpContentType(), DumpContentType.SITES); } @Test public void testGuessFullDump() throws IOException { this.dm.setFileContents(this.dmPath.resolve("test.xml.bz2"), ""); MwLocalDumpFile df = new MwLocalDumpFile("/test.xml.bz2"); assertTrue(df.isAvailable()); assertEquals(df.getDumpContentType(), DumpContentType.FULL); } @Test public void testGuessDailyDump() throws IOException { this.dm.setFileContents( this.dmPath.resolve("daily-dump.xml.bz2"), ""); MwLocalDumpFile df = new MwLocalDumpFile("/daily-dump.xml.bz2"); assertTrue(df.isAvailable()); assertEquals(df.getDumpContentType(), DumpContentType.DAILY); } @Test public void testGuessCurrentDump() throws IOException { this.dm.setFileContents( this.dmPath.resolve("current-dump.xml.bz2"), ""); MwLocalDumpFile df = new MwLocalDumpFile( "/current-dump.xml.bz2"); assertTrue(df.isAvailable()); assertEquals(df.getDumpContentType(), DumpContentType.CURRENT); } @Test public void testGuessUnknownDumpType() throws IOException { this.dm.setFileContents(this.dmPath.resolve("current-dump"), ""); MwLocalDumpFile df = new MwLocalDumpFile("/current-dump"); assertTrue(df.isAvailable()); assertEquals(df.getDumpContentType(), DumpContentType.JSON); } } Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/SitesTest.java000066400000000000000000000066111444772566300325550ustar00rootroot00000000000000package org.wikidata.wdtk.dumpfiles; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.assertEquals; import java.io.IOException; import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collections; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.implementation.SitesImpl; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Sites; import org.wikidata.wdtk.dumpfiles.wmf.WmfDumpFile; import org.wikidata.wdtk.testing.MockDirectoryManager; import org.wikidata.wdtk.testing.MockStringContentFactory; public class SitesTest { MockDirectoryManager dm; Path dmPath; DumpProcessingController dpc; @Before public void setUp() throws IOException { this.dmPath = Paths.get(System.getProperty("user.dir")); this.dm = new MockDirectoryManager(this.dmPath, true, true); this.dpc = new DumpProcessingController("wikidatawiki"); this.dpc.downloadDirectoryManager = this.dm; } @Test public void getSiteInformation() throws IOException { Path dumpFilePath = this.dmPath.resolve("dumpfiles").resolve( "wikidatawiki"); Path thisDumpPath = dumpFilePath.resolve(DumpContentType.SITES .toString().toLowerCase() + "-" + "20140420"); dm.setDirectory(dumpFilePath); dm.setDirectory(thisDumpPath); URL resourceUrl = this.getClass().getResource( "/wikidatawiki-20140420-sites.sql"); Path filePath = thisDumpPath.resolve("wikidatawiki-" + "20140420" + WmfDumpFile.getDumpFilePostfix(DumpContentType.SITES)); dm.setFileContents(filePath, MockStringContentFactory.getStringFromUrl(resourceUrl), WmfDumpFile.getDumpFileCompressionType(filePath.toString())); this.dpc.setOfflineMode(true); DataObjectFactory factory = new DataObjectFactoryImpl(); SiteLink siteLink = factory.getSiteLink("Douglas Adams", "dewiki", Collections.emptyList()); Sites sites = this.dpc.getSitesInformation(); assertEquals("en", sites.getLanguageCode("enwikivoyage")); // Test sites with protocol-relative URLs: assertEquals(SitesImpl.DEFAULT_PROTOCOL_PREFIX + "//de.wikipedia.org/wiki/Douglas_Adams", sites.getSiteLinkUrl(siteLink)); assertEquals( SitesImpl.DEFAULT_PROTOCOL_PREFIX + "//ar.wikipedia.org/wiki/%D8%AF%D9%88%D8%BA%D9%84%D8%A7%D8%B3_%D8%A2%D8%AF%D9%85%D8%B2", sites.getPageUrl("arwiki", "دوغلاس_آدمز")); assertEquals(SitesImpl.DEFAULT_PROTOCOL_PREFIX + "//en.wikipedia.org/w/api.php", sites.getFileUrl("enwiki", "api.php")); // Site with explicit http URL: assertEquals("http://aa.wikipedia.org/wiki/Test", sites.getPageUrl("aawiki", "Test")); } } Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/wmf/000077500000000000000000000000001444772566300305505ustar00rootroot00000000000000WmfDumpFileManagerTest.java000066400000000000000000000332371444772566300356560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.io.InputStream; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.dumpfiles.MwDumpFile; import org.wikidata.wdtk.dumpfiles.MwDumpFileProcessor; import org.wikidata.wdtk.testing.MockDirectoryManager; import org.wikidata.wdtk.testing.MockStringContentFactory; import org.wikidata.wdtk.testing.MockWebResourceFetcher; public class WmfDumpFileManagerTest { MockWebResourceFetcher wrf; MockDirectoryManager dm; Path dmPath; /** * Helper class to test dump file processing capabilities. * * @author Markus Kroetzsch * */ static class TestDumpfileProcessor implements MwDumpFileProcessor { String result = ""; @Override public void processDumpFileContents(InputStream inputStream, MwDumpFile dumpFile) { try { result = result + MockStringContentFactory .getStringFromInputStream(inputStream) + "\n"; } catch (IOException e) { throw new RuntimeException(e); } } } @Before public void setUp() throws IOException { this.wrf = new MockWebResourceFetcher(); this.dmPath = Paths.get(System.getProperty("user.dir")); this.dm = new MockDirectoryManager(this.dmPath, true, false); } /** * Helper method to create mocked local dump files. 
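* The dump file itself is only written when isDone is true; otherwise only the dump directory is created, mimicking a dump whose download never finished.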
* * @param dateStamp * @param dumpContentType * @param isDone * @throws IOException */ void setLocalDump(String dateStamp, DumpContentType dumpContentType, boolean isDone) throws IOException { Path dumpFilePath = this.dmPath.resolve("dumpfiles").resolve( "wikidatawiki"); Path thisDumpPath = dumpFilePath.resolve(dumpContentType.toString() .toLowerCase() + "-" + dateStamp); dm.setDirectory(thisDumpPath); if (isDone) { Path filePath = thisDumpPath.resolve(WmfDumpFile.getDumpFileName( dumpContentType, "wikidatawiki", dateStamp)); dm.setFileContents(filePath, "Contents of " + dumpContentType.toString().toLowerCase() + " " + dateStamp, WmfDumpFile.getDumpFileCompressionType(filePath.toString())); } } @Test public void getAllDailyDumps() throws IOException { wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/other/incr/wikidatawiki/", "/other-incr-wikidatawiki-index.html", this.getClass()); setLocalDump("20140220", DumpContentType.DAILY, true); setLocalDump("20140219", DumpContentType.CURRENT, true); setLocalDump("20140215", DumpContentType.DAILY, false); setLocalDump("20140205", DumpContentType.DAILY, true); setLocalDump("nodate", DumpContentType.DAILY, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, wrf); List dumpFiles = dumpFileManager .findAllDumps(DumpContentType.DAILY); String[] dumpDates = { "20140221", "20140220", "20140219", "20140218", "20140217", "20140216", "20140215", "20140214", "20140213", "20140212", "20140211", "20140210", "20140209", "20140208", "20140205" }; boolean[] dumpIsLocal = { false, true, false, false, false, false, false, false, false, false, false, false, false, false, true }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.DAILY); assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); if (dumpIsLocal[i]) { assertTrue( "Dumpfile " + dumpFiles.get(i) + " should be local.", dumpFiles.get(i) instanceof WmfLocalDumpFile); } else { assertTrue("Dumpfile " + dumpFiles.get(i) + " should be online.", dumpFiles.get(i) instanceof WmfOnlineDailyDumpFile); } } } @Test public void getAllJsonDumps() throws IOException { wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/other/wikidata/", "/other-wikidata-index.html", this.getClass()); setLocalDump("20141110", DumpContentType.JSON, true); setLocalDump("20150105", DumpContentType.CURRENT, true); setLocalDump("20141201", DumpContentType.JSON, true); setLocalDump("nodate", DumpContentType.JSON, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, wrf); List dumpFiles = dumpFileManager .findAllDumps(DumpContentType.JSON); String[] dumpDates = { "20150112", "20150105", "20141229", "20141222", "20141215", "20141210", "20141201", "20141124", "20141117", "20141110" }; boolean[] dumpIsLocal = { false, false, false, false, false, false, true, false, false, true }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.JSON); assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); if (dumpIsLocal[i]) { assertTrue( "Dumpfile " + dumpFiles.get(i) + " should be local.", dumpFiles.get(i) instanceof WmfLocalDumpFile); } else { assertTrue("Dumpfile " + dumpFiles.get(i) + " should be online.", dumpFiles.get(i) instanceof JsonOnlineDumpFile); } } } @Test public void getAllCurrentDumps() throws IOException { 
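// Mix finished local dumps, an unfinished local dump (20140210) and the online dump list: the manager should return them newest first, falling back to the online version where no finished local copy exists.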
wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/", "/wikidatawiki-index-old.html", this.getClass()); setLocalDump("20140210", DumpContentType.CURRENT, false); setLocalDump("20140123", DumpContentType.CURRENT, true); setLocalDump("20140106", DumpContentType.DAILY, true); setLocalDump("20131201", DumpContentType.CURRENT, true); setLocalDump("nodate", DumpContentType.CURRENT, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, wrf); List dumpFiles = dumpFileManager .findAllDumps(DumpContentType.CURRENT); String[] dumpDates = { "20140210", "20140123", "20140106", "20131221", "20131201" }; boolean[] dumpIsLocal = { false, true, false, false, true }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.CURRENT); assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); if (dumpIsLocal[i]) { assertTrue(dumpFiles.get(i) instanceof WmfLocalDumpFile); } else { assertTrue(dumpFiles.get(i) instanceof WmfOnlineStandardDumpFile); } } } @Test public void getAllFullDumps() throws IOException { wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/", "/wikidatawiki-index-old.html", this.getClass()); setLocalDump("20140210", DumpContentType.FULL, false); setLocalDump("20140123", DumpContentType.FULL, true); setLocalDump("20140106", DumpContentType.CURRENT, true); setLocalDump("20131201", DumpContentType.FULL, true); setLocalDump("nodate", DumpContentType.FULL, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, wrf); List dumpFiles = dumpFileManager .findAllDumps(DumpContentType.FULL); String[] dumpDates = { "20140210", "20140123", "20140106", "20131221", "20131201" }; boolean[] dumpIsLocal = { false, true, false, false, true }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.FULL); assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); if (dumpIsLocal[i]) { assertTrue(dumpFiles.get(i) instanceof WmfLocalDumpFile); } else { assertTrue(dumpFiles.get(i) instanceof WmfOnlineStandardDumpFile); } } } @Test public void getAllDailyDumpsOffline() throws IOException { setLocalDump("20140220", DumpContentType.DAILY, true); setLocalDump("20140205", DumpContentType.DAILY, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, null); List dumpFiles = dumpFileManager .findAllDumps(DumpContentType.DAILY); String[] dumpDates = { "20140220", "20140205" }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.DAILY); assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); assertTrue(dumpFiles.get(i) instanceof WmfLocalDumpFile); } } @Test public void getAllCurrentDumpsOffline() throws IOException { setLocalDump("20140220", DumpContentType.CURRENT, true); setLocalDump("20140205", DumpContentType.CURRENT, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, null); List dumpFiles = dumpFileManager .findAllDumps(DumpContentType.CURRENT); String[] dumpDates = { "20140220", "20140205" }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.CURRENT); 
assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); assertTrue(dumpFiles.get(i) instanceof WmfLocalDumpFile); } } @Test public void getAllFullDumpsOffline() throws IOException { setLocalDump("20140220", DumpContentType.FULL, true); setLocalDump("20140205", DumpContentType.FULL, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, null); List dumpFiles = dumpFileManager .findAllDumps(DumpContentType.FULL); String[] dumpDates = { "20140220", "20140205" }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.FULL); assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); assertTrue(dumpFiles.get(i) instanceof WmfLocalDumpFile); } } @Test public void getAllRelevantDumps() throws IOException { wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/other/incr/wikidatawiki/", "/other-incr-wikidatawiki-index.html", this.getClass()); wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/", "/wikidatawiki-index-old.html", this.getClass()); wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-md5sums.txt", "/wikidatawiki-20140210-md5sums.txt", this.getClass()); setLocalDump("20140220", DumpContentType.DAILY, true); setLocalDump("20140219", DumpContentType.FULL, true); setLocalDump("20140205", DumpContentType.DAILY, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, wrf); List dumpFiles = dumpFileManager .findAllRelevantRevisionDumps(true); String[] dumpDates = { "20140221", "20140220", "20140219", "20140218", "20140217", "20140216", "20140215", "20140214", "20140213", "20140212", "20140211", "20140210" }; boolean[] dumpIsLocal = { false, true, false, false, false, false, false, false, false, false, false, false }; assertEquals(dumpFiles.size(), dumpDates.length); for (int i = 0; i < dumpFiles.size(); i++) { if (i == dumpFiles.size() - 1) { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.CURRENT); } else { assertEquals(dumpFiles.get(i).getDumpContentType(), DumpContentType.DAILY); } assertEquals(dumpFiles.get(i).getDateStamp(), dumpDates[i]); if (dumpIsLocal[i]) { assertTrue(dumpFiles.get(i) instanceof WmfLocalDumpFile); } else { assertFalse(dumpFiles.get(i) instanceof WmfLocalDumpFile); } } } @Test public void getAllRelevantDumpsMainDumpMissing() throws IOException { setLocalDump("20140220", DumpContentType.DAILY, true); setLocalDump("20140210", DumpContentType.CURRENT, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, wrf); List dumpFiles = dumpFileManager .findAllRelevantRevisionDumps(false); assertEquals(dumpFiles.size(), 1); assertEquals(dumpFiles.get(0).getDumpContentType(), DumpContentType.DAILY); assertEquals(dumpFiles.get(0).getDateStamp(), "20140220"); assertTrue(dumpFiles.get(0) instanceof WmfLocalDumpFile); } @Test public void processAllRelevantDumps() throws IOException { setLocalDump("20140221", DumpContentType.DAILY, true); setLocalDump("20140220", DumpContentType.DAILY, true); setLocalDump("20140219", DumpContentType.CURRENT, true); WmfDumpFileManager dumpFileManager = new WmfDumpFileManager( "wikidatawiki", dm, null); TestDumpfileProcessor dfp = new TestDumpfileProcessor(); for (MwDumpFile dumpFile : dumpFileManager .findAllRelevantRevisionDumps(true)) { dfp.processDumpFileContents(dumpFile.getDumpFileStream(), dumpFile); } 
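// The relevant dumps must be processed newest first: both dailies, then the current dump that covers the remaining older history.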
assertEquals( dfp.result, "Contents of daily 20140221\nContents of daily 20140220\nContents of current 20140219\n"); } } WmfDumpFileTest.java000066400000000000000000000024121444772566300343520ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.junit.Test; import org.wikidata.wdtk.util.CompressionType; import static org.junit.Assert.assertEquals; public class WmfDumpFileTest { @Test public void getDumpFileCompressionType() { assertEquals(WmfDumpFile.getDumpFileCompressionType("foo.tar.gz"), CompressionType.GZIP); assertEquals(WmfDumpFile.getDumpFileCompressionType("bar.txt.bz2"), CompressionType.BZ2); assertEquals(WmfDumpFile.getDumpFileCompressionType("baz.txt"), CompressionType.NONE); assertEquals(WmfDumpFile.getDumpFileCompressionType("bat.txt"), CompressionType.NONE); } } WmfLocalDumpFileTest.java000066400000000000000000000043151444772566300353310ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.assertFalse; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.testing.MockDirectoryManager; public class WmfLocalDumpFileTest { MockDirectoryManager dm; Path dmPath; @Before public void setUp() throws Exception { this.dmPath = Paths.get(System.getProperty("user.dir")) .resolve("dumpfiles").resolve("wikidatawiki"); this.dm = new MockDirectoryManager(this.dmPath, true, true); } @Test(expected = IllegalArgumentException.class) public void directoryDoesNotExist() { new WmfLocalDumpFile("20140220", "wikidatawiki", dm, DumpContentType.DAILY); } @Test(expected = IllegalArgumentException.class) public void directoryNotReadable() throws IOException { MockDirectoryManager dm = Mockito.mock(MockDirectoryManager.class); Mockito.when(dm.hasSubdirectory("daily-20140220")).thenReturn(true); Mockito.doThrow(new IOException()).when(dm) .getSubdirectoryManager("daily-20140220"); new WmfLocalDumpFile("20140220", "wikidatawiki", dm, DumpContentType.DAILY); } @Test public void missingDumpFile() throws IOException { Path thisDumpPath = this.dmPath.resolve("daily-20140220"); dm.setDirectory(thisDumpPath); WmfLocalDumpFile dumpFile = new WmfLocalDumpFile("20140220", "wikidatawiki", dm, DumpContentType.DAILY); assertFalse(dumpFile.isAvailable()); } } WmfOnlineDailyDumpFileTest.java000066400000000000000000000103751444772566300365110ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Paths; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.testing.MockDirectoryManager; import org.wikidata.wdtk.testing.MockWebResourceFetcher; import org.wikidata.wdtk.util.CompressionType; import static org.junit.Assert.*; public class WmfOnlineDailyDumpFileTest { MockWebResourceFetcher wrf; MockDirectoryManager dm; @Before public void setUp() throws IOException { dm = new MockDirectoryManager( Paths.get(System.getProperty("user.dir")), true, false); wrf = new MockWebResourceFetcher(); } @Test public void validDumpProperties() throws IOException { String dateStamp = "20140220"; wrf.setWebResourceContents( "https://dumps.wikimedia.org/other/incr/wikidatawiki/" + dateStamp + "/status.txt", "done"); wrf.setWebResourceContents( "https://dumps.wikimedia.org/other/incr/wikidatawiki/" + dateStamp + "/wikidatawiki-" + dateStamp + "-pages-meta-hist-incr.xml.bz2", "Line1", CompressionType.BZ2); WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp, "wikidatawiki", wrf, dm); BufferedReader br = dump.getDumpFileReader(); assertEquals(br.readLine(), "Line1"); assertNull(br.readLine()); assertTrue(dump.isAvailable()); assertTrue(dump.isAvailable()); // second time should use cached entry assertEquals(dateStamp, dump.getDateStamp()); assertEquals("wikidatawiki", dump.getProjectName()); assertEquals("wikidatawiki-daily-" + dateStamp, dump.toString()); assertEquals(DumpContentType.DAILY, dump.getDumpContentType()); } @Test public void missingDumpProperties() { String dateStamp = "20140220"; WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp, "wikidatawiki", wrf, dm); assertFalse(dump.isAvailable()); assertEquals(dateStamp, dump.getDateStamp()); } @Test public void emptyDumpProperties() throws IOException { String dateStamp = "20140220"; wrf.setWebResourceContents( "http://dumps.wikimedia.org/other/incr/wikidatawiki/" + dateStamp + "/status.txt", ""); WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp, "wikidatawiki", wrf, dm); assertFalse(dump.isAvailable()); assertEquals(dateStamp, dump.getDateStamp()); } @Test public void inaccessibleStatus() throws IOException { String dateStamp = "20140220"; wrf.setWebResourceContents( "http://dumps.wikimedia.org/other/incr/wikidatawiki/" + dateStamp + "/status.txt", "done"); wrf.setReturnFailingReaders(true); WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp, "wikidatawiki", wrf, dm); assertFalse(dump.isAvailable()); } @Test(expected = IOException.class) public void downloadNoRevisionId() throws IOException { String dateStamp = "20140220"; wrf.setWebResourceContents( "http://dumps.wikimedia.org/other/incr/wikidatawiki/" + dateStamp + "/wikidatawiki-" + dateStamp + "-pages-meta-hist-incr.xml.bz2", "Line1", CompressionType.BZ2); WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp, "wikidatawiki", wrf, dm); dump.getDumpFileReader(); } @Test(expected = IOException.class) public void downloadNoDumpFile() throws IOException { String dateStamp = "20140220"; wrf.setWebResourceContents( "http://dumps.wikimedia.org/other/incr/wikidatawiki/" + dateStamp + "/status.txt", "done"); WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp, "wikidatawiki", wrf, dm); dump.getDumpFileReader(); } } 
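The tests above exercise the same call sequence that an application would use against a locally downloaded dump. As a point of reference, here is a minimal usage sketch: the class name LocalDumpStatistics is illustrative and not part of the toolkit, a daily dump is assumed to already exist under dumpfiles/wikidatawiki/ in the working directory (the default layout used throughout these tests), and only API calls that appear in the tests themselves are used.

import org.wikidata.wdtk.dumpfiles.DumpContentType;
import org.wikidata.wdtk.dumpfiles.DumpProcessingController;
import org.wikidata.wdtk.dumpfiles.StatisticsMwRevisionProcessor;

// Illustrative example class; the name is not part of Wikidata Toolkit.
public class LocalDumpStatistics {
    public static void main(String[] args) throws Exception {
        DumpProcessingController dpc = new DumpProcessingController("wikidatawiki");
        dpc.setOfflineMode(true); // use only locally available dumps, never download
        // Register a statistics processor for all content models (null),
        // restricted to current revisions (true); the name and numeric
        // argument mirror the values used in the tests above:
        StatisticsMwRevisionProcessor stats = new StatisticsMwRevisionProcessor("stats", 2);
        dpc.registerMwRevisionProcessor(stats, null, true);
        // Process the newest local daily dump (assumed to exist, see above):
        dpc.processDump(dpc.getMostRecentDump(DumpContentType.DAILY));
        System.out.println("Current revisions seen: " + stats.getCurrentRevisionCount());
    }
}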
WmfOnlineStandardDumpFileTest.java000066400000000000000000000150031444772566300372000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/wmfpackage org.wikidata.wdtk.dumpfiles.wmf; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Paths; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.dumpfiles.MwDumpFile; import org.wikidata.wdtk.testing.MockDirectoryManager; import org.wikidata.wdtk.testing.MockWebResourceFetcher; import org.wikidata.wdtk.util.CompressionType; import static org.junit.Assert.*; public class WmfOnlineStandardDumpFileTest { MockWebResourceFetcher wrf; MockDirectoryManager dm; @Before public void setUp() throws IOException { dm = new MockDirectoryManager( Paths.get(System.getProperty("user.dir")), true, false); wrf = new MockWebResourceFetcher(); } @Test public void validCurrentDumpPropertiesOldFormat() throws IOException { wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/20140210/", "/wikidatawiki-20140210-index.html", this.getClass()); wrf.setWebResourceContents( "https://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-pages-meta-current.xml.bz2", "Line1", CompressionType.BZ2); wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-md5sums.txt", "/wikidatawiki-20140210-md5sums.txt", this.getClass()); MwDumpFile dump = new WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.CURRENT); BufferedReader br = dump.getDumpFileReader(); assertEquals(br.readLine(), "Line1"); assertNull(br.readLine()); assertTrue(dump.isAvailable()); assertEquals("20140210", dump.getDateStamp()); assertEquals(DumpContentType.CURRENT, dump.getDumpContentType()); } @Test public void validCurrentDumpPropertiesNewFormat() throws IOException { wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/20140210/", "/wikidatawiki-20140508-index.html", this.getClass()); wrf.setWebResourceContents( "https://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-pages-meta-current.xml.bz2", "Line1", CompressionType.BZ2); wrf.setWebResourceContentsFromResource( "https://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-md5sums.txt", "/wikidatawiki-20140210-md5sums.txt", this.getClass()); MwDumpFile dump = new WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.CURRENT); BufferedReader br = dump.getDumpFileReader(); assertEquals(br.readLine(), "Line1"); assertNull(br.readLine()); assertTrue(dump.isAvailable()); assertEquals("20140210", dump.getDateStamp()); assertEquals(DumpContentType.CURRENT, dump.getDumpContentType()); } @Test public void missingFullDumpProperties() { MwDumpFile dump = new 
WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.FULL); assertFalse(dump.isAvailable()); assertEquals("20140210", dump.getDateStamp()); } @Test public void inaccessibleCurrentDumpProperties() throws IOException { wrf.setWebResourceContentsFromResource( "http://dumps.wikimedia.org/wikidatawiki/20140210/", "/wikidatawiki-20140210-index.html", this.getClass()); wrf.setWebResourceContents( "http://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-pages-meta-current.xml.bz2", "Line1"); wrf.setWebResourceContentsFromResource( "http://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-md5sums.txt", "/wikidatawiki-20140210-md5sums.txt", this.getClass()); wrf.setReturnFailingReaders(true); MwDumpFile dump = new WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.CURRENT); assertFalse(dump.isAvailable()); } @Test public void emptyFullDumpIsDone() throws IOException { wrf.setWebResourceContentsFromResource( "http://dumps.wikimedia.org/wikidatawiki/20140210/", "/wikidatawiki-20140210-index.html", this.getClass()); MwDumpFile dump = new WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.FULL); assertFalse(dump.isAvailable()); assertEquals("20140210", dump.getDateStamp()); assertEquals(DumpContentType.FULL, dump.getDumpContentType()); } @Test(expected = IOException.class) public void downloadNoRevisionId() throws IOException { wrf.setWebResourceContents( "http://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-pages-meta-current.xml.bz2", "Line1"); wrf.setWebResourceContentsFromResource( "http://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-md5sums.txt", "/wikidatawiki-20140210-md5sums.txt", this.getClass()); MwDumpFile dump = new WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.FULL); dump.getDumpFileReader(); } @Test(expected = IOException.class) public void downloadNoMd5sum() throws IOException { wrf.setWebResourceContents( "http://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-pages-meta-current.xml.bz2", "Line1"); wrf.setWebResourceContentsFromResource( "http://dumps.wikimedia.org/wikidatawiki/20140210/", "/wikidatawiki-20140210-index.html", this.getClass()); MwDumpFile dump = new WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.FULL); dump.getDumpFileReader(); } @Test(expected = IOException.class) public void downloadNoDumpFile() throws IOException { wrf.setWebResourceContentsFromResource( "http://dumps.wikimedia.org/wikidatawiki/20140210/", "/wikidatawiki-20140210-index.html", this.getClass()); wrf.setWebResourceContentsFromResource( "http://dumps.wikimedia.org/wikidatawiki/20140210/wikidatawiki-20140210-md5sums.txt", "/wikidatawiki-20140210-md5sums.txt", this.getClass()); MwDumpFile dump = new WmfOnlineStandardDumpFile("20140210", "wikidatawiki", wrf, dm, DumpContentType.CURRENT); dump.getDumpFileReader(); } } Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/000077500000000000000000000000001444772566300235235ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/empty-dump.xml000066400000000000000000000042761444772566300263570ustar00rootroot00000000000000 Wikidata http://www.wikidata.org/wiki/Main_Page MediaWiki 1.23wmf14 first-letter Media Special Talk User User talk Wikidata Wikidata talk File File talk MediaWiki MediaWiki talk Template Template talk Help Help talk Category Category talk Property Property talk Query Query talk Module 
Module talk Translations Translations talk Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/mock-dump-for-long-testing.json000066400000000000000000001775361444772566300315310ustar00rootroot00000000000000[ {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q1","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1454986}},"datatype":"wikibase-item"},"id":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q8","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":331769}},"datatype":"wikibase-item"},"id":"q8$E2EFA381-BA5D-4F52-AF74-660B9A044C1E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":9415}},"datatype":"wikibase-item"},"id":"q8$5DB9C22E-4D86-4FB1-AC36-5C75CC806D6A","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P16","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P19","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608756}},"datatype":"wikibase-item"},"id":"P19$84157cf4-4650-a50a-72ce-fbaa254e7f34","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P19$affa32c3-4cd8-1842-bc28-35b3288cce5d","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18635217}},"datatype":"wikibase-item"},"id":"P19$4a666229-40c1-474f-a5a0-7ff90f767207","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P22","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P22$78f8ae50-4e81-55cb-119a-f5c49b828c37","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q23","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"q23$935f9100-47ca-f387-7946-45f9db09e81f","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q24","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":15632617}},"datatype":"wikibase-item"},"id":"q24$4E4D9DE3-CDC7-48E5-A644-18489D523EA1","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q31","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q31$B3580D37-D30E-4BC6-A591-2CE49253CAB4","type":"statement"},{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":43702}},"datatype":"wikibase-item"},"id":"q31$D548FD84-0A96-4954-B672-1C4A4943DEE0","type":"statement"},{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q31$C082AE58-D018-4E46-89E5-1130E01F7632","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q31$5c117765-4eb5-0073-f352-9a9b81f21f59","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q31$d3b6fe0e-4a05-0834-4959-ad43f9b47c99","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q31$84b4d16f-4773-ea52-e851-3abe5ea24089","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P31","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q33","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P304":[{"snaktype":"value","property":"P304","datavalue":{"type":"string","value":"603"}}],"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":14334357}},"datatype":"wikibase-item"}],"P1683":[{"snaktype":"value","property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"fi","text":"Joulukuun kuudentena päivänä vuonna 1917 Suomen eduskunta hyväksyi senaatin ilmoituksen siitä, että Suomi oli nyt 
itsenäinen."}},"datatype":"monolingualtext"}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":14334357}},"datatype":"wikibase-item"},{"property":"P304","datavalue":{"type":"string","value":"603"}},{"property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"fi","text":"Joulukuun kuudentena päivänä vuonna 1917 Suomen eduskunta hyväksyi senaatin ilmoituksen siitä, että Suomi oli nyt itsenäinen."}},"datatype":"monolingualtext"}],"snaks-order":["P248","P304","P1683"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001917-12-06T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q33$CBE1D73C-6F18-45E6-A437-7657B825E87E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q33$1D955803-700D-4B70-997F-2ABB4C084EB2","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001995-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q33$81CCBEAB-A5E7-404A-B7E3-E46B240E179F","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001955-12-14T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q33$0888ad3b-482b-1629-7deb-a9394955ce7a","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001989-05-05T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q33$54d5a285-4fd3-82a3-57ae-9b12b7ab2148","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":179164}},"datatype":"wikibase-item"},"id":"Q33$cdab5cb1-4e80-6b08-7f5b-bbbacc3db6ca","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P35","claims":{}}, 
{"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P36","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615777}},"datatype":"wikibase-item"},"id":"P36$b05e52e4-49bb-04bc-0efe-c857f4ee6010","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P37","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18616084}},"datatype":"wikibase-item"},"id":"P37$96d50800-4435-2965-dfbc-337388287f9f","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P39","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P39$5012a41d-40d0-cbfb-24b5-3a416b151549","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q42","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P214":[{"snaktype":"value","property":"P214","datavalue":{"type":"string","value":"113230702"}}],"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":54919}},"datatype":"wikibase-item"}],"P813":[{"snaktype":"value","property":"P813","datavalue":{"type":"time","value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":54919}},"datatype":"wikibase-item"},{"property":"P214","datavalue":{"type":"string","value":"113230702"}},{"property":"P813","datavalue":{"type":"time","value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}],"snaks-order":["P248","P214","P813"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"Q42$F078E5B3-F9A8-480E-B7AC-D97778CBBEF9","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q45","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q45$7B1056BA-FDFB-49D6-9F83-0801DDC93535","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q45$26b2860a-4d55-fe91-d6ef-1419a7573981","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q45$b143acfe-4531-1b56-22f4-ec5f6045d473","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q51","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5107}},"datatype":"wikibase-item"},"id":"q51$1156F414-87F1-46BB-BABA-8AAF3DA0F45C","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P53","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P53$a9daf028-4f00-5f35-f778-364f501e985e","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P54","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P54$df12171d-484f-7891-b0ed-6981024f2e23","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q55","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":15304003}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001954-12-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q55$E3418C6B-2F14-4FF2-AE16-C3B3C09F9C88","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1763527}},"datatype":"wikibase-item"},"id":"Q55$474B29F2-F2A6-49AD-A1EC-7CD0C757D3E4","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7275}},"datatype":"wikibase-item"},"id":"Q55$0c63c3a1-48b0-6eaf-9edc-dd024518ae4c","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q58","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q62","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":13218382}},"datatype":"wikibase-item"},"id":"q62$C535861D-BC41-4B6E-93AD-0C46B961B700","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1549591}},"datatype":"wikibase-item"},"id":"Q62$fd2fa7d3-4fa9-e44c-fd24-547380174bbc","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"Q62$b21e6bb2-4bb5-7692-d436-d2d22d6bf063","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q68","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":12468333}},"datatype":"wikibase-item"},"id":"Q68$2C1D1AAF-A295-403E-AFCB-EB902DB5762F","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q75","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1301371}},"datatype":"wikibase-item"},"id":"q75$09CE92CB-C019-4E99-B6A5-4460B5DC1AA2","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P81","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q83","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6686945}},"datatype":"wikibase-item"},"id":"q83$63673D69-9CBD-43E9-AF03-F93523C223E0","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":341}},"datatype":"wikibase-item"},"id":"Q83$6563c8b1-4db3-1231-dbd2-ae02b360c877","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P88","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18618644}},"datatype":"wikibase-item"},"id":"P88$2ddcc1f9-4b77-6b35-5716-0f3f5828c680","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P92","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q99","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":35657}},"datatype":"wikibase-item"},"id":"Q99$AF13EDE9-BD02-43E2-BEE0-1B50977E5247","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q100","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"q100$F5900CEA-A5F7-4600-B7C4-041C6B1F4D3E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1549591}},"datatype":"wikibase-item"},"id":"Q100$ad5b329b-43c9-f6d9-9d0b-a08c1f4f0abb","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q102","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":101991}},"datatype":"wikibase-item"},"id":"q102$8B5A6E17-E645-4F94-AE93-C02515B608F7","type":"statement"}]}}, 
{"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P102","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P102$e96b18d8-4edd-5073-855e-655584de60ac","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q103","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7366}},"datatype":"wikibase-item"},"id":"q103$e723bd53-46db-4968-8595-5537b9c1e707","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P105","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18609040}},"datatype":"wikibase-item"},"id":"P105$46bcec0c-4854-2a1e-c099-949d9cbd9e03","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P108","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P108$5cd59d86-4bfe-c033-7e9e-7103c78f6dd0","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"commonsMedia","id":"P109","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P109$58e62b48-489a-5c14-c77d-1673c1320d15","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P111","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P118","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P119","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608756}},"datatype":"wikibase-item"},"id":"P119$4bf19ee9-4e0b-a15a-405c-8d1afaaa3384","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P119$d9136aab-4751-613f-5306-5df61c98c660","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18635217}},"datatype":"wikibase-item"},"id":"P119$9b6ba302-4c78-5a6b-22e7-8d9ab18c8c01","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P121","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q125","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q129","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":41825}},"datatype":"wikibase-item"},"id":"Q129$1101349A-38FB-45F0-B365-60CD8EEE0ACA","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P131","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615777}},"datatype":"wikibase-item"},"id":"P131$bbea2c26-4fbe-11ba-abba-08baf0ace03a","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q136","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":208440}},"datatype":"wikibase-item"},"id":"q136$64A9A736-8806-47F7-9FB6-686A7DBB87B0","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q140","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":16521}},"datatype":"wikibase-item"},"id":"q140$8EE98E5B-4A9C-4BF5-B456-FB77E8EE4E69","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P143","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608359}},"datatype":"wikibase-item"},"id":"P143$3ff1afa0-4d3e-256c-8efb-d624229044b4","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q144","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":16521}},"datatype":"wikibase-item"},"id":"Q144$3bf4b72c-4d23-6a49-eced-b9a17b81b7d9","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q147","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q148","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q148$F54994F8-1C4C-4929-8CB3-91DCED2A4BDB","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q148$4454ff59-46b6-f73b-df46-d0c03c4505f6","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6688521}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001971-10-25T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q148$9ceccd08-433f-7fba-a5fd-dabf52e6c8e4","type":"statement"},{"rank":"normal","references":[{"snaks":{"P356":[{"snaktype":"value","property":"P356","datavalue":{"type":"string","value":"10.1007/s11127-009-9491-2"}}]},"allSnaks":[{"property":"P356","datavalue":{"type":"string","value":"10.1007/s11127-009-9491-2"}}],"snaks-order":["P356"]},{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.systemicpeace.org/polity/China2010.pdf"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.systemicpeace.org/polity/China2010.pdf"}}],"snaks-order":["P854"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":317}},"datatype":"wikibase-item"},"id":"Q148$c3a4dafb-4062-9e4b-746b-c578822f6dc9","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P149","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q155","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q155$3BF35646-D69E-4BC3-843A-2BFDFEC6D9BB","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6814224}},"datatype":"wikibase-item"},"id":"q155$3147400A-6A8C-48DE-99EB-29F713B4D69E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q155$9740be48-4542-bbea-41b3-fb254858e780","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":859563}},"datatype":"wikibase-item"},"id":"q155$8774ac4a-4da3-0343-7785-b4c161eb89d2","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":4209223}},"datatype":"wikibase-item"},"id":"q155$c87f5e5d-4025-ff4b-b1cd-02760cac0ad2","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q155$c9d9c33f-4ea8-9ff0-269e-f68a51d3f343","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6316828}},"datatype":"wikibase-item"},"id":"Q155$b1db9fc5-486e-8f33-03e1-1399f91d2023","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P155","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615033}},"datatype":"wikibase-item"},"id":"P155$0e2372e9-4af6-6995-fe43-9f925c576afc","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q156","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":174211}},"datatype":"wikibase-item"},"id":"Q156$F86CD234-50BD-43CC-AE32-67065432CF80","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q163","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1138494}},"datatype":"wikibase-item"},"id":"Q163$038a9f31-4821-d78c-9317-b2f7b6da9912","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P163","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q166","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":165}},"datatype":"wikibase-item"},"id":"q166$4F2C4ECC-4E3F-4D97-8830-37A11FFBACE2","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P166","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P166$62623b1f-41b2-e961-ae2f-0598e86abde7","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P169","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P171","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18609040}},"datatype":"wikibase-item"},"id":"P171$8bd4f9b1-4a52-cb35-eeaf-7c3199567c24","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P175","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18618644}},"datatype":"wikibase-item"},"id":"P175$3fda1655-4092-134c-5485-c6cde5a467d1","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q177","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":2095}},"datatype":"wikibase-item"},"id":"Q177$F50982AF-5183-47CD-AE5D-80F1FE2751ED","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q178","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":192874}},"datatype":"wikibase-item"},"id":"Q178$C6F03114-8653-4FC6-B348-9186A3E2E27E","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P178","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q183","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.documentarchiv.de/brd/dtlvertrag.html"}}],"P1683":[{"snaktype":"value","property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der 
Bundesrepublik auflösen."}},"datatype":"monolingualtext"}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.documentarchiv.de/brd/dtlvertrag.html"}},{"property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der Bundesrepublik auflösen."}},"datatype":"monolingualtext"}],"snaks-order":["P854","P1683"]},{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1206379}},"datatype":"wikibase-item"}],"P1683":[{"snaktype":"value","property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der Bundesrepublik auflösen."}},"datatype":"monolingualtext"}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1206379}},"datatype":"wikibase-item"},{"property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der Bundesrepublik auflösen."}},"datatype":"monolingualtext"}],"snaks-order":["P248","P1683"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q183$7F17CEF2-2897-4C4D-9D1C-B31DC979AF5C","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11122}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"DER PRÄSIDENT DER BUNDESREPUBLIK DEUTSCHLAND, [.]HABEN BESCHLOSSEN, eine Europäische Union zu gründen[.]"}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11122}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"DER PRÄSIDENT DER BUNDESREPUBLIK DEUTSCHLAND, [.]HABEN BESCHLOSSEN, eine Europäische Union zu 
gründen[.]"}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q183$03C2E14C-6F87-4B91-81AD-2F5D1CC63DAC","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q183$686c68bb-45a2-c75c-04a9-b287b46e85c8","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":43702}},"datatype":"wikibase-item"},"id":"q183$644285f0-4d8d-5ccc-c142-262260f1abee","type":"statement"},{"rank":"normal","references":[{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.un.org/en/members/index.shtml#g"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.un.org/en/members/index.shtml#g"}}],"snaks-order":["P854"]},{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Mitgliedstaaten_der_Vereinten_Nationen"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Mitgliedstaaten_der_Vereinten_Nationen"}}],"snaks-order":["P854"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q183$edd6bb62-4a91-68db-5b74-56f806b7e7ac","type":"statement"},{"rank":"normal","references":[{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.coe.int/en/web/portal/germany"}}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Germany became member of the Council of Europe on 13 July 1950."}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.coe.int/en/web/portal/germany"}},{"property":"P387","datavalue":{"type":"string","value":"Germany became member of the Council of Europe on 13 July 
1950."}}],"snaks-order":["P854","P387"]},{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Europarat#Mitglieder"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Europarat#Mitglieder"}}],"snaks-order":["P854"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q183$e1bcc1c6-4c94-4113-fd58-94cb62b8a6c4","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20 (2) Alle Staatsgewalt geht vom Volke aus. Sie wird vom Volke in Wahlen und Abstimmungen und durch besondere Organe der Gesetzgebung, der vollziehenden Gewalt und der Rechtsprechung ausgeübt."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20 (2) Alle Staatsgewalt geht vom Volke aus. Sie wird vom Volke in Wahlen und Abstimmungen und durch besondere Organe der Gesetzgebung, der vollziehenden Gewalt und der Rechtsprechung ausgeübt."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7270}},"datatype":"wikibase-item"},"id":"Q183$d53fce77-4c47-b46a-0f1b-e53824ab2686","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":619610}},"datatype":"wikibase-item"},"id":"Q183$b76df508-420c-9141-f4e2-abc4e4897332","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20(3) Die Gesetzgebung ist an die verfassungsmäßige Ordnung, die vollziehende Gewalt und die Rechtsprechung sind an Gesetz und Recht gebunden."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20(3) Die Gesetzgebung ist an die verfassungsmäßige Ordnung, die vollziehende Gewalt und die Rechtsprechung sind an Gesetz und Recht 
gebunden."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":4209223}},"datatype":"wikibase-item"},"id":"Q183$c763e81f-4eee-eabd-9c3a-7164abcfdc46","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7275}},"datatype":"wikibase-item"},"id":"Q183$a8d2bb5b-4ba3-22c8-00a4-865b1e622182","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P183","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P189","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18635217}},"datatype":"wikibase-item"},"id":"P189$6c011417-4e64-78b7-cc1d-ff5d0c557abe","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P193","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P195","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q197","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P200","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q202","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":21199}},"datatype":"wikibase-item"},"id":"q202$86B5F4BE-FD9F-4FD4-B8A6-9A7BBC857F5E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":50707}},"datatype":"wikibase-item"},"id":"q202$e9b902d9-48a9-0333-4c6f-df5c93308c16","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":467511}},"datatype":"wikibase-item"},"id":"q202$7e294f50-4388-45ff-917a-7d1226b19b92","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":50705}},"datatype":"wikibase-item"},"id":"q202$36d1b208-42a7-7f46-ab14-852889698d89","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":13366104}},"datatype":"wikibase-item"},"id":"Q202$E3C72A56-1B3F-47CE-A090-B6C4D586AFE5","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":190890}},"datatype":"wikibase-item"},"id":"Q202$62042722-84A4-45D7-895B-F018395D9E28","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":975166}},"datatype":"wikibase-item"},"id":"Q202$736D2862-693D-42F1-8688-49DB0146554D","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P206","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615777}},"datatype":"wikibase-item"},"id":"P206$eeea0757-45ab-a2c0-c942-1f95f0854cc2","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q207","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"q207$54a1c2b9-4b94-2d6c-576d-15ef3c527b14","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P208","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q210","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11352}},"datatype":"wikibase-item"},"id":"q210$2DBF6CA1-DD8D-4CEA-A152-0A37E1C6D217","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q216","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"q216$71CEE092-9B75-4783-B479-F651841ECCEA","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5119}},"datatype":"wikibase-item"},"id":"q216$91CCAEAD-8B4E-4E1B-AC52-9552A411031F","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1363145}},"datatype":"wikibase-item"},"id":"Q216$dff956ef-483d-feec-f6a9-baf0d915e3db","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q217","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q217$5B200529-E18E-4A7F-8070-DBB25C32BD89","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":179164}},"datatype":"wikibase-item"},"id":"Q217$54f34829-44c3-d5d6-eceb-c7d568ed543e","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":123480}},"datatype":"wikibase-item"},"id":"Q217$0ef0ab81-48ae-ec4d-8485-3694b3c08e9e","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"Q217$f1b30a97-45fc-d41d-a165-cacaf267ff77","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q217$41ba6be5-4e51-81f0-0092-c19242403ba7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q217$48955323-4b40-614d-d9a9-43325377b377","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q218","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q218$7887078F-118F-4D38-B2D9-3391EA26A154","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q218$E69D5C77-0362-451A-A619-173844FA5D00","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"Q218$6d5560b4-403a-39ff-dd80-2ee95bab9ca6","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q220","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"q220$093BE690-FEED-49CB-8EB2-BD5FEC47B8A7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":747074}},"datatype":"wikibase-item"},"id":"q220$168e1d6a-44a1-d705-308e-3f771aa38854","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5119}},"datatype":"wikibase-item"},"qualifiers":{"P17":[{"snaktype":"value","property":"P17","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":38}},"datatype":"wikibase-item"}]},"qualifiers-order":["P17"],"id":"q220$8FB724DE-9C66-4711-AED2-014C50EBE3CE","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P220","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P225","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18609040}},"datatype":"wikibase-item"},"id":"P225$c791da3a-40d8-25f2-af7b-452b807a8bd4","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P227","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18614948}},"datatype":"wikibase-item"},"id":"P227$a1625bd2-4359-9c8b-ec9a-87ff82978222","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":853614}},"datatype":"wikibase-item"},"id":"P227$45897b3b-4287-28c1-5411-d084c7d8282d","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q228","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q228$82D5B5DF-AFDC-4B00-89BC-9E3633E5DA0E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q228$e8956bd3-4d87-5941-52ee-aefb1d55bba0","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q228$111c1a20-4955-b4ff-32fb-7ffadda7340d","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q228$df49ab43-4bab-063a-825c-dbd70a708f4a","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":123480}},"datatype":"wikibase-item"},"id":"Q228$d1d1baf8-4845-597e-8a1e-585739183f09","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":208500}},"datatype":"wikibase-item"},"id":"Q228$0806dcf5-4ae1-e90c-d214-668e7861a059","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P230","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P231","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q233","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q233$EAE14727-D2B9-423D-A8FB-451034A292C7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":112099}},"datatype":"wikibase-item"},"id":"q233$78142EAC-A9BA-4791-B850-331C45E68D84","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q233$60F83D83-2AA1-4235-9CED-89D56F5502B3","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":179164}},"datatype":"wikibase-item"},"id":"Q233$b77b5c5b-4a09-5863-dec1-2b3f9da9557c","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q233$091bcd4f-462f-efa6-4842-f13dcd4e9b20","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q233$76a86556-4238-8dd9-b57c-1a4cba750b6d","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q244","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":202686}},"datatype":"wikibase-item"},"id":"q244$47574b84-4190-ee0e-47df-f50cc75562e7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7275}},"datatype":"wikibase-item"},"id":"Q244$055FCA94-3A3F-41F5-8011-989F5C3D997F","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":112099}},"datatype":"wikibase-item"},"id":"Q244$ad917cb8-4c1a-6970-7cdd-501d0831ad99","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P246","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P248","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608359}},"datatype":"wikibase-item"},"id":"P248$6a0671bf-4e3e-d161-1700-8434560468e7","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q257","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":81392}},"datatype":"wikibase-item"},"id":"q257$B04FFD59-16E4-43E4-B503-A9EF09EDC490","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":36161}},"datatype":"wikibase-item"},"id":"q257$BB70B907-96DA-47FB-9A63-99C36CA315DD","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P263","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P263$90971614-46a9-7c95-0ac2-64ffd222cd2f","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q277","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":8447}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":8447}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10373548}},"datatype":"wikibase-item"},"id":"q277$D02C900D-2EC6-49CB-9BFE-A055A6F7C275","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q278","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10373548}},"datatype":"wikibase-item"},"id":"q278$4634A61F-555A-4072-8B54-6F2C20C0DDF1","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q281","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":199360}},"datatype":"wikibase-item"},"id":"Q281$ca84cd87-4e73-58bb-a836-3d91e8895922","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q284","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":199360}},"datatype":"wikibase-item"},"id":"Q284$9417e429-4e4a-7864-4f4d-d7b9513af9de","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q286","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1074}},"datatype":"wikibase-item"},"id":"Q286$2A823B0D-0465-4BCB-9A78-17418ACDD288","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q288","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11920}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11920}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":484170}},"datatype":"wikibase-item"},"id":"q288$F922B432-E8A2-47D8-B539-BDAE91BB8018","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q291","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":483394}},"datatype":"wikibase-item"},"id":"Q291$baf8d767-4632-e637-977c-0a2c38e9321f","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q293","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":16521}},"datatype":"wikibase-item"},"id":"Q293$F840EB32-811B-4498-9E03-CD1F03D4240B","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q297","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"Q297$4E477B20-5247-4AAD-8D07-FDF90386CB51","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q298","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q298$54472542-9E4E-4AA8-A92F-D8ED19E8AA20","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q298$15491D60-6363-4B69-92D5-733BD7C5DC07","type":"statement"}]}} ] Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/mock-dump-for-testing.json000066400000000000000000000042651444772566300305600ustar00rootroot00000000000000[ {"type":"item","aliases":{"de":[{"language":"de","value":"Weltall"},{"language":"de","value":"All"},{"language":"de","value":"Kosmos"},{"language":"de","value":"Weltraum"}]},"labels":{"de":{"language":"de","value":"Universum"}},"descriptions":{"de":{"language":"de","value":"Gesamtheit der Energie, des Raumes und der Materie"}},"sitelinks":{"frwiki":{"badges":[],"site":"frwiki","title":"Univers"}},"id":"Q1","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1454986}},"datatype":"wikibase-item"},"id":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","type":"statement"}],"P18":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P18","datavalue":{"type":"string","value":"Hubble ultra deep field.jpg"}},"id":"q1$fd1de6d2-4522-5d35-5e15-e7e144452ba9","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{"de":{"language":"de","value":"Glück"}},"descriptions":{"de":{"language":"de","value":"Erfüllung menschlichen Wünschens und Strebens"}},"sitelinks":{"frwiki":{"badges":[],"site":"frwiki","title":"Bonheur"}},"id":"Q8","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":331769}},"datatype":"wikibase-item"},"id":"q8$E2EFA381-BA5D-4F52-AF74-660B9A044C1E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":9415}},"datatype":"wikibase-item"},"id":"q8$5DB9C22E-4D86-4FB1-AC36-5C75CC806D6A","type":"statement"}],"P18":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P18","datavalue":{"type":"string","value":"Sweet Baby Kisses Family Love.jpg"}},"id":"q8$7dcd6734-4ece-8b37-9386-92e5d4e801cd","type":"statement"}]}}, {"type":"property","aliases":{"de":[{"language":"de","value":"Strassennetz"}]},"labels":{"de":{"language":"de","value":"Straßennetz"}},"descriptions":{"de":{"language":"de","value":"übergeordnetes Straßensystem"}},"datatype":"wikibase-item","id":"P16","claims":{}}, {"type":"lexeme","id":"L1","lexicalCategory":"Q4","language":"Q9"} ] Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/mock-dump-for-testing.xml000066400000000000000000000153441444772566300304070ustar00rootroot00000000000000 Wikidata Toolkit Test http://example.org/wiki/Main_Page Markus 1.23wmf14 first-letter Media Special Talk Property Property talk Translations Translations talk Q1 0 32 4 3 2014-02-19T23:34:14Z 127.0.0.4 Test comment 4 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 4"}}} ignored wikibase-item application/json 5 4 2014-02-19T23:34:15Z 127.0.0.5 Test comment 5 
{"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 5"}}} ignored wikibase-item application/json 3 2 2014-02-19T23:34:13Z 127.0.0.3 Test comment 3 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 3"}}} ignored wikibase-item application/json 2 1 2014-02-19T23:34:12Z 127.0.0.2 Test comment 2 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 2"}}} ignored wikibase-item application/json Wikidata:Contact the development team 4 181 110689111 110689110 2014-02-20T23:34:11Z User 1 1001 Test comment 1 Test wikitext 1 Line 2 Line 3 ignored wikitext text/x-wiki 110689112 110689111 2014-02-20T23:34:12Z User 2 1002 Test comment 2 Test wikitext 2 Line 2 Line 3 ignored wikitext text/x-wiki Property:P1 120 12345 10004 10003 2014-02-19T23:34:14Z 127.0.0.4 Test comment 10004 {"id":"P1","type":"property","labels":{"en":{"language":"en","value":"Revision 10004"}},"datatype":"wikibase-item"} ignored wikibase-property application/json 10005 10004 2014-02-19T23:34:15Z 127.0.0.5 Test comment 10005 {"id":"P1","type":"property","labels":{"en":{"language":"en","value":"Revision 10005"}},"datatype":"wikibase-item"} ignored wikibase-property application/json Lexeme:L1 122 1235667 100009 8897 2014-02-19T23:34:19Z 127.0.0.9 Test comment 10009 {"type":"lexeme","id":"L1","lexicalCategory":"Q4","language":"Q9"} ignored wikibase-lexeme application/json 100010 8898 2014-02-19T23:34:10Z 127.0.0.10 Test comment 10010 {"type":"lexeme","id":"L1","lexicalCategory":"Q5","language":"Q10"} ignored wikibase-lexeme application/json Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/mock-dump-header.xml000066400000000000000000000017671444772566300274020ustar00rootroot00000000000000 Wikidata Toolkit Test http://example.org/wiki/Main_Page Markus 1.23wmf14 first-letter Media Special Talk Property Property talk Translations Translations talk Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/mock-dump-incomplete-revision.xml000066400000000000000000000034001444772566300321270ustar00rootroot00000000000000 Wikidata Toolkit Test http://example.org/wiki/Main_Page Markus 1.23wmf14 first-letter Media Special Talk Property Property talk Translations Translations talk Q1 0 32 4 3 2014-02-19T23:34:14Z 127.0.0.4 Test comment 4 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 4"}}} ignored wikibase-item application/json 5 4 2014-02-19T23:34:15Z 127.0.0.5Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/mock-dump-with-bugs.json000066400000000000000000001771301444772566300302320ustar00rootroot00000000000000[ {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q1","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1454986}},"datatype":"wikibase-item"},"id":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":[],"descriptions":{},"sitelinks":{},"id":"Q8","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":331769}},"datatype":"wikibase-item"},"id":"q8$E2EFA381-BA5D-4F52-AF74-660B9A044C1E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":9415}},"datatype":"wikibase-item"},"id":"q8$5DB9C22E-4D86-4FB1-AC36-5C75CC806D6A","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P16","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P19","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608756}},"datatype":"wikibase-item"},"id":"P19$84157cf4-4650-a50a-72ce-fbaa254e7f34","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P19$affa32c3-4cd8-1842-bc28-35b3288cce5d","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18635217}},"datatype":"wikibase-item"},"id":"P19$4a666229-40c1-474f-a5a0-7ff90f767207","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P22","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P22$78f8ae50-4e81-55cb-119a-f5c49b828c37","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q23","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"q23$935f9100-47ca-f387-7946-45f9db09e81f","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q24","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":15632617}},"datatype":"wikibase-item"},"id":"q24$4E4D9DE3-CDC7-48E5-A644-18489D523EA1","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q31","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q31$B3580D37-D30E-4BC6-A591-2CE49253CAB4","type":"statement"},{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":43702}},"datatype":"wikibase-item"},"id":"q31$D548FD84-0A96-4954-B672-1C4A4943DEE0","type":"statement"},{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10000}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q31$C082AE58-D018-4E46-89E5-1130E01F7632","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q31$5c117765-4eb5-0073-f352-9a9b81f21f59","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q31$d3b6fe0e-4a05-0834-4959-ad43f9b47c99","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q31$84b4d16f-4773-ea52-e851-3abe5ea24089","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P31","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q33","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P304":[{"snaktype":"value","property":"P304","datavalue":{"type":"string","value":"603"}}],"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":14334357}},"datatype":"wikibase-item"}],"P1683":[{"snaktype":"value","property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"fi","text":"Joulukuun kuudentena päivänä vuonna 1917 Suomen eduskunta hyväksyi senaatin ilmoituksen siitä, että Suomi oli nyt 
itsenäinen."}},"datatype":"monolingualtext"}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":14334357}},"datatype":"wikibase-item"},{"property":"P304","datavalue":{"type":"string","value":"603"}},{"property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"fi","text":"Joulukuun kuudentena päivänä vuonna 1917 Suomen eduskunta hyväksyi senaatin ilmoituksen siitä, että Suomi oli nyt itsenäinen."}},"datatype":"monolingualtext"}],"snaks-order":["P248","P304","P1683"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001917-12-06T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q33$CBE1D73C-6F18-45E6-A437-7657B825E87E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q33$1D955803-700D-4B70-997F-2ABB4C084EB2","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001995-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q33$81CCBEAB-A5E7-404A-B7E3-E46B240E179F","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001955-12-14T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q33$0888ad3b-482b-1629-7deb-a9394955ce7a","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001989-05-05T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q33$54d5a285-4fd3-82a3-57ae-9b12b7ab2148","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":179164}},"datatype":"wikibase-item"},"id":"Q33$cdab5cb1-4e80-6b08-7f5b-bbbacc3db6ca","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P35","claims":{}}, 
{"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P36","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615777}},"datatype":"wikibase-item"},"id":"P36$b05e52e4-49bb-04bc-0efe-c857f4ee6010","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P37","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18616084}},"datatype":"wikibase-item"},"id":"P37$96d50800-4435-2965-dfbc-337388287f9f","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P39","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P39$5012a41d-40d0-cbfb-24b5-3a416b151549","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q42","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P214":[{"snaktype":"value","property":"P214","datavalue":{"type":"string","value":"113230702"}}],"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":54919}},"datatype":"wikibase-item"}],"P813":[{"snaktype":"value","property":"P813","datavalue":{"type":"time","value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":54919}},"datatype":"wikibase-item"},{"property":"P214","datavalue":{"type":"string","value":"113230702"}},{"property":"P813","datavalue":{"type":"time","value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}],"snaks-order":["P248","P214","P813"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"Q42$F078E5B3-F9A8-480E-B7AC-D97778CBBEF9","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q45","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q45$7B1056BA-FDFB-49D6-9F83-0801DDC93535","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q45$26b2860a-4d55-fe91-d6ef-1419a7573981","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q45$b143acfe-4531-1b56-22f4-ec5f6045d473","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q51","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5107}},"datatype":"wikibase-item"},"id":"q51$1156F414-87F1-46BB-BABA-8AAF3DA0F45C","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P53","claims":[]}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P54","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P54$df12171d-484f-7891-b0ed-6981024f2e23","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q55","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":15304003}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001954-12-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q55$E3418C6B-2F14-4FF2-AE16-C3B3C09F9C88","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1763527}},"datatype":"wikibase-item"},"id":"Q55$474B29F2-F2A6-49AD-A1EC-7CD0C757D3E4","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7275}},"datatype":"wikibase-item"},"id":"Q55$0c63c3a1-48b0-6eaf-9edc-dd024518ae4c","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q58","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q62","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":13218382}},"datatype":"wikibase-item"},"id":"q62$C535861D-BC41-4B6E-93AD-0C46B961B700","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1549591}},"datatype":"wikibase-item"},"id":"Q62$fd2fa7d3-4fa9-e44c-fd24-547380174bbc","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"Q62$b21e6bb2-4bb5-7692-d436-d2d22d6bf063","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q68","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":12468333}},"datatype":"wikibase-item"},"id":"Q68$2C1D1AAF-A295-403E-AFCB-EB902DB5762F","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q75","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1301371}},"datatype":"wikibase-item"},"id":"q75$09CE92CB-C019-4E99-B6A5-4460B5DC1AA2","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P81","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q83","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6686945}},"datatype":"wikibase-item"},"id":"q83$63673D69-9CBD-43E9-AF03-F93523C223E0","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":341}},"datatype":"wikibase-item"},"id":"Q83$6563c8b1-4db3-1231-dbd2-ae02b360c877","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P88","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18618644}},"datatype":"wikibase-item"},"id":"P88$2ddcc1f9-4b77-6b35-5716-0f3f5828c680","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P92","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q99","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":35657}},"datatype":"wikibase-item"},"id":"Q99$AF13EDE9-BD02-43E2-BEE0-1B50977E5247","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q100","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"q100$F5900CEA-A5F7-4600-B7C4-041C6B1F4D3E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1549591}},"datatype":"wikibase-item"},"id":"Q100$ad5b329b-43c9-f6d9-9d0b-a08c1f4f0abb","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q102","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":101991}},"datatype":"wikibase-item"},"id":"q102$8B5A6E17-E645-4F94-AE93-C02515B608F7","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P102","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P102$e96b18d8-4edd-5073-855e-655584de60ac","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q103","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7366}},"datatype":"wikibase-item"},"id":"q103$e723bd53-46db-4968-8595-5537b9c1e707","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P105","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18609040}},"datatype":"wikibase-item"},"id":"P105$46bcec0c-4854-2a1e-c099-949d9cbd9e03","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P108","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P108$5cd59d86-4bfe-c033-7e9e-7103c78f6dd0","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"commonsMedia","id":"P109","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P109$58e62b48-489a-5c14-c77d-1673c1320d15","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P111","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P118","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P119","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608756}},"datatype":"wikibase-item"},"id":"P119$4bf19ee9-4e0b-a15a-405c-8d1afaaa3384","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P119$d9136aab-4751-613f-5306-5df61c98c660","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18635217}},"datatype":"wikibase-item"},"id":"P119$9b6ba302-4c78-5a6b-22e7-8d9ab18c8c01","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P121","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q125","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q129","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":41825}},"datatype":"wikibase-item"},"id":"Q129$1101349A-38FB-45F0-B365-60CD8EEE0ACA","type":"statement"}]}}, 
{"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P131","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615777}},"datatype":"wikibase-item"},"id":"P131$bbea2c26-4fbe-11ba-abba-08baf0ace03a","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q136","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":208440}},"datatype":"wikibase-item"},"id":"q136$64A9A736-8806-47F7-9FB6-686A7DBB87B0","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q140","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":16521}},"datatype":"wikibase-item"},"id":"q140$8EE98E5B-4A9C-4BF5-B456-FB77E8EE4E69","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P143","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608359}},"datatype":"wikibase-item"},"id":"P143$3ff1afa0-4d3e-256c-8efb-d624229044b4","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q144","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":16521}},"datatype":"wikibase-item"},"id":"Q144$3bf4b72c-4d23-6a49-eced-b9a17b81b7d9","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q147","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q148","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q148$F54994F8-1C4C-4929-8CB3-91DCED2A4BDB","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q148$4454ff59-46b6-f73b-df46-d0c03c4505f6","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6688521}},"datatype":"wikibase-item"},"qualifiers":{"P580":[{"snaktype":"value","property":"P580","datavalue":{"type":"time","value":{"time":"+00000001971-10-25T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"}},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q148$9ceccd08-433f-7fba-a5fd-dabf52e6c8e4","type":"statement"},{"rank":"normal","references":[{"snaks":{"P356":[{"snaktype":"value","property":"P356","datavalue":{"type":"string","value":"10.1007/s11127-009-9491-2"}}]},"allSnaks":[{"property":"P356","datavalue":{"type":"string","value":"10.1007/s11127-009-9491-2"}}],"snaks-order":["P356"]},{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.systemicpeace.org/polity/China2010.pdf"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.systemicpeace.org/polity/China2010.pdf"}}],"snaks-order":["P854"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":317}},"datatype":"wikibase-item"},"id":"Q148$c3a4dafb-4062-9e4b-746b-c578822f6dc9","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P149","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q155","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q155$3BF35646-D69E-4BC3-843A-2BFDFEC6D9BB","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6814224}},"datatype":"wikibase-item"},"id":"q155$3147400A-6A8C-48DE-99EB-29F713B4D69E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q155$9740be48-4542-bbea-41b3-fb254858e780","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":859563}},"datatype":"wikibase-item"},"id":"q155$8774ac4a-4da3-0343-7785-b4c161eb89d2","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":4209223}},"datatype":"wikibase-item"},"id":"q155$c87f5e5d-4025-ff4b-b1cd-02760cac0ad2","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q155$c9d9c33f-4ea8-9ff0-269e-f68a51d3f343","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6316828}},"datatype":"wikibase-item"},"id":"Q155$b1db9fc5-486e-8f33-03e1-1399f91d2023","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P155","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615033}},"datatype":"wikibase-item"},"id":"P155$0e2372e9-4af6-6995-fe43-9f925c576afc","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q156","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":174211}},"datatype":"wikibase-item"},"id":"Q156$F86CD234-50BD-43CC-AE32-67065432CF80","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q163","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1138494}},"datatype":"wikibase-item"},"id":"Q163$038a9f31-4821-d78c-9317-b2f7b6da9912","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P163","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q166","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":165}},"datatype":"wikibase-item"},"id":"q166$4F2C4ECC-4E3F-4D97-8830-37A11FFBACE2","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P166","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P166$62623b1f-41b2-e961-ae2f-0598e86abde7","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P169","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P171","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18609040}},"datatype":"wikibase-item"},"id":"P171$8bd4f9b1-4a52-cb35-eeaf-7c3199567c24","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P175","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18618644}},"datatype":"wikibase-item"},"id":"P175$3fda1655-4092-134c-5485-c6cde5a467d1","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q177","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":2095}},"datatype":"wikibase-item"},"id":"Q177$F50982AF-5183-47CD-AE5D-80F1FE2751ED","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q178","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":192874}},"datatype":"wikibase-item"},"id":"Q178$C6F03114-8653-4FC6-B348-9186A3E2E27E","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P178","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q183","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.documentarchiv.de/brd/dtlvertrag.html"}}],"P1683":[{"snaktype":"value","property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der 
Bundesrepublik auflösen."}},"datatype":"monolingualtext"}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.documentarchiv.de/brd/dtlvertrag.html"}},{"property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der Bundesrepublik auflösen."}},"datatype":"monolingualtext"}],"snaks-order":["P854","P1683"]},{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1206379}},"datatype":"wikibase-item"}],"P1683":[{"snaktype":"value","property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der Bundesrepublik auflösen."}},"datatype":"monolingualtext"}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1206379}},"datatype":"wikibase-item"},{"property":"P1683","datavalue":{"type":"monolingualtext","value":{"language":"de","text":"Mit dem Inkrafttreten dieses Vertrags werden die Vereinigten Staaten von Amerika, das Vereinigte Königreich von Großbritannien und Nordirland und die Französische Republik das Besatzungsregime in der Bundesrepublik beenden, das Besatzungsstatut aufheben und die Alliierte Hohe Kommission sowie die Dienststellen der Landeskommissare in der Bundesrepublik auflösen."}},"datatype":"monolingualtext"}],"snaks-order":["P248","P1683"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q183$7F17CEF2-2897-4C4D-9D1C-B31DC979AF5C","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11122}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"DER PRÄSIDENT DER BUNDESREPUBLIK DEUTSCHLAND, [.]HABEN BESCHLOSSEN, eine Europäische Union zu gründen[.]"}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11122}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"DER PRÄSIDENT DER BUNDESREPUBLIK DEUTSCHLAND, [.]HABEN BESCHLOSSEN, eine Europäische Union zu 
gründen[.]"}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q183$03C2E14C-6F87-4B91-81AD-2F5D1CC63DAC","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q183$686c68bb-45a2-c75c-04a9-b287b46e85c8","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":43702}},"datatype":"wikibase-item"},"id":"q183$644285f0-4d8d-5ccc-c142-262260f1abee","type":"statement"},{"rank":"normal","references":[{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.un.org/en/members/index.shtml#g"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.un.org/en/members/index.shtml#g"}}],"snaks-order":["P854"]},{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Mitgliedstaaten_der_Vereinten_Nationen"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Mitgliedstaaten_der_Vereinten_Nationen"}}],"snaks-order":["P854"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q183$edd6bb62-4a91-68db-5b74-56f806b7e7ac","type":"statement"},{"rank":"normal","references":[{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"http://www.coe.int/en/web/portal/germany"}}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Germany became member of the Council of Europe on 13 July 1950."}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"http://www.coe.int/en/web/portal/germany"}},{"property":"P387","datavalue":{"type":"string","value":"Germany became member of the Council of Europe on 13 July 
1950."}}],"snaks-order":["P854","P387"]},{"snaks":{"P854":[{"snaktype":"value","property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Europarat#Mitglieder"}}]},"allSnaks":[{"property":"P854","datavalue":{"type":"string","value":"https://de.wikipedia.org/wiki/Europarat#Mitglieder"}}],"snaks-order":["P854"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q183$e1bcc1c6-4c94-4113-fd58-94cb62b8a6c4","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20 (2) Alle Staatsgewalt geht vom Volke aus. Sie wird vom Volke in Wahlen und Abstimmungen und durch besondere Organe der Gesetzgebung, der vollziehenden Gewalt und der Rechtsprechung ausgeübt."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20 (2) Alle Staatsgewalt geht vom Volke aus. Sie wird vom Volke in Wahlen und Abstimmungen und durch besondere Organe der Gesetzgebung, der vollziehenden Gewalt und der Rechtsprechung ausgeübt."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7270}},"datatype":"wikibase-item"},"id":"Q183$d53fce77-4c47-b46a-0f1b-e53824ab2686","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20(1) Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":619610}},"datatype":"wikibase-item"},"id":"Q183$b76df508-420c-9141-f4e2-abc4e4897332","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Artikel 20(3) Die Gesetzgebung ist an die verfassungsmäßige Ordnung, die vollziehende Gewalt und die Rechtsprechung sind an Gesetz und Recht gebunden."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Artikel 20(3) Die Gesetzgebung ist an die verfassungsmäßige Ordnung, die vollziehende Gewalt und die Rechtsprechung sind an Gesetz und Recht 
gebunden."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":4209223}},"datatype":"wikibase-item"},"id":"Q183$c763e81f-4eee-eabd-9c3a-7164abcfdc46","type":"statement"},{"rank":"normal","references":[{"snaks":{"P248":[{"snaktype":"value","property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"}],"P387":[{"snaktype":"value","property":"P387","datavalue":{"type":"string","value":"Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}]},"allSnaks":[{"property":"P248","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":56045}},"datatype":"wikibase-item"},{"property":"P387","datavalue":{"type":"string","value":"Die Bundesrepublik Deutschland ist ein demokratischer und sozialer Bundesstaat."}}],"snaks-order":["P248","P387"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7275}},"datatype":"wikibase-item"},"id":"Q183$a8d2bb5b-4ba3-22c8-00a4-865b1e622182","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P183","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P189","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18635217}},"datatype":"wikibase-item"},"id":"P189$6c011417-4e64-78b7-cc1d-ff5d0c557abe","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P193","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P195","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q197","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P200","claims":{}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q202","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":21199}},"datatype":"wikibase-item"},"id":"q202$86B5F4BE-FD9F-4FD4-B8A6-9A7BBC857F5E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":50707}},"datatype":"wikibase-item"},"id":"q202$e9b902d9-48a9-0333-4c6f-df5c93308c16","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":467511}},"datatype":"wikibase-item"},"id":"q202$7e294f50-4388-45ff-917a-7d1226b19b92","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":50705}},"datatype":"wikibase-item"},"id":"q202$36d1b208-42a7-7f46-ab14-852889698d89","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":13366104}},"datatype":"wikibase-item"},"id":"Q202$E3C72A56-1B3F-47CE-A090-B6C4D586AFE5","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":190890}},"datatype":"wikibase-item"},"id":"Q202$62042722-84A4-45D7-895B-F018395D9E28","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":975166}},"datatype":"wikibase-item"},"id":"Q202$736D2862-693D-42F1-8688-49DB0146554D","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P206","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18615777}},"datatype":"wikibase-item"},"id":"P206$eeea0757-45ab-a2c0-c942-1f95f0854cc2","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q207","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"q207$54a1c2b9-4b94-2d6c-576d-15ef3c527b14","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P208","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q210","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11352}},"datatype":"wikibase-item"},"id":"q210$2DBF6CA1-DD8D-4CEA-A152-0A37E1C6D217","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q216","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"q216$71CEE092-9B75-4783-B479-F651841ECCEA","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5119}},"datatype":"wikibase-item"},"id":"q216$91CCAEAD-8B4E-4E1B-AC52-9552A411031F","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1363145}},"datatype":"wikibase-item"},"id":"Q216$dff956ef-483d-feec-f6a9-baf0d915e3db","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q217","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q217$5B200529-E18E-4A7F-8070-DBB25C32BD89","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":179164}},"datatype":"wikibase-item"},"id":"Q217$54f34829-44c3-d5d6-eceb-c7d568ed543e","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":123480}},"datatype":"wikibase-item"},"id":"Q217$0ef0ab81-48ae-ec4d-8485-3694b3c08e9e","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"Q217$f1b30a97-45fc-d41d-a165-cacaf267ff77","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q217$41ba6be5-4e51-81f0-0092-c19242403ba7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q217$48955323-4b40-614d-d9a9-43325377b377","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q218","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q218$7887078F-118F-4D38-B2D9-3391EA26A154","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q218$E69D5C77-0362-451A-A619-173844FA5D00","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"Q218$6d5560b4-403a-39ff-dd80-2ee95bab9ca6","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q220","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":515}},"datatype":"wikibase-item"},"id":"q220$093BE690-FEED-49CB-8EB2-BD5FEC47B8A7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":747074}},"datatype":"wikibase-item"},"id":"q220$168e1d6a-44a1-d705-308e-3f771aa38854","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5119}},"datatype":"wikibase-item"},"qualifiers":{"P17":[{"snaktype":"value","property":"P17","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":38}},"datatype":"wikibase-item"}]},"qualifiers-order":["P17"],"id":"q220$8FB724DE-9C66-4711-AED2-014C50EBE3CE","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P220","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P225","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18609040}},"datatype":"wikibase-item"},"id":"P225$c791da3a-40d8-25f2-af7b-452b807a8bd4","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P227","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18614948}},"datatype":"wikibase-item"},"id":"P227$a1625bd2-4359-9c8b-ec9a-87ff82978222","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":853614}},"datatype":"wikibase-item"},"id":"P227$45897b3b-4287-28c1-5411-d084c7d8282d","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q228","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q228$82D5B5DF-AFDC-4B00-89BC-9E3633E5DA0E","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q228$e8956bd3-4d87-5941-52ee-aefb1d55bba0","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q228$111c1a20-4955-b4ff-32fb-7ffadda7340d","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q228$df49ab43-4bab-063a-825c-dbd70a708f4a","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":123480}},"datatype":"wikibase-item"},"id":"Q228$d1d1baf8-4845-597e-8a1e-585739183f09","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":208500}},"datatype":"wikibase-item"},"id":"Q228$0806dcf5-4ae1-e90c-d214-668e7861a059","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P230","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P231","claims":{}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q233","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q233$EAE14727-D2B9-423D-A8FB-451034A292C7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":112099}},"datatype":"wikibase-item"},"id":"q233$78142EAC-A9BA-4791-B850-331C45E68D84","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":185441}},"datatype":"wikibase-item"},"id":"q233$60F83D83-2AA1-4235-9CED-89D56F5502B3","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":179164}},"datatype":"wikibase-item"},"id":"Q233$b77b5c5b-4a09-5863-dec1-2b3f9da9557c","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":160016}},"datatype":"wikibase-item"},"id":"Q233$091bcd4f-462f-efa6-4842-f13dcd4e9b20","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6505795}},"datatype":"wikibase-item"},"id":"Q233$76a86556-4238-8dd9-b57c-1a4cba750b6d","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q244","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":202686}},"datatype":"wikibase-item"},"id":"q244$47574b84-4190-ee0e-47df-f50cc75562e7","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":7275}},"datatype":"wikibase-item"},"id":"Q244$055FCA94-3A3F-41F5-8011-989F5C3D997F","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":112099}},"datatype":"wikibase-item"},"id":"Q244$ad917cb8-4c1a-6970-7cdd-501d0831ad99","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"string","id":"P246","claims":{}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P248","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608359}},"datatype":"wikibase-item"},"id":"P248$6a0671bf-4e3e-d161-1700-8434560468e7","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q257","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":81392}},"datatype":"wikibase-item"},"id":"q257$B04FFD59-16E4-43E4-B503-A9EF09EDC490","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":36161}},"datatype":"wikibase-item"},"id":"q257$BB70B907-96DA-47FB-9A63-99C36CA315DD","type":"statement"}]}}, {"type":"property","aliases":{},"labels":{},"descriptions":{},"datatype":"wikibase-item","id":"P263","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":18608871}},"datatype":"wikibase-item"},"id":"P263$90971614-46a9-7c95-0ac2-64ffd222cd2f","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q277","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":8447}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":8447}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10373548}},"datatype":"wikibase-item"},"id":"q277$D02C900D-2EC6-49CB-9BFE-A055A6F7C275","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q278","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":10373548}},"datatype":"wikibase-item"},"id":"q278$4634A61F-555A-4072-8B54-6F2C20C0DDF1","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q281","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":199360}},"datatype":"wikibase-item"},"id":"Q281$ca84cd87-4e73-58bb-a836-3d91e8895922","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q284","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":199360}},"datatype":"wikibase-item"},"id":"Q284$9417e429-4e4a-7864-4f4d-d7b9513af9de","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q286","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":1074}},"datatype":"wikibase-item"},"id":"Q286$2A823B0D-0465-4BCB-9A78-17418ACDD288","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q288","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11920}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":11920}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":484170}},"datatype":"wikibase-item"},"id":"q288$F922B432-E8A2-47D8-B539-BDAE91BB8018","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q291","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":483394}},"datatype":"wikibase-item"},"id":"Q291$baf8d767-4632-e637-977c-0a2c38e9321f","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q293","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":16521}},"datatype":"wikibase-item"},"id":"Q293$F840EB32-811B-4498-9E03-CD1F03D4240B","type":"statement"}]}}, {"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q297","claims":{"P31":[{"rank":"normal","references":[{"snaks":{"P143":[{"snaktype":"value","property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}]},"allSnaks":[{"property":"P143","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":206855}},"datatype":"wikibase-item"}],"snaks-order":["P143"]}],"mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":5}},"datatype":"wikibase-item"},"id":"Q297$4E477B20-5247-4AAD-8D07-FDF90386CB51","type":"statement"}]}}, 
{"type":"item","aliases":{},"labels":{},"descriptions":{},"sitelinks":{},"id":"Q298","claims":{"P31":[{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":6256}},"datatype":"wikibase-item"},"id":"q298$54472542-9E4E-4AA8-A92F-D8ED19E8AA20","type":"statement"},{"rank":"normal","mainsnak":{"snaktype":"value","property":"P31","datavalue":{"type":"wikibase-entityid","value":{"entity-type":"item","numeric-id":3624078}},"datatype":"wikibase-item"},"id":"q298$15491D60-6363-4B69-92D5-733BD7C5DC07","type":"statement"}]}} ] Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/mock-dump-with-bugs.xml000066400000000000000000000104521444772566300300520ustar00rootroot00000000000000 Wikidata Toolkit Test http://example.org/wiki/Main_Page Markus 1.23wmf14 first-letter Media Special Talk Property Property talk Translations Translations talk Q1 0 32 4 3 2014-02-19T23:34:14Z 127.0.0.4 Test comment 4 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 4"}}} ignored wikibase-item application/json 5 4 2014-02-19T23:34:15Z 127.0.0.5 Test comment 5 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 5"}}} ignored wikibase-item application/json 3 2 2014-02-19T23:34:13Z 127.0.0.3 Test comment 3 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 3"}}} ignored wikibase-item application/json 2 1 2014-02-19T23:34:12Z 127.0.0.2 Test comment 2 {"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"Revision 2"}}} ignored wikibase-item application/json Wikidata:Contact the development team 4 181 110689111 110689110 2014-02-20T23:34:11Z User 1 1001 Test comment 1 Test wikitext 1 Line 2 Line 3 ignored wikitext text/x-wiki 110689112 110689111 2014-02-20T23:34:12Z User 2 1002 Test comment 2 Test wikitext 2 Line 2 Line 3 ignored wikitext text/x-wiki Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/other-incr-wikidatawiki-index.html000066400000000000000000000067761444772566300322670ustar00rootroot00000000000000 Index of /other/incr/wikidatawiki/

Index of /other/incr/wikidatawiki/

Name                   Last Modified           Size   Type
Parent Directory/                              -      Directory
20140208/              2014-Feb-08 20:35:39    -      Directory
20140209/              2014-Feb-09 20:45:49    -      Directory
20140210/              2014-Feb-10 20:46:14    -      Directory
20140211/              2014-Feb-11 20:45:01    -      Directory
20140212/              2014-Feb-12 20:41:55    -      Directory
20140213/              2014-Feb-13 20:53:44    -      Directory
20140214/              2014-Feb-14 20:43:09    -      Directory
20140215/              2014-Feb-15 20:38:28    -      Directory
20140216/              2014-Feb-16 20:38:24    -      Directory
20140217/              2014-Feb-17 20:40:28    -      Directory
20140218/              2014-Feb-18 20:49:27    -      Directory
20140219/              2014-Feb-19 20:45:12    -      Directory
20140220/              2014-Feb-20 20:44:52    -      Directory
20140221/              2014-Feb-21 07:08:54    -      Directory
lighttpd/1.4.26
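The listing above (other-incr-wikidatawiki-index.html, reduced here to its text layer) is the kind of index page from which daily incremental dumps are discovered. A hedged sketch of that discovery step, using only java.util.regex against the page text; the class and method names are illustrative assumptions, and Wikidata Toolkit's real dump discovery works on the actual HTML:

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/*
 * Sketch only: pull the available daily-dump dates (e.g. "20140208")
 * out of the text of an index page like the one above. Hypothetical
 * helper, not Wikidata Toolkit API.
 */
public class IncrementalDumpDates {
    // Date directories look like "20140208/".
    private static final Pattern DATE_DIR = Pattern.compile("\\b(20\\d{6})/");

    public static List<String> extractDates(String indexPageText) {
        List<String> dates = new ArrayList<>();
        Matcher m = DATE_DIR.matcher(indexPageText);
        while (m.find()) {
            dates.add(m.group(1)); // "20140208", "20140209", ...
        }
        return dates;
    }

    public static void main(String[] args) {
        String sample = "20140208/   2014-Feb-08 20:35:39   -   Directory\n"
                + "20140209/   2014-Feb-09 20:45:49   -   Directory\n";
        System.out.println(extractDates(sample)); // [20140208, 20140209]
    }
}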
Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/other-wikidata-index.html000066400000000000000000000023661444772566300304410ustar00rootroot00000000000000 Index of /other/wikidata/

Index of /other/wikidata/


../
20141117.json.gz                                   17-Nov-2014 13:09          3695440078
20141124.json.gz                                   24-Nov-2014 13:59          3724244447
20141201.json.gz                                   01-Dec-2014 14:05          3763475517
20141210.json.gz                                   10-Dec-2014 19:23          3783751253
20141215.json.gz                                   15-Dec-2014 13:58          3804251089
20141222.json.gz                                   22-Dec-2014 13:43          3834540622
20141229.json.gz                                   29-Dec-2014 13:49          3867104415
20150105.json.gz                                   05-Jan-2015 13:33          3889428396
20150112.json.gz                                   12-Jan-2015 13:32          3908362534

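The *.json.gz files listed above are full JSON exports: a single JSON array which, in the standard dumps, holds one entity document per line, in the same format as the Q216/Q217 records earlier in this section. A minimal sketch of streaming such a file with GZIPInputStream and plain Jackson, assuming the dump has been downloaded locally; the file name is an assumption, and this is not the Wikidata Toolkit API:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;

/*
 * Sketch only: read a gzipped JSON dump line by line, strip the array
 * brackets and trailing commas, and parse each line as one entity
 * document. Plain Jackson, not the Wikidata Toolkit API.
 */
public class JsonDumpScan {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(
                new GZIPInputStream(new FileInputStream("20141117.json.gz")),
                StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                line = line.trim();
                if (line.isEmpty() || line.equals("[") || line.equals("]")) {
                    continue; // array brackets sit on their own lines
                }
                if (line.endsWith(",")) {
                    line = line.substring(0, line.length() - 1);
                }
                JsonNode entity = mapper.readTree(line);
                // e.g. prints "Q216 item", "P220 property", ...
                System.out.println(entity.path("id").asText() + " "
                        + entity.path("type").asText());
            }
        }
    }
}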
Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/sample-daily-dump-20140220.xml000066400000000000000000006476461444772566300305100ustar00rootroot00000000000000 Wikidata http://www.wikidata.org/wiki/Main_Page MediaWiki 1.23wmf14 first-letter Media Special Talk User User talk Wikidata Wikidata talk File File talk MediaWiki MediaWiki talk Template Template talk Help Help talk Category Category talk Property Property talk Query Query talk Module Module talk Translations Translations talk Q1 0 129 110652822 108507771 2014-02-19T23:34:16Z 201.110.113.186 /* wbsetdescription-set:1|en */me da comezon {"label":{"en":"universe","fr":"Univers","la":"universum","uz":"Olam","ru":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","pl":"Wszech\u015bwiat","nb":"Universet","eo":"universo","it":"universo","es":"universo","de":"Universum","ca":"univers","en-gb":"Universe","de-ch":"Universum","fi":"maailmankaikkeus","nn":"Universet","ja":"\u5b87\u5b99","zh-hant":"\u5b87\u5b99","hr":"Svemir","pt":"universo","simple":"Universe","hu":"vil\u00e1gegyetem","nl":"heelal","ro":"univers","sv":"Universum","gl":"universo","eu":"Unibertso","mk":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","da":"Universet","br":"Hollved","et":"Universum","af":"heelal","cy":"Bydysawd","io":"Universo","ia":"Universo","is":"Alheimurinn","tr":"Evren","cs":"vesm\u00edr","sk":"Vesm\u00edr","uk":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","an":"Universo","az":"Kainat","ast":"Universu","gn":"Arapy","bs":"Svemir","sn":"Rudunhumwe","nv":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","dsb":"Uniwersum","hif":"Sansaar","fo":"Alheimurin","fy":"Hielal","ga":"An Chruinne","hak":"Y\u00ee-chhiu","id":"Alam semesta","jv":"Alam semesta","pam":"Sikluban","csb":"Swiatnica","sw":"Ulimwengu","ht":"Liniv\u00e8","ku":"Gerd\u00fbn","lv":"Visums","lt":"Visata","li":"Universum","lmo":"\u00dcnivers","ms":"Alam semesta","mwl":"Ouniberso","nah":"Cem\u0101n\u0101huac","nds-nl":"Hielal","nap":"Annevierzo","frr":"\u00c5\u00e5l","nrm":"Eunivers","nov":"Universe","oc":"Univ\u00e8rs","pfl":"Weldall","pap":"Universo","nds":"Weltruum","qu":"Ch'askancha","stq":"Al","sq":"Gjith\u00ebsia","scn":"Universu","sl":"Vesolje","sh":"Svemir","su":"Jagat","tl":"Uniberso","war":"Sangkalibutan","bat-smg":"V\u0117sata","vi":"v\u0169 tr\u1ee5","zh-min-nan":"\u00da-ti\u016b","bn":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","ar":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","arc":"\u072c\u0712\u071d\u0720","arz":"\u0643\u0648\u0646","be":"\u0421\u0443\u0441\u0432\u0435\u0442","bg":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","ckb":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","cv":"\u00c7\u0443\u0442 
\u0422\u0115\u043d\u0447\u0435","el":"\u03c3\u03cd\u03bc\u03c0\u03b1\u03bd","fa":"\u06af\u06cc\u062a\u06cc","gu":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","he":"\u05d4\u05d9\u05e7\u05d5\u05dd","hi":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","hy":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","ka":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","kk":"\u04d8\u043b\u0435\u043c","kn":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","ko":"\uc6b0\uc8fc","lez":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","ml":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","mn":"\u041e\u0440\u0447\u043b\u043e\u043d","mr":"\u0935\u093f\u0936\u094d\u0935","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c","ne":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","pnb":"\u06a9\u0627\u0626\u0646\u0627\u062a","rue":"\u0412\u0435\u0441\u043c\u0456\u0440","sr":"\u0421\u0432\u0435\u043c\u0438\u0440","ta":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","te":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","tg":"\u041a\u043e\u0438\u043d\u043e\u0442","th":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","tt":"\u0413\u0430\u043b\u04d9\u043c","ur":"\u06a9\u0627\u0626\u0646\u0627\u062a","xmf":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","yi":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","zh":"\u5b87\u5b99","zh-classical":"\u5b87\u5b99","zh-yue":"\u5b87\u5b99","en-ca":"Universe","pt-br":"universo","yue":"\u5b87\u5b99","zh-cn":"\u5b87\u5b99","zh-hans":"\u5b87\u5b99","zh-sg":"\u5b87\u5b99","zh-my":"\u5b87\u5b99","zh-hk":"\u5b87\u5b99","zh-tw":"\u5b87\u5b99","zh-mo":"\u5b87\u5b99","de-formal":"Universum","si":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","be-x-old":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","ilo":"law-ang","jbo":"munje","vep":"Mir","be-tarask":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","bar":"W\u00f6dall","pms":"Univers","sr-ec":"\u0421\u0432\u0435\u043c\u0438\u0440","sr-el":"Svemir","sco":"Universe","or":"\u0b2c\u0b4d\u0b30\u0b39\u0b4d\u0b2e\u0b3e\u0b23\u0b4d\u0b21"},"description":{"la":"res quae omnem materiam et spatium continet","en":"me da comezon","fr":"ensemble des plan\u00e8tes, des \u00e9toiles, des galaxies, de l'espace intergalactique, ainsi que de toute la mati\u00e8re et de l'\u00e9nergie","pl":"Wszystko, co fizycznie istnieje: ca\u0142a przestrze\u0144, czas, wszystkie formy materii i energii oraz prawa fizyki i sta\u0142e fizyczne okre\u015blaj\u0105ce ich zachowanie.","es":"totalidad del espacio-tiempo, la materia y la energ\u00eda existentes","de":"Gesamtheit aller Planeten, Sterne, Galaxien, des intergalaktischen Raums, und aller Materie und Energie","it":"insieme di tutto ci\u00f2 che esiste","eo":"la tuto de \u0109io ekzistanta, steloj, spaco, materio, energio ...","no":"alt som eksisterer av rom, materie og str\u00e5ling","nb":"alt som eksisterer av rom, materie og str\u00e5ling.","nn":"alt som eksisterer, derfor all fysisk masse og energi, planetar, stjerner, galaksar, og alt i det intergalaktiske rommet","en-gb":"The totality of planets, stars, galaxies, intergalactic space, and all matter and energy","nl":"alle materie en energie binnen het gehele ruimte-tijdcontinu\u00fcm waarin wij bestaan","ko":"\ubb34\ud55c\ud55c \uc2dc\uac04\uacfc \ub9cc\ubb3c\uc744 \ud3ec\ud568\ud558\uace0 \uc788\ub294 \ub05d\uc5c6\ub294 \uacf5\uac04\uc758 \ucd1d\uccb4","ca":"totalitat de planetes, estrelles, gal\u00e0xies, espai intergal\u00e0ctic i tota la mat\u00e8ria i energia","fi":"avaruuden ja siin\u00e4 olevan aineen ja energian muodostama 
kokonaisuus","ru":"\u0444\u0443\u043d\u0434\u0430\u043c\u0435\u043d\u0442\u0430\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043d\u044f\u0442\u0438\u0435 \u0432 \u0430\u0441\u0442\u0440\u043e\u043d\u043e\u043c\u0438\u0438 \u0438 \u0444\u0438\u043b\u043e\u0441\u043e\u0444\u0438\u0438","zh-hans":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hant":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-cn":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-sg":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-my":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hk":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-tw":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-mo":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","ja":"\u60d1\u661f\u3001\u6052\u661f\u3001\u9280\u6cb3\u3001\u9280\u6cb3\u9593\u7a7a\u9593\u3001\u5168\u3066\u306e\u7269\u8cea\u3068\u30a8\u30cd\u30eb\u30ae\u30fc\u306e\u7dcf\u4f53","tr":"y\u0131ld\u0131zlar, gezegenler, gaz, toz, galaksileraras\u0131 madde ve k\u0131saca her \u015fey","uk":"\u0441\u0443\u043a\u0443\u043f\u043d\u0456\u0441\u0442\u044c \u0443\u0441\u044c\u043e\u0433\u043e, \u0449\u043e \u0456\u0441\u043d\u0443\u0454: \u0447\u0430\u0441, \u043f\u0440\u043e\u0441\u0442\u0456\u0440, \u043c\u0430\u0442\u0435\u0440\u0456\u044f, \u0435\u043d\u0435\u0440\u0433\u0456\u044f","pt-br":"Tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia.","ta":"\u0bb5\u0bc6\u0bb3\u0bbf \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0b95\u0bbe\u0bb2\u0bae\u0bcd \u0b86\u0b95\u0bbf\u0baf\u0bb5\u0bb1\u0bcd\u0bb1\u0bbf\u0ba9\u0bcd \u0bae\u0bc1\u0bb4\u0bc1\u0bae\u0bc8","ro":"totalitatea planetelor, stelelor, galaxiilor, spa\u0163iului intergalactic \u015fi al materiei \u015fi energiei","da":"Universet defineres almindeligvis som alt eksisterende, inklusiv planeter, stjerner, galakser, indholdet af det intergalaktiske rum, og alt stof og energi.","fa":"\u0645\u062c\u0645\u0648\u0639\u0647 \u0633\u06cc\u0627\u0631\u0647\u200c\u0647\u0627\u060c \u0633\u062a\u0627\u0631\u06af\u0627\u0646\u060c \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627\u060c \u0641\u0636\u0627\u06cc \u0645\u06cc\u0627\u0646 \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627 \u0648 \u0647\u0645\u0647 \u0645\u0627\u062f\u0647 \u0648 \u0627\u0646\u0631\u0698\u06cc","sv":"Det utrymme som per definition inneh\u00e5ller allting, det vill s\u00e4ga all materia, energi, rumtiden, naturlagarna och alla h\u00e4ndelser.","pt":"tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia","ml":"\u0d2d\u0d57\u0d24\u0d3f\u0d15\u0d2e\u0d3e\u0d2f\u0d3f \u0d28\u0d3f\u0d32\u0d28\u0d3f\u0d7d\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28 \u0d0e\u0d32\u0d4d\u0d32\u0d3e\u0d02 \u0d1a\u0d47\u0d7c\u0d28\u0d4d\u0d28\u0d24\u0d3e\u0d23\u0d4d 
\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","ilo":"totalidad iti pannakaparsua a mairaman dagiti planeta, dagiti bituen, dagiti ariwanas, dagiti linaon ti intergalaktiko a limbang, ken amin a banag ken enerhia","cs":"ve\u0161kerenstvo","tl":"planeta, mga bituin, mga galaksiya, mga nilalaman ng intergalaktikong kalawakan, at lahat ng materya at enerhiya","oc":"ensemble de tot \u00e7\u00f2 qu'exit\u00eds","af":"al die planete, sterre, sterrestelsels en intergalaktiese ruimtes, asook alle energie en materie","sr":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-ec":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-el":"sveukupnost planeta, zvezda, galaksija, intergalakti\u010dkog prostora, i sve materije i energije","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"},"aliases":{"pl":["Kosmos","\u015awiat","Natura","Uniwersum"],"en":["cosmos","The Universe","Space"],"es":["cosmos"],"de":["Weltall","All","Kosmos"],"fr":["Cosmos"],"eo":["Kosmo"],"it":["cosmo","spazio"],"nl":["universum","kosmos","cosmos"],"ca":["cosmos"],"fi":["universumi","kaikkeus"],"hu":["univerzum"],"sv":["Kosmos"],"nds":["Universum","Kosmos"],"fa":["\u062c\u0647\u0627\u0646","\u0639\u0627\u0644\u0645","\u0686\u0631\u062e \u06af\u0631\u062f\u0648\u0646","\u06a9\u06cc\u0647\u0627\u0646","\u06a9\u0627\u06cc\u0646\u0627\u062a","\u0647\u0633\u062a\u06cc"],"ta":["\u0baa\u0bbf\u0bb0\u0baa\u0b9e\u0bcd\u0b9a\u0bae\u0bcd","\u0baa\u0bc7\u0bb0\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0baa\u0bb2\u0bcd\u0bb2\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0b85\u0ba3\u0bcd\u0b9f\u0bb5\u0bc6\u0bb3\u0bbf"],"pt":["Universo","cosmos"],"ml":["\u0d32\u0d4b\u0d15\u0d02","\u0d05\u0d23\u0d4d\u0d21\u0d15\u0d1f\u0d3e\u0d39\u0d02","\u0d2c\u0d4d\u0d30\u0d39\u0d4d\u0d2e\u0d3e\u0d23\u0d4d\u0d21\u0d02"],"ilo":["uniberso","universo"],"cs":["ve\u0161kerenstvo","univerzum"],"my":["\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c (Q1) [edit] 
\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"]},"links":{"enwiki":{"name":"Universe","badges":[]},"dewiki":{"name":"Universum","badges":[]},"hrwiki":{"name":"Svemir","badges":[]},"frwiki":{"name":"Univers","badges":[]},"lawiki":{"name":"Universum","badges":[]},"ptwiki":{"name":"Universo","badges":[]},"fiwiki":{"name":"Maailmankaikkeus","badges":[]},"simplewiki":{"name":"Universe","badges":[]},"jawiki":{"name":"\u5b87\u5b99","badges":[]},"eswiki":{"name":"Universo","badges":[]},"itwiki":{"name":"Universo","badges":[]},"huwiki":{"name":"Vil\u00e1gegyetem","badges":[]},"eowiki":{"name":"Universo","badges":[]},"cawiki":{"name":"Univers","badges":[]},"nlwiki":{"name":"Heelal","badges":[]},"rowiki":{"name":"Univers","badges":[]},"svwiki":{"name":"Universum","badges":[]},"plwiki":{"name":"Wszech\u015bwiat","badges":[]},"glwiki":{"name":"Universo","badges":[]},"euwiki":{"name":"Unibertso","badges":[]},"mkwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"dawiki":{"name":"Universet","badges":[]},"brwiki":{"name":"Hollved","badges":[]},"etwiki":{"name":"Universum","badges":[]},"afwiki":{"name":"Heelal","badges":[]},"cywiki":{"name":"Bydysawd (seryddiaeth)","badges":[]},"iowiki":{"name":"Universo","badges":[]},"iawiki":{"name":"Universo","badges":[]},"iswiki":{"name":"Alheimurinn","badges":[]},"nnwiki":{"name":"Universet","badges":[]},"nowiki":{"name":"Universet","badges":[]},"trwiki":{"name":"Evren","badges":[]},"uzwiki":{"name":"Olam","badges":[]},"ruwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"cswiki":{"name":"Vesm\u00edr","badges":[]},"skwiki":{"name":"Vesm\u00edr","badges":[]},"ukwiki":{"name":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","badges":[]},"anwiki":{"name":"Universo","badges":[]},"azwiki":{"name":"Kainat","badges":[]},"astwiki":{"name":"Universu","badges":[]},"gnwiki":{"name":"Arapy","badges":[]},"bswiki":{"name":"Svemir","badges":[]},"snwiki":{"name":"Rudunhumwe","badges":[]},"nvwiki":{"name":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","badges":[]},"dsbwiki":{"name":"Uniwersum","badges":[]},"hifwiki":{"name":"Sansaar","badges":[]},"fowiki":{"name":"Alheimurin","badges":[]},"fywiki":{"name":"Hielal","badges":[]},"gawiki":{"name":"An Chruinne","badges":[]},"hakwiki":{"name":"Y\u00ee-chhiu","badges":[]},"idwiki":{"name":"Alam semesta","badges":[]},"jvwiki":{"name":"Alam semesta","badges":[]},"pamwiki":{"name":"Sikluban","badges":[]},"csbwiki":{"name":"Swiatnica","badges":[]},"swwiki":{"name":"Ulimwengu","badges":[]},"htwiki":{"name":"Liniv\u00e8","badges":[]},"kuwiki":{"name":"Gerd\u00fbn","badges":[]},"lvwiki":{"name":"Visums","badges":[]},"ltwiki":{"name":"Visata","badges":[]},"liwiki":{"name":"Universum","badges":[]},"lmowiki":{"name":"\u00dcnivers","badges":[]},"mswiki":{"name":"Alam 
semesta","badges":[]},"mwlwiki":{"name":"Ouniberso","badges":[]},"nahwiki":{"name":"Cem\u0101n\u0101huac","badges":[]},"nds_nlwiki":{"name":"Hielal","badges":[]},"napwiki":{"name":"Annevierzo","badges":[]},"frrwiki":{"name":"\u00c5\u00e5l","badges":[]},"nrmwiki":{"name":"Eunivers","badges":[]},"novwiki":{"name":"Universe","badges":[]},"ocwiki":{"name":"Univ\u00e8rs","badges":[]},"pflwiki":{"name":"Weldall","badges":[]},"papwiki":{"name":"Universo","badges":[]},"ndswiki":{"name":"Weltruum","badges":[]},"quwiki":{"name":"Ch'askancha","badges":[]},"stqwiki":{"name":"Al","badges":[]},"sqwiki":{"name":"Gjith\u00ebsia","badges":[]},"scnwiki":{"name":"Universu","badges":[]},"slwiki":{"name":"Vesolje","badges":[]},"shwiki":{"name":"Svemir","badges":[]},"suwiki":{"name":"Jagat","badges":[]},"tlwiki":{"name":"Sansinukob","badges":[]},"warwiki":{"name":"Sangkalibutan","badges":[]},"bat_smgwiki":{"name":"V\u0117sata","badges":[]},"viwiki":{"name":"V\u0169 tr\u1ee5","badges":[]},"zh_min_nanwiki":{"name":"\u00da-ti\u016b","badges":[]},"bnwiki":{"name":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","badges":[]},"arwiki":{"name":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","badges":[]},"arcwiki":{"name":"\u072c\u0712\u071d\u0720","badges":[]},"arzwiki":{"name":"\u0643\u0648\u0646","badges":[]},"bewiki":{"name":"\u0421\u0443\u0441\u0432\u0435\u0442","badges":[]},"bgwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"ckbwiki":{"name":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","badges":[]},"cvwiki":{"name":"\u00c7\u0443\u0442 \u0422\u0115\u043d\u0447\u0435","badges":[]},"elwiki":{"name":"\u03a3\u03cd\u03bc\u03c0\u03b1\u03bd","badges":[]},"fawiki":{"name":"\u06af\u06cc\u062a\u06cc","badges":[]},"guwiki":{"name":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","badges":[]},"hewiki":{"name":"\u05d4\u05d9\u05e7\u05d5\u05dd","badges":[]},"hiwiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"kawiki":{"name":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","badges":[]},"kkwiki":{"name":"\u04d8\u043b\u0435\u043c","badges":[]},"knwiki":{"name":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","badges":[]},"kowiki":{"name":"\uc6b0\uc8fc","badges":[]},"lezwiki":{"name":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","badges":[]},"mlwiki":{"name":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","badges":[]},"mnwiki":{"name":"\u041e\u0440\u0447\u043b\u043e\u043d","badges":[]},"mrwiki":{"name":"\u0935\u093f\u0936\u094d\u0935","badges":[]},"mywiki":{"name":"\u1005\u1000\u103c\u101d\u1020\u102c","badges":[]},"newiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"pnbwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"ruewiki":{"name":"\u0412\u0435\u0441\u043c\u0456\u0440","badges":[]},"srwiki":{"name":"\u0421\u0432\u0435\u043c\u0438\u0440","badges":[]},"tawiki":{"name":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","badges":[]},"tewiki":{"name":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","badges":[]},"tgwiki":{"name":"\u041a\u043e\u0438\u043d\u043e\u0442","badges":[]},"thwiki":{"name":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","badges":[]},"ttwiki":{"name":"\u0413\u0430\u043b\u04d9\u043c","badges":[]},"urwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"xmfwiki":{"name":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","badges":[]},"yiwiki":{"name":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","badges":[]},"zhwiki":{"name":"\u5b87\u5b99","badges":[]},"zh_classicalwiki":{"name":
"\u5b87\u5b99","badges":[]},"zh_yuewiki":{"name":"\u5b87\u5b99","badges":[]},"be_x_oldwiki":{"name":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","badges":[]},"siwiki":{"name":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","badges":[]},"ilowiki":{"name":"Law-ang","badges":[]},"hywiki":{"name":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","badges":[]},"vepwiki":{"name":"Mir","badges":[]},"barwiki":{"name":"W\u00f6dall","badges":[]},"pmswiki":{"name":"Univers","badges":[]},"bawiki":{"name":"\u0492\u0430\u043b\u04d9\u043c","badges":[]},"scowiki":{"name":"Universe","badges":[]},"sowiki":{"name":"Koon","badges":[]},"commonswiki":{"name":"Univers","badges":[]}},"entity":["item",1],"claims":[{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":323}],"q":[],"g":"Q1$e70e289c-471e-36b8-50ff-25612cf24e70","rank":1,"refs":[]},{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":273508}],"q":[],"g":"Q1$7b881a36-4708-3c1e-f05d-fd4eb0322087","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":223557}],"q":[],"g":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":1454986}],"q":[],"g":"q1$442901db-4168-e229-2509-ec9d59d99531","rank":1,"refs":[]},{"m":["value",227,"string","4079154-3"],"q":[],"g":"q1$4E4479B7-920C-4AB3-A405-5F3A2168DE91","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":48183}]]]},{"m":["value",373,"string","Universe"],"q":[],"g":"q1$BD33C4D4-8E79-40FA-BB26-475CA5E732CE","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":328}]]]},{"m":["value",508,"string","7239"],"q":[],"g":"q1$766D285D-5EA2-49FA-BDDE-915E3851ECFD","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":460907}]]]},{"m":["value",18,"string","Hubble ultra deep field.jpg"],"q":[],"g":"q1$fd1de6d2-4522-5d35-5e15-e7e144452ba9","rank":1,"refs":[]},{"m":["value",910,"wikibase-entityid",{"entity-type":"item","numeric-id":5551050}],"q":[],"g":"Q1$41A4AA15-DF3F-49C9-842C-A2AF0BBCAAD0","rank":1,"refs":[]},{"m":["value",349,"string","00574074"],"q":[],"g":"Q1$E0551ECA-8ADE-46E0-AAE7-2C4685C91E89","rank":1,"refs":[]},{"m":["value",361,"wikibase-entityid",{"entity-type":"item","numeric-id":3327819}],"q":[["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":41719}]],"g":"q1$21f31f42-4f4d-79b0-0380-92039776e884","rank":0,"refs":[]},{"m":["value",580,"time",{"time":"-13800000000-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":1,"calendarmodel":"http:\/\/www.wikidata.org\/entity\/Q1985727"}],"q":[["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":15605}],["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":76250}],["value",805,"wikibase-entityid",{"entity-type":"item","numeric-id":500699}]],"g":"Q1$789eef0c-4108-cdda-1a63-505cdd324564","rank":1,"refs":[[["value",248,"wikibase-entityid",{"entity-type":"item","numeric-id":15217920}]]]}]} 9zkqp405sha9pekr1dx47c9feka2045 wikibase-item application/json 110652868 110652822 2014-02-19T23:34:34Z 201.110.113.186 /* wbsetlabel-set:1|en */el mundo de teeabithia {"label":{"en":"el mundo de 
teeabithia","fr":"Univers","la":"universum","uz":"Olam","ru":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","pl":"Wszech\u015bwiat","nb":"Universet","eo":"universo","it":"universo","es":"universo","de":"Universum","ca":"univers","en-gb":"Universe","de-ch":"Universum","fi":"maailmankaikkeus","nn":"Universet","ja":"\u5b87\u5b99","zh-hant":"\u5b87\u5b99","hr":"Svemir","pt":"universo","simple":"Universe","hu":"vil\u00e1gegyetem","nl":"heelal","ro":"univers","sv":"Universum","gl":"universo","eu":"Unibertso","mk":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","da":"Universet","br":"Hollved","et":"Universum","af":"heelal","cy":"Bydysawd","io":"Universo","ia":"Universo","is":"Alheimurinn","tr":"Evren","cs":"vesm\u00edr","sk":"Vesm\u00edr","uk":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","an":"Universo","az":"Kainat","ast":"Universu","gn":"Arapy","bs":"Svemir","sn":"Rudunhumwe","nv":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","dsb":"Uniwersum","hif":"Sansaar","fo":"Alheimurin","fy":"Hielal","ga":"An Chruinne","hak":"Y\u00ee-chhiu","id":"Alam semesta","jv":"Alam semesta","pam":"Sikluban","csb":"Swiatnica","sw":"Ulimwengu","ht":"Liniv\u00e8","ku":"Gerd\u00fbn","lv":"Visums","lt":"Visata","li":"Universum","lmo":"\u00dcnivers","ms":"Alam semesta","mwl":"Ouniberso","nah":"Cem\u0101n\u0101huac","nds-nl":"Hielal","nap":"Annevierzo","frr":"\u00c5\u00e5l","nrm":"Eunivers","nov":"Universe","oc":"Univ\u00e8rs","pfl":"Weldall","pap":"Universo","nds":"Weltruum","qu":"Ch'askancha","stq":"Al","sq":"Gjith\u00ebsia","scn":"Universu","sl":"Vesolje","sh":"Svemir","su":"Jagat","tl":"Uniberso","war":"Sangkalibutan","bat-smg":"V\u0117sata","vi":"v\u0169 tr\u1ee5","zh-min-nan":"\u00da-ti\u016b","bn":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","ar":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","arc":"\u072c\u0712\u071d\u0720","arz":"\u0643\u0648\u0646","be":"\u0421\u0443\u0441\u0432\u0435\u0442","bg":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","ckb":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","cv":"\u00c7\u0443\u0442 
\u0422\u0115\u043d\u0447\u0435","el":"\u03c3\u03cd\u03bc\u03c0\u03b1\u03bd","fa":"\u06af\u06cc\u062a\u06cc","gu":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","he":"\u05d4\u05d9\u05e7\u05d5\u05dd","hi":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","hy":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","ka":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","kk":"\u04d8\u043b\u0435\u043c","kn":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","ko":"\uc6b0\uc8fc","lez":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","ml":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","mn":"\u041e\u0440\u0447\u043b\u043e\u043d","mr":"\u0935\u093f\u0936\u094d\u0935","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c","ne":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","pnb":"\u06a9\u0627\u0626\u0646\u0627\u062a","rue":"\u0412\u0435\u0441\u043c\u0456\u0440","sr":"\u0421\u0432\u0435\u043c\u0438\u0440","ta":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","te":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","tg":"\u041a\u043e\u0438\u043d\u043e\u0442","th":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","tt":"\u0413\u0430\u043b\u04d9\u043c","ur":"\u06a9\u0627\u0626\u0646\u0627\u062a","xmf":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","yi":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","zh":"\u5b87\u5b99","zh-classical":"\u5b87\u5b99","zh-yue":"\u5b87\u5b99","en-ca":"Universe","pt-br":"universo","yue":"\u5b87\u5b99","zh-cn":"\u5b87\u5b99","zh-hans":"\u5b87\u5b99","zh-sg":"\u5b87\u5b99","zh-my":"\u5b87\u5b99","zh-hk":"\u5b87\u5b99","zh-tw":"\u5b87\u5b99","zh-mo":"\u5b87\u5b99","de-formal":"Universum","si":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","be-x-old":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","ilo":"law-ang","jbo":"munje","vep":"Mir","be-tarask":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","bar":"W\u00f6dall","pms":"Univers","sr-ec":"\u0421\u0432\u0435\u043c\u0438\u0440","sr-el":"Svemir","sco":"Universe","or":"\u0b2c\u0b4d\u0b30\u0b39\u0b4d\u0b2e\u0b3e\u0b23\u0b4d\u0b21"},"description":{"la":"res quae omnem materiam et spatium continet","en":"me da comezon","fr":"ensemble des plan\u00e8tes, des \u00e9toiles, des galaxies, de l'espace intergalactique, ainsi que de toute la mati\u00e8re et de l'\u00e9nergie","pl":"Wszystko, co fizycznie istnieje: ca\u0142a przestrze\u0144, czas, wszystkie formy materii i energii oraz prawa fizyki i sta\u0142e fizyczne okre\u015blaj\u0105ce ich zachowanie.","es":"totalidad del espacio-tiempo, la materia y la energ\u00eda existentes","de":"Gesamtheit aller Planeten, Sterne, Galaxien, des intergalaktischen Raums, und aller Materie und Energie","it":"insieme di tutto ci\u00f2 che esiste","eo":"la tuto de \u0109io ekzistanta, steloj, spaco, materio, energio ...","no":"alt som eksisterer av rom, materie og str\u00e5ling","nb":"alt som eksisterer av rom, materie og str\u00e5ling.","nn":"alt som eksisterer, derfor all fysisk masse og energi, planetar, stjerner, galaksar, og alt i det intergalaktiske rommet","en-gb":"The totality of planets, stars, galaxies, intergalactic space, and all matter and energy","nl":"alle materie en energie binnen het gehele ruimte-tijdcontinu\u00fcm waarin wij bestaan","ko":"\ubb34\ud55c\ud55c \uc2dc\uac04\uacfc \ub9cc\ubb3c\uc744 \ud3ec\ud568\ud558\uace0 \uc788\ub294 \ub05d\uc5c6\ub294 \uacf5\uac04\uc758 \ucd1d\uccb4","ca":"totalitat de planetes, estrelles, gal\u00e0xies, espai intergal\u00e0ctic i tota la mat\u00e8ria i energia","fi":"avaruuden ja siin\u00e4 olevan aineen ja energian muodostama 
kokonaisuus","ru":"\u0444\u0443\u043d\u0434\u0430\u043c\u0435\u043d\u0442\u0430\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043d\u044f\u0442\u0438\u0435 \u0432 \u0430\u0441\u0442\u0440\u043e\u043d\u043e\u043c\u0438\u0438 \u0438 \u0444\u0438\u043b\u043e\u0441\u043e\u0444\u0438\u0438","zh-hans":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hant":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-cn":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-sg":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-my":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hk":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-tw":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-mo":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","ja":"\u60d1\u661f\u3001\u6052\u661f\u3001\u9280\u6cb3\u3001\u9280\u6cb3\u9593\u7a7a\u9593\u3001\u5168\u3066\u306e\u7269\u8cea\u3068\u30a8\u30cd\u30eb\u30ae\u30fc\u306e\u7dcf\u4f53","tr":"y\u0131ld\u0131zlar, gezegenler, gaz, toz, galaksileraras\u0131 madde ve k\u0131saca her \u015fey","uk":"\u0441\u0443\u043a\u0443\u043f\u043d\u0456\u0441\u0442\u044c \u0443\u0441\u044c\u043e\u0433\u043e, \u0449\u043e \u0456\u0441\u043d\u0443\u0454: \u0447\u0430\u0441, \u043f\u0440\u043e\u0441\u0442\u0456\u0440, \u043c\u0430\u0442\u0435\u0440\u0456\u044f, \u0435\u043d\u0435\u0440\u0433\u0456\u044f","pt-br":"Tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia.","ta":"\u0bb5\u0bc6\u0bb3\u0bbf \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0b95\u0bbe\u0bb2\u0bae\u0bcd \u0b86\u0b95\u0bbf\u0baf\u0bb5\u0bb1\u0bcd\u0bb1\u0bbf\u0ba9\u0bcd \u0bae\u0bc1\u0bb4\u0bc1\u0bae\u0bc8","ro":"totalitatea planetelor, stelelor, galaxiilor, spa\u0163iului intergalactic \u015fi al materiei \u015fi energiei","da":"Universet defineres almindeligvis som alt eksisterende, inklusiv planeter, stjerner, galakser, indholdet af det intergalaktiske rum, og alt stof og energi.","fa":"\u0645\u062c\u0645\u0648\u0639\u0647 \u0633\u06cc\u0627\u0631\u0647\u200c\u0647\u0627\u060c \u0633\u062a\u0627\u0631\u06af\u0627\u0646\u060c \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627\u060c \u0641\u0636\u0627\u06cc \u0645\u06cc\u0627\u0646 \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627 \u0648 \u0647\u0645\u0647 \u0645\u0627\u062f\u0647 \u0648 \u0627\u0646\u0631\u0698\u06cc","sv":"Det utrymme som per definition inneh\u00e5ller allting, det vill s\u00e4ga all materia, energi, rumtiden, naturlagarna och alla h\u00e4ndelser.","pt":"tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia","ml":"\u0d2d\u0d57\u0d24\u0d3f\u0d15\u0d2e\u0d3e\u0d2f\u0d3f \u0d28\u0d3f\u0d32\u0d28\u0d3f\u0d7d\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28 \u0d0e\u0d32\u0d4d\u0d32\u0d3e\u0d02 \u0d1a\u0d47\u0d7c\u0d28\u0d4d\u0d28\u0d24\u0d3e\u0d23\u0d4d 
\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","ilo":"totalidad iti pannakaparsua a mairaman dagiti planeta, dagiti bituen, dagiti ariwanas, dagiti linaon ti intergalaktiko a limbang, ken amin a banag ken enerhia","cs":"ve\u0161kerenstvo","tl":"planeta, mga bituin, mga galaksiya, mga nilalaman ng intergalaktikong kalawakan, at lahat ng materya at enerhiya","oc":"ensemble de tot \u00e7\u00f2 qu'exit\u00eds","af":"al die planete, sterre, sterrestelsels en intergalaktiese ruimtes, asook alle energie en materie","sr":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-ec":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-el":"sveukupnost planeta, zvezda, galaksija, intergalakti\u010dkog prostora, i sve materije i energije","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"},"aliases":{"pl":["Kosmos","\u015awiat","Natura","Uniwersum"],"en":["cosmos","The Universe","Space"],"es":["cosmos"],"de":["Weltall","All","Kosmos"],"fr":["Cosmos"],"eo":["Kosmo"],"it":["cosmo","spazio"],"nl":["universum","kosmos","cosmos"],"ca":["cosmos"],"fi":["universumi","kaikkeus"],"hu":["univerzum"],"sv":["Kosmos"],"nds":["Universum","Kosmos"],"fa":["\u062c\u0647\u0627\u0646","\u0639\u0627\u0644\u0645","\u0686\u0631\u062e \u06af\u0631\u062f\u0648\u0646","\u06a9\u06cc\u0647\u0627\u0646","\u06a9\u0627\u06cc\u0646\u0627\u062a","\u0647\u0633\u062a\u06cc"],"ta":["\u0baa\u0bbf\u0bb0\u0baa\u0b9e\u0bcd\u0b9a\u0bae\u0bcd","\u0baa\u0bc7\u0bb0\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0baa\u0bb2\u0bcd\u0bb2\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0b85\u0ba3\u0bcd\u0b9f\u0bb5\u0bc6\u0bb3\u0bbf"],"pt":["Universo","cosmos"],"ml":["\u0d32\u0d4b\u0d15\u0d02","\u0d05\u0d23\u0d4d\u0d21\u0d15\u0d1f\u0d3e\u0d39\u0d02","\u0d2c\u0d4d\u0d30\u0d39\u0d4d\u0d2e\u0d3e\u0d23\u0d4d\u0d21\u0d02"],"ilo":["uniberso","universo"],"cs":["ve\u0161kerenstvo","univerzum"],"my":["\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c (Q1) [edit] 
\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"]},"links":{"enwiki":{"name":"Universe","badges":[]},"dewiki":{"name":"Universum","badges":[]},"hrwiki":{"name":"Svemir","badges":[]},"frwiki":{"name":"Univers","badges":[]},"lawiki":{"name":"Universum","badges":[]},"ptwiki":{"name":"Universo","badges":[]},"fiwiki":{"name":"Maailmankaikkeus","badges":[]},"simplewiki":{"name":"Universe","badges":[]},"jawiki":{"name":"\u5b87\u5b99","badges":[]},"eswiki":{"name":"Universo","badges":[]},"itwiki":{"name":"Universo","badges":[]},"huwiki":{"name":"Vil\u00e1gegyetem","badges":[]},"eowiki":{"name":"Universo","badges":[]},"cawiki":{"name":"Univers","badges":[]},"nlwiki":{"name":"Heelal","badges":[]},"rowiki":{"name":"Univers","badges":[]},"svwiki":{"name":"Universum","badges":[]},"plwiki":{"name":"Wszech\u015bwiat","badges":[]},"glwiki":{"name":"Universo","badges":[]},"euwiki":{"name":"Unibertso","badges":[]},"mkwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"dawiki":{"name":"Universet","badges":[]},"brwiki":{"name":"Hollved","badges":[]},"etwiki":{"name":"Universum","badges":[]},"afwiki":{"name":"Heelal","badges":[]},"cywiki":{"name":"Bydysawd (seryddiaeth)","badges":[]},"iowiki":{"name":"Universo","badges":[]},"iawiki":{"name":"Universo","badges":[]},"iswiki":{"name":"Alheimurinn","badges":[]},"nnwiki":{"name":"Universet","badges":[]},"nowiki":{"name":"Universet","badges":[]},"trwiki":{"name":"Evren","badges":[]},"uzwiki":{"name":"Olam","badges":[]},"ruwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"cswiki":{"name":"Vesm\u00edr","badges":[]},"skwiki":{"name":"Vesm\u00edr","badges":[]},"ukwiki":{"name":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","badges":[]},"anwiki":{"name":"Universo","badges":[]},"azwiki":{"name":"Kainat","badges":[]},"astwiki":{"name":"Universu","badges":[]},"gnwiki":{"name":"Arapy","badges":[]},"bswiki":{"name":"Svemir","badges":[]},"snwiki":{"name":"Rudunhumwe","badges":[]},"nvwiki":{"name":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","badges":[]},"dsbwiki":{"name":"Uniwersum","badges":[]},"hifwiki":{"name":"Sansaar","badges":[]},"fowiki":{"name":"Alheimurin","badges":[]},"fywiki":{"name":"Hielal","badges":[]},"gawiki":{"name":"An Chruinne","badges":[]},"hakwiki":{"name":"Y\u00ee-chhiu","badges":[]},"idwiki":{"name":"Alam semesta","badges":[]},"jvwiki":{"name":"Alam semesta","badges":[]},"pamwiki":{"name":"Sikluban","badges":[]},"csbwiki":{"name":"Swiatnica","badges":[]},"swwiki":{"name":"Ulimwengu","badges":[]},"htwiki":{"name":"Liniv\u00e8","badges":[]},"kuwiki":{"name":"Gerd\u00fbn","badges":[]},"lvwiki":{"name":"Visums","badges":[]},"ltwiki":{"name":"Visata","badges":[]},"liwiki":{"name":"Universum","badges":[]},"lmowiki":{"name":"\u00dcnivers","badges":[]},"mswiki":{"name":"Alam 
semesta","badges":[]},"mwlwiki":{"name":"Ouniberso","badges":[]},"nahwiki":{"name":"Cem\u0101n\u0101huac","badges":[]},"nds_nlwiki":{"name":"Hielal","badges":[]},"napwiki":{"name":"Annevierzo","badges":[]},"frrwiki":{"name":"\u00c5\u00e5l","badges":[]},"nrmwiki":{"name":"Eunivers","badges":[]},"novwiki":{"name":"Universe","badges":[]},"ocwiki":{"name":"Univ\u00e8rs","badges":[]},"pflwiki":{"name":"Weldall","badges":[]},"papwiki":{"name":"Universo","badges":[]},"ndswiki":{"name":"Weltruum","badges":[]},"quwiki":{"name":"Ch'askancha","badges":[]},"stqwiki":{"name":"Al","badges":[]},"sqwiki":{"name":"Gjith\u00ebsia","badges":[]},"scnwiki":{"name":"Universu","badges":[]},"slwiki":{"name":"Vesolje","badges":[]},"shwiki":{"name":"Svemir","badges":[]},"suwiki":{"name":"Jagat","badges":[]},"tlwiki":{"name":"Sansinukob","badges":[]},"warwiki":{"name":"Sangkalibutan","badges":[]},"bat_smgwiki":{"name":"V\u0117sata","badges":[]},"viwiki":{"name":"V\u0169 tr\u1ee5","badges":[]},"zh_min_nanwiki":{"name":"\u00da-ti\u016b","badges":[]},"bnwiki":{"name":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","badges":[]},"arwiki":{"name":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","badges":[]},"arcwiki":{"name":"\u072c\u0712\u071d\u0720","badges":[]},"arzwiki":{"name":"\u0643\u0648\u0646","badges":[]},"bewiki":{"name":"\u0421\u0443\u0441\u0432\u0435\u0442","badges":[]},"bgwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"ckbwiki":{"name":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","badges":[]},"cvwiki":{"name":"\u00c7\u0443\u0442 \u0422\u0115\u043d\u0447\u0435","badges":[]},"elwiki":{"name":"\u03a3\u03cd\u03bc\u03c0\u03b1\u03bd","badges":[]},"fawiki":{"name":"\u06af\u06cc\u062a\u06cc","badges":[]},"guwiki":{"name":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","badges":[]},"hewiki":{"name":"\u05d4\u05d9\u05e7\u05d5\u05dd","badges":[]},"hiwiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"kawiki":{"name":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","badges":[]},"kkwiki":{"name":"\u04d8\u043b\u0435\u043c","badges":[]},"knwiki":{"name":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","badges":[]},"kowiki":{"name":"\uc6b0\uc8fc","badges":[]},"lezwiki":{"name":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","badges":[]},"mlwiki":{"name":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","badges":[]},"mnwiki":{"name":"\u041e\u0440\u0447\u043b\u043e\u043d","badges":[]},"mrwiki":{"name":"\u0935\u093f\u0936\u094d\u0935","badges":[]},"mywiki":{"name":"\u1005\u1000\u103c\u101d\u1020\u102c","badges":[]},"newiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"pnbwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"ruewiki":{"name":"\u0412\u0435\u0441\u043c\u0456\u0440","badges":[]},"srwiki":{"name":"\u0421\u0432\u0435\u043c\u0438\u0440","badges":[]},"tawiki":{"name":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","badges":[]},"tewiki":{"name":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","badges":[]},"tgwiki":{"name":"\u041a\u043e\u0438\u043d\u043e\u0442","badges":[]},"thwiki":{"name":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","badges":[]},"ttwiki":{"name":"\u0413\u0430\u043b\u04d9\u043c","badges":[]},"urwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"xmfwiki":{"name":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","badges":[]},"yiwiki":{"name":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","badges":[]},"zhwiki":{"name":"\u5b87\u5b99","badges":[]},"zh_classicalwiki":{"name":
"\u5b87\u5b99","badges":[]},"zh_yuewiki":{"name":"\u5b87\u5b99","badges":[]},"be_x_oldwiki":{"name":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","badges":[]},"siwiki":{"name":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","badges":[]},"ilowiki":{"name":"Law-ang","badges":[]},"hywiki":{"name":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","badges":[]},"vepwiki":{"name":"Mir","badges":[]},"barwiki":{"name":"W\u00f6dall","badges":[]},"pmswiki":{"name":"Univers","badges":[]},"bawiki":{"name":"\u0492\u0430\u043b\u04d9\u043c","badges":[]},"scowiki":{"name":"Universe","badges":[]},"sowiki":{"name":"Koon","badges":[]},"commonswiki":{"name":"Univers","badges":[]}},"entity":["item",1],"claims":[{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":323}],"q":[],"g":"Q1$e70e289c-471e-36b8-50ff-25612cf24e70","rank":1,"refs":[]},{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":273508}],"q":[],"g":"Q1$7b881a36-4708-3c1e-f05d-fd4eb0322087","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":223557}],"q":[],"g":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":1454986}],"q":[],"g":"q1$442901db-4168-e229-2509-ec9d59d99531","rank":1,"refs":[]},{"m":["value",227,"string","4079154-3"],"q":[],"g":"q1$4E4479B7-920C-4AB3-A405-5F3A2168DE91","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":48183}]]]},{"m":["value",373,"string","Universe"],"q":[],"g":"q1$BD33C4D4-8E79-40FA-BB26-475CA5E732CE","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":328}]]]},{"m":["value",508,"string","7239"],"q":[],"g":"q1$766D285D-5EA2-49FA-BDDE-915E3851ECFD","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":460907}]]]},{"m":["value",18,"string","Hubble ultra deep field.jpg"],"q":[],"g":"q1$fd1de6d2-4522-5d35-5e15-e7e144452ba9","rank":1,"refs":[]},{"m":["value",910,"wikibase-entityid",{"entity-type":"item","numeric-id":5551050}],"q":[],"g":"Q1$41A4AA15-DF3F-49C9-842C-A2AF0BBCAAD0","rank":1,"refs":[]},{"m":["value",349,"string","00574074"],"q":[],"g":"Q1$E0551ECA-8ADE-46E0-AAE7-2C4685C91E89","rank":1,"refs":[]},{"m":["value",361,"wikibase-entityid",{"entity-type":"item","numeric-id":3327819}],"q":[["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":41719}]],"g":"q1$21f31f42-4f4d-79b0-0380-92039776e884","rank":0,"refs":[]},{"m":["value",580,"time",{"time":"-13800000000-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":1,"calendarmodel":"http:\/\/www.wikidata.org\/entity\/Q1985727"}],"q":[["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":15605}],["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":76250}],["value",805,"wikibase-entityid",{"entity-type":"item","numeric-id":500699}]],"g":"Q1$789eef0c-4108-cdda-1a63-505cdd324564","rank":1,"refs":[[["value",248,"wikibase-entityid",{"entity-type":"item","numeric-id":15217920}]]]}]} jpt6jq8173x4a549apys1rc79cqwnrk wikibase-item application/json 110652942 110652868 2014-02-19T23:35:05Z 201.110.113.186 /* wbsetdescription-set:1|es */todo lo que nos rodea {"label":{"en":"el mundo de 
teeabithia","fr":"Univers","la":"universum","uz":"Olam","ru":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","pl":"Wszech\u015bwiat","nb":"Universet","eo":"universo","it":"universo","es":"universo","de":"Universum","ca":"univers","en-gb":"Universe","de-ch":"Universum","fi":"maailmankaikkeus","nn":"Universet","ja":"\u5b87\u5b99","zh-hant":"\u5b87\u5b99","hr":"Svemir","pt":"universo","simple":"Universe","hu":"vil\u00e1gegyetem","nl":"heelal","ro":"univers","sv":"Universum","gl":"universo","eu":"Unibertso","mk":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","da":"Universet","br":"Hollved","et":"Universum","af":"heelal","cy":"Bydysawd","io":"Universo","ia":"Universo","is":"Alheimurinn","tr":"Evren","cs":"vesm\u00edr","sk":"Vesm\u00edr","uk":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","an":"Universo","az":"Kainat","ast":"Universu","gn":"Arapy","bs":"Svemir","sn":"Rudunhumwe","nv":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","dsb":"Uniwersum","hif":"Sansaar","fo":"Alheimurin","fy":"Hielal","ga":"An Chruinne","hak":"Y\u00ee-chhiu","id":"Alam semesta","jv":"Alam semesta","pam":"Sikluban","csb":"Swiatnica","sw":"Ulimwengu","ht":"Liniv\u00e8","ku":"Gerd\u00fbn","lv":"Visums","lt":"Visata","li":"Universum","lmo":"\u00dcnivers","ms":"Alam semesta","mwl":"Ouniberso","nah":"Cem\u0101n\u0101huac","nds-nl":"Hielal","nap":"Annevierzo","frr":"\u00c5\u00e5l","nrm":"Eunivers","nov":"Universe","oc":"Univ\u00e8rs","pfl":"Weldall","pap":"Universo","nds":"Weltruum","qu":"Ch'askancha","stq":"Al","sq":"Gjith\u00ebsia","scn":"Universu","sl":"Vesolje","sh":"Svemir","su":"Jagat","tl":"Uniberso","war":"Sangkalibutan","bat-smg":"V\u0117sata","vi":"v\u0169 tr\u1ee5","zh-min-nan":"\u00da-ti\u016b","bn":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","ar":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","arc":"\u072c\u0712\u071d\u0720","arz":"\u0643\u0648\u0646","be":"\u0421\u0443\u0441\u0432\u0435\u0442","bg":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","ckb":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","cv":"\u00c7\u0443\u0442 
\u0422\u0115\u043d\u0447\u0435","el":"\u03c3\u03cd\u03bc\u03c0\u03b1\u03bd","fa":"\u06af\u06cc\u062a\u06cc","gu":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","he":"\u05d4\u05d9\u05e7\u05d5\u05dd","hi":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","hy":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","ka":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","kk":"\u04d8\u043b\u0435\u043c","kn":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","ko":"\uc6b0\uc8fc","lez":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","ml":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","mn":"\u041e\u0440\u0447\u043b\u043e\u043d","mr":"\u0935\u093f\u0936\u094d\u0935","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c","ne":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","pnb":"\u06a9\u0627\u0626\u0646\u0627\u062a","rue":"\u0412\u0435\u0441\u043c\u0456\u0440","sr":"\u0421\u0432\u0435\u043c\u0438\u0440","ta":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","te":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","tg":"\u041a\u043e\u0438\u043d\u043e\u0442","th":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","tt":"\u0413\u0430\u043b\u04d9\u043c","ur":"\u06a9\u0627\u0626\u0646\u0627\u062a","xmf":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","yi":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","zh":"\u5b87\u5b99","zh-classical":"\u5b87\u5b99","zh-yue":"\u5b87\u5b99","en-ca":"Universe","pt-br":"universo","yue":"\u5b87\u5b99","zh-cn":"\u5b87\u5b99","zh-hans":"\u5b87\u5b99","zh-sg":"\u5b87\u5b99","zh-my":"\u5b87\u5b99","zh-hk":"\u5b87\u5b99","zh-tw":"\u5b87\u5b99","zh-mo":"\u5b87\u5b99","de-formal":"Universum","si":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","be-x-old":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","ilo":"law-ang","jbo":"munje","vep":"Mir","be-tarask":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","bar":"W\u00f6dall","pms":"Univers","sr-ec":"\u0421\u0432\u0435\u043c\u0438\u0440","sr-el":"Svemir","sco":"Universe","or":"\u0b2c\u0b4d\u0b30\u0b39\u0b4d\u0b2e\u0b3e\u0b23\u0b4d\u0b21"},"description":{"la":"res quae omnem materiam et spatium continet","en":"me da comezon","fr":"ensemble des plan\u00e8tes, des \u00e9toiles, des galaxies, de l'espace intergalactique, ainsi que de toute la mati\u00e8re et de l'\u00e9nergie","pl":"Wszystko, co fizycznie istnieje: ca\u0142a przestrze\u0144, czas, wszystkie formy materii i energii oraz prawa fizyki i sta\u0142e fizyczne okre\u015blaj\u0105ce ich zachowanie.","es":"todo lo que nos rodea","de":"Gesamtheit aller Planeten, Sterne, Galaxien, des intergalaktischen Raums, und aller Materie und Energie","it":"insieme di tutto ci\u00f2 che esiste","eo":"la tuto de \u0109io ekzistanta, steloj, spaco, materio, energio ...","no":"alt som eksisterer av rom, materie og str\u00e5ling","nb":"alt som eksisterer av rom, materie og str\u00e5ling.","nn":"alt som eksisterer, derfor all fysisk masse og energi, planetar, stjerner, galaksar, og alt i det intergalaktiske rommet","en-gb":"The totality of planets, stars, galaxies, intergalactic space, and all matter and energy","nl":"alle materie en energie binnen het gehele ruimte-tijdcontinu\u00fcm waarin wij bestaan","ko":"\ubb34\ud55c\ud55c \uc2dc\uac04\uacfc \ub9cc\ubb3c\uc744 \ud3ec\ud568\ud558\uace0 \uc788\ub294 \ub05d\uc5c6\ub294 \uacf5\uac04\uc758 \ucd1d\uccb4","ca":"totalitat de planetes, estrelles, gal\u00e0xies, espai intergal\u00e0ctic i tota la mat\u00e8ria i energia","fi":"avaruuden ja siin\u00e4 olevan aineen ja energian muodostama 
kokonaisuus","ru":"\u0444\u0443\u043d\u0434\u0430\u043c\u0435\u043d\u0442\u0430\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043d\u044f\u0442\u0438\u0435 \u0432 \u0430\u0441\u0442\u0440\u043e\u043d\u043e\u043c\u0438\u0438 \u0438 \u0444\u0438\u043b\u043e\u0441\u043e\u0444\u0438\u0438","zh-hans":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hant":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-cn":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-sg":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-my":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hk":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-tw":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-mo":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","ja":"\u60d1\u661f\u3001\u6052\u661f\u3001\u9280\u6cb3\u3001\u9280\u6cb3\u9593\u7a7a\u9593\u3001\u5168\u3066\u306e\u7269\u8cea\u3068\u30a8\u30cd\u30eb\u30ae\u30fc\u306e\u7dcf\u4f53","tr":"y\u0131ld\u0131zlar, gezegenler, gaz, toz, galaksileraras\u0131 madde ve k\u0131saca her \u015fey","uk":"\u0441\u0443\u043a\u0443\u043f\u043d\u0456\u0441\u0442\u044c \u0443\u0441\u044c\u043e\u0433\u043e, \u0449\u043e \u0456\u0441\u043d\u0443\u0454: \u0447\u0430\u0441, \u043f\u0440\u043e\u0441\u0442\u0456\u0440, \u043c\u0430\u0442\u0435\u0440\u0456\u044f, \u0435\u043d\u0435\u0440\u0433\u0456\u044f","pt-br":"Tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia.","ta":"\u0bb5\u0bc6\u0bb3\u0bbf \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0b95\u0bbe\u0bb2\u0bae\u0bcd \u0b86\u0b95\u0bbf\u0baf\u0bb5\u0bb1\u0bcd\u0bb1\u0bbf\u0ba9\u0bcd \u0bae\u0bc1\u0bb4\u0bc1\u0bae\u0bc8","ro":"totalitatea planetelor, stelelor, galaxiilor, spa\u0163iului intergalactic \u015fi al materiei \u015fi energiei","da":"Universet defineres almindeligvis som alt eksisterende, inklusiv planeter, stjerner, galakser, indholdet af det intergalaktiske rum, og alt stof og energi.","fa":"\u0645\u062c\u0645\u0648\u0639\u0647 \u0633\u06cc\u0627\u0631\u0647\u200c\u0647\u0627\u060c \u0633\u062a\u0627\u0631\u06af\u0627\u0646\u060c \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627\u060c \u0641\u0636\u0627\u06cc \u0645\u06cc\u0627\u0646 \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627 \u0648 \u0647\u0645\u0647 \u0645\u0627\u062f\u0647 \u0648 \u0627\u0646\u0631\u0698\u06cc","sv":"Det utrymme som per definition inneh\u00e5ller allting, det vill s\u00e4ga all materia, energi, rumtiden, naturlagarna och alla h\u00e4ndelser.","pt":"tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia","ml":"\u0d2d\u0d57\u0d24\u0d3f\u0d15\u0d2e\u0d3e\u0d2f\u0d3f \u0d28\u0d3f\u0d32\u0d28\u0d3f\u0d7d\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28 \u0d0e\u0d32\u0d4d\u0d32\u0d3e\u0d02 \u0d1a\u0d47\u0d7c\u0d28\u0d4d\u0d28\u0d24\u0d3e\u0d23\u0d4d 
\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","ilo":"totalidad iti pannakaparsua a mairaman dagiti planeta, dagiti bituen, dagiti ariwanas, dagiti linaon ti intergalaktiko a limbang, ken amin a banag ken enerhia","cs":"ve\u0161kerenstvo","tl":"planeta, mga bituin, mga galaksiya, mga nilalaman ng intergalaktikong kalawakan, at lahat ng materya at enerhiya","oc":"ensemble de tot \u00e7\u00f2 qu'exit\u00eds","af":"al die planete, sterre, sterrestelsels en intergalaktiese ruimtes, asook alle energie en materie","sr":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-ec":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-el":"sveukupnost planeta, zvezda, galaksija, intergalakti\u010dkog prostora, i sve materije i energije","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"},"aliases":{"pl":["Kosmos","\u015awiat","Natura","Uniwersum"],"en":["cosmos","The Universe","Space"],"es":["cosmos"],"de":["Weltall","All","Kosmos"],"fr":["Cosmos"],"eo":["Kosmo"],"it":["cosmo","spazio"],"nl":["universum","kosmos","cosmos"],"ca":["cosmos"],"fi":["universumi","kaikkeus"],"hu":["univerzum"],"sv":["Kosmos"],"nds":["Universum","Kosmos"],"fa":["\u062c\u0647\u0627\u0646","\u0639\u0627\u0644\u0645","\u0686\u0631\u062e \u06af\u0631\u062f\u0648\u0646","\u06a9\u06cc\u0647\u0627\u0646","\u06a9\u0627\u06cc\u0646\u0627\u062a","\u0647\u0633\u062a\u06cc"],"ta":["\u0baa\u0bbf\u0bb0\u0baa\u0b9e\u0bcd\u0b9a\u0bae\u0bcd","\u0baa\u0bc7\u0bb0\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0baa\u0bb2\u0bcd\u0bb2\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0b85\u0ba3\u0bcd\u0b9f\u0bb5\u0bc6\u0bb3\u0bbf"],"pt":["Universo","cosmos"],"ml":["\u0d32\u0d4b\u0d15\u0d02","\u0d05\u0d23\u0d4d\u0d21\u0d15\u0d1f\u0d3e\u0d39\u0d02","\u0d2c\u0d4d\u0d30\u0d39\u0d4d\u0d2e\u0d3e\u0d23\u0d4d\u0d21\u0d02"],"ilo":["uniberso","universo"],"cs":["ve\u0161kerenstvo","univerzum"],"my":["\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c (Q1) [edit] 
\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"]},"links":{"enwiki":{"name":"Universe","badges":[]},"dewiki":{"name":"Universum","badges":[]},"hrwiki":{"name":"Svemir","badges":[]},"frwiki":{"name":"Univers","badges":[]},"lawiki":{"name":"Universum","badges":[]},"ptwiki":{"name":"Universo","badges":[]},"fiwiki":{"name":"Maailmankaikkeus","badges":[]},"simplewiki":{"name":"Universe","badges":[]},"jawiki":{"name":"\u5b87\u5b99","badges":[]},"eswiki":{"name":"Universo","badges":[]},"itwiki":{"name":"Universo","badges":[]},"huwiki":{"name":"Vil\u00e1gegyetem","badges":[]},"eowiki":{"name":"Universo","badges":[]},"cawiki":{"name":"Univers","badges":[]},"nlwiki":{"name":"Heelal","badges":[]},"rowiki":{"name":"Univers","badges":[]},"svwiki":{"name":"Universum","badges":[]},"plwiki":{"name":"Wszech\u015bwiat","badges":[]},"glwiki":{"name":"Universo","badges":[]},"euwiki":{"name":"Unibertso","badges":[]},"mkwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"dawiki":{"name":"Universet","badges":[]},"brwiki":{"name":"Hollved","badges":[]},"etwiki":{"name":"Universum","badges":[]},"afwiki":{"name":"Heelal","badges":[]},"cywiki":{"name":"Bydysawd (seryddiaeth)","badges":[]},"iowiki":{"name":"Universo","badges":[]},"iawiki":{"name":"Universo","badges":[]},"iswiki":{"name":"Alheimurinn","badges":[]},"nnwiki":{"name":"Universet","badges":[]},"nowiki":{"name":"Universet","badges":[]},"trwiki":{"name":"Evren","badges":[]},"uzwiki":{"name":"Olam","badges":[]},"ruwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"cswiki":{"name":"Vesm\u00edr","badges":[]},"skwiki":{"name":"Vesm\u00edr","badges":[]},"ukwiki":{"name":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","badges":[]},"anwiki":{"name":"Universo","badges":[]},"azwiki":{"name":"Kainat","badges":[]},"astwiki":{"name":"Universu","badges":[]},"gnwiki":{"name":"Arapy","badges":[]},"bswiki":{"name":"Svemir","badges":[]},"snwiki":{"name":"Rudunhumwe","badges":[]},"nvwiki":{"name":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","badges":[]},"dsbwiki":{"name":"Uniwersum","badges":[]},"hifwiki":{"name":"Sansaar","badges":[]},"fowiki":{"name":"Alheimurin","badges":[]},"fywiki":{"name":"Hielal","badges":[]},"gawiki":{"name":"An Chruinne","badges":[]},"hakwiki":{"name":"Y\u00ee-chhiu","badges":[]},"idwiki":{"name":"Alam semesta","badges":[]},"jvwiki":{"name":"Alam semesta","badges":[]},"pamwiki":{"name":"Sikluban","badges":[]},"csbwiki":{"name":"Swiatnica","badges":[]},"swwiki":{"name":"Ulimwengu","badges":[]},"htwiki":{"name":"Liniv\u00e8","badges":[]},"kuwiki":{"name":"Gerd\u00fbn","badges":[]},"lvwiki":{"name":"Visums","badges":[]},"ltwiki":{"name":"Visata","badges":[]},"liwiki":{"name":"Universum","badges":[]},"lmowiki":{"name":"\u00dcnivers","badges":[]},"mswiki":{"name":"Alam 
semesta","badges":[]},"mwlwiki":{"name":"Ouniberso","badges":[]},"nahwiki":{"name":"Cem\u0101n\u0101huac","badges":[]},"nds_nlwiki":{"name":"Hielal","badges":[]},"napwiki":{"name":"Annevierzo","badges":[]},"frrwiki":{"name":"\u00c5\u00e5l","badges":[]},"nrmwiki":{"name":"Eunivers","badges":[]},"novwiki":{"name":"Universe","badges":[]},"ocwiki":{"name":"Univ\u00e8rs","badges":[]},"pflwiki":{"name":"Weldall","badges":[]},"papwiki":{"name":"Universo","badges":[]},"ndswiki":{"name":"Weltruum","badges":[]},"quwiki":{"name":"Ch'askancha","badges":[]},"stqwiki":{"name":"Al","badges":[]},"sqwiki":{"name":"Gjith\u00ebsia","badges":[]},"scnwiki":{"name":"Universu","badges":[]},"slwiki":{"name":"Vesolje","badges":[]},"shwiki":{"name":"Svemir","badges":[]},"suwiki":{"name":"Jagat","badges":[]},"tlwiki":{"name":"Sansinukob","badges":[]},"warwiki":{"name":"Sangkalibutan","badges":[]},"bat_smgwiki":{"name":"V\u0117sata","badges":[]},"viwiki":{"name":"V\u0169 tr\u1ee5","badges":[]},"zh_min_nanwiki":{"name":"\u00da-ti\u016b","badges":[]},"bnwiki":{"name":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","badges":[]},"arwiki":{"name":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","badges":[]},"arcwiki":{"name":"\u072c\u0712\u071d\u0720","badges":[]},"arzwiki":{"name":"\u0643\u0648\u0646","badges":[]},"bewiki":{"name":"\u0421\u0443\u0441\u0432\u0435\u0442","badges":[]},"bgwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"ckbwiki":{"name":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","badges":[]},"cvwiki":{"name":"\u00c7\u0443\u0442 \u0422\u0115\u043d\u0447\u0435","badges":[]},"elwiki":{"name":"\u03a3\u03cd\u03bc\u03c0\u03b1\u03bd","badges":[]},"fawiki":{"name":"\u06af\u06cc\u062a\u06cc","badges":[]},"guwiki":{"name":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","badges":[]},"hewiki":{"name":"\u05d4\u05d9\u05e7\u05d5\u05dd","badges":[]},"hiwiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"kawiki":{"name":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","badges":[]},"kkwiki":{"name":"\u04d8\u043b\u0435\u043c","badges":[]},"knwiki":{"name":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","badges":[]},"kowiki":{"name":"\uc6b0\uc8fc","badges":[]},"lezwiki":{"name":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","badges":[]},"mlwiki":{"name":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","badges":[]},"mnwiki":{"name":"\u041e\u0440\u0447\u043b\u043e\u043d","badges":[]},"mrwiki":{"name":"\u0935\u093f\u0936\u094d\u0935","badges":[]},"mywiki":{"name":"\u1005\u1000\u103c\u101d\u1020\u102c","badges":[]},"newiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"pnbwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"ruewiki":{"name":"\u0412\u0435\u0441\u043c\u0456\u0440","badges":[]},"srwiki":{"name":"\u0421\u0432\u0435\u043c\u0438\u0440","badges":[]},"tawiki":{"name":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","badges":[]},"tewiki":{"name":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","badges":[]},"tgwiki":{"name":"\u041a\u043e\u0438\u043d\u043e\u0442","badges":[]},"thwiki":{"name":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","badges":[]},"ttwiki":{"name":"\u0413\u0430\u043b\u04d9\u043c","badges":[]},"urwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"xmfwiki":{"name":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","badges":[]},"yiwiki":{"name":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","badges":[]},"zhwiki":{"name":"\u5b87\u5b99","badges":[]},"zh_classicalwiki":{"name":
"\u5b87\u5b99","badges":[]},"zh_yuewiki":{"name":"\u5b87\u5b99","badges":[]},"be_x_oldwiki":{"name":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","badges":[]},"siwiki":{"name":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","badges":[]},"ilowiki":{"name":"Law-ang","badges":[]},"hywiki":{"name":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","badges":[]},"vepwiki":{"name":"Mir","badges":[]},"barwiki":{"name":"W\u00f6dall","badges":[]},"pmswiki":{"name":"Univers","badges":[]},"bawiki":{"name":"\u0492\u0430\u043b\u04d9\u043c","badges":[]},"scowiki":{"name":"Universe","badges":[]},"sowiki":{"name":"Koon","badges":[]},"commonswiki":{"name":"Univers","badges":[]}},"entity":["item",1],"claims":[{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":323}],"q":[],"g":"Q1$e70e289c-471e-36b8-50ff-25612cf24e70","rank":1,"refs":[]},{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":273508}],"q":[],"g":"Q1$7b881a36-4708-3c1e-f05d-fd4eb0322087","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":223557}],"q":[],"g":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":1454986}],"q":[],"g":"q1$442901db-4168-e229-2509-ec9d59d99531","rank":1,"refs":[]},{"m":["value",227,"string","4079154-3"],"q":[],"g":"q1$4E4479B7-920C-4AB3-A405-5F3A2168DE91","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":48183}]]]},{"m":["value",373,"string","Universe"],"q":[],"g":"q1$BD33C4D4-8E79-40FA-BB26-475CA5E732CE","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":328}]]]},{"m":["value",508,"string","7239"],"q":[],"g":"q1$766D285D-5EA2-49FA-BDDE-915E3851ECFD","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":460907}]]]},{"m":["value",18,"string","Hubble ultra deep field.jpg"],"q":[],"g":"q1$fd1de6d2-4522-5d35-5e15-e7e144452ba9","rank":1,"refs":[]},{"m":["value",910,"wikibase-entityid",{"entity-type":"item","numeric-id":5551050}],"q":[],"g":"Q1$41A4AA15-DF3F-49C9-842C-A2AF0BBCAAD0","rank":1,"refs":[]},{"m":["value",349,"string","00574074"],"q":[],"g":"Q1$E0551ECA-8ADE-46E0-AAE7-2C4685C91E89","rank":1,"refs":[]},{"m":["value",361,"wikibase-entityid",{"entity-type":"item","numeric-id":3327819}],"q":[["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":41719}]],"g":"q1$21f31f42-4f4d-79b0-0380-92039776e884","rank":0,"refs":[]},{"m":["value",580,"time",{"time":"-13800000000-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":1,"calendarmodel":"http:\/\/www.wikidata.org\/entity\/Q1985727"}],"q":[["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":15605}],["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":76250}],["value",805,"wikibase-entityid",{"entity-type":"item","numeric-id":500699}]],"g":"Q1$789eef0c-4108-cdda-1a63-505cdd324564","rank":1,"refs":[[["value",248,"wikibase-entityid",{"entity-type":"item","numeric-id":15217920}]]]}]} 5a53xqlns3wl6a69l7y4k21xi60jq41 wikibase-item application/json 110653008 110652942 2014-02-19T23:35:32Z 201.110.113.186 /* wbsetlabel-set:1|es */universo namekusei {"label":{"en":"el mundo de teeabithia","fr":"Univers","la":"universum","uz":"Olam","ru":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","pl":"Wszech\u015bwiat","nb":"Universet","eo":"universo","it":"universo","es":"universo 
namekusei","de":"Universum","ca":"univers","en-gb":"Universe","de-ch":"Universum","fi":"maailmankaikkeus","nn":"Universet","ja":"\u5b87\u5b99","zh-hant":"\u5b87\u5b99","hr":"Svemir","pt":"universo","simple":"Universe","hu":"vil\u00e1gegyetem","nl":"heelal","ro":"univers","sv":"Universum","gl":"universo","eu":"Unibertso","mk":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","da":"Universet","br":"Hollved","et":"Universum","af":"heelal","cy":"Bydysawd","io":"Universo","ia":"Universo","is":"Alheimurinn","tr":"Evren","cs":"vesm\u00edr","sk":"Vesm\u00edr","uk":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","an":"Universo","az":"Kainat","ast":"Universu","gn":"Arapy","bs":"Svemir","sn":"Rudunhumwe","nv":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","dsb":"Uniwersum","hif":"Sansaar","fo":"Alheimurin","fy":"Hielal","ga":"An Chruinne","hak":"Y\u00ee-chhiu","id":"Alam semesta","jv":"Alam semesta","pam":"Sikluban","csb":"Swiatnica","sw":"Ulimwengu","ht":"Liniv\u00e8","ku":"Gerd\u00fbn","lv":"Visums","lt":"Visata","li":"Universum","lmo":"\u00dcnivers","ms":"Alam semesta","mwl":"Ouniberso","nah":"Cem\u0101n\u0101huac","nds-nl":"Hielal","nap":"Annevierzo","frr":"\u00c5\u00e5l","nrm":"Eunivers","nov":"Universe","oc":"Univ\u00e8rs","pfl":"Weldall","pap":"Universo","nds":"Weltruum","qu":"Ch'askancha","stq":"Al","sq":"Gjith\u00ebsia","scn":"Universu","sl":"Vesolje","sh":"Svemir","su":"Jagat","tl":"Uniberso","war":"Sangkalibutan","bat-smg":"V\u0117sata","vi":"v\u0169 tr\u1ee5","zh-min-nan":"\u00da-ti\u016b","bn":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","ar":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","arc":"\u072c\u0712\u071d\u0720","arz":"\u0643\u0648\u0646","be":"\u0421\u0443\u0441\u0432\u0435\u0442","bg":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","ckb":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","cv":"\u00c7\u0443\u0442 
\u0422\u0115\u043d\u0447\u0435","el":"\u03c3\u03cd\u03bc\u03c0\u03b1\u03bd","fa":"\u06af\u06cc\u062a\u06cc","gu":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","he":"\u05d4\u05d9\u05e7\u05d5\u05dd","hi":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","hy":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","ka":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","kk":"\u04d8\u043b\u0435\u043c","kn":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","ko":"\uc6b0\uc8fc","lez":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","ml":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","mn":"\u041e\u0440\u0447\u043b\u043e\u043d","mr":"\u0935\u093f\u0936\u094d\u0935","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c","ne":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","pnb":"\u06a9\u0627\u0626\u0646\u0627\u062a","rue":"\u0412\u0435\u0441\u043c\u0456\u0440","sr":"\u0421\u0432\u0435\u043c\u0438\u0440","ta":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","te":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","tg":"\u041a\u043e\u0438\u043d\u043e\u0442","th":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","tt":"\u0413\u0430\u043b\u04d9\u043c","ur":"\u06a9\u0627\u0626\u0646\u0627\u062a","xmf":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","yi":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","zh":"\u5b87\u5b99","zh-classical":"\u5b87\u5b99","zh-yue":"\u5b87\u5b99","en-ca":"Universe","pt-br":"universo","yue":"\u5b87\u5b99","zh-cn":"\u5b87\u5b99","zh-hans":"\u5b87\u5b99","zh-sg":"\u5b87\u5b99","zh-my":"\u5b87\u5b99","zh-hk":"\u5b87\u5b99","zh-tw":"\u5b87\u5b99","zh-mo":"\u5b87\u5b99","de-formal":"Universum","si":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","be-x-old":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","ilo":"law-ang","jbo":"munje","vep":"Mir","be-tarask":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","bar":"W\u00f6dall","pms":"Univers","sr-ec":"\u0421\u0432\u0435\u043c\u0438\u0440","sr-el":"Svemir","sco":"Universe","or":"\u0b2c\u0b4d\u0b30\u0b39\u0b4d\u0b2e\u0b3e\u0b23\u0b4d\u0b21"},"description":{"la":"res quae omnem materiam et spatium continet","en":"me da comezon","fr":"ensemble des plan\u00e8tes, des \u00e9toiles, des galaxies, de l'espace intergalactique, ainsi que de toute la mati\u00e8re et de l'\u00e9nergie","pl":"Wszystko, co fizycznie istnieje: ca\u0142a przestrze\u0144, czas, wszystkie formy materii i energii oraz prawa fizyki i sta\u0142e fizyczne okre\u015blaj\u0105ce ich zachowanie.","es":"todo lo que nos rodea","de":"Gesamtheit aller Planeten, Sterne, Galaxien, des intergalaktischen Raums, und aller Materie und Energie","it":"insieme di tutto ci\u00f2 che esiste","eo":"la tuto de \u0109io ekzistanta, steloj, spaco, materio, energio ...","no":"alt som eksisterer av rom, materie og str\u00e5ling","nb":"alt som eksisterer av rom, materie og str\u00e5ling.","nn":"alt som eksisterer, derfor all fysisk masse og energi, planetar, stjerner, galaksar, og alt i det intergalaktiske rommet","en-gb":"The totality of planets, stars, galaxies, intergalactic space, and all matter and energy","nl":"alle materie en energie binnen het gehele ruimte-tijdcontinu\u00fcm waarin wij bestaan","ko":"\ubb34\ud55c\ud55c \uc2dc\uac04\uacfc \ub9cc\ubb3c\uc744 \ud3ec\ud568\ud558\uace0 \uc788\ub294 \ub05d\uc5c6\ub294 \uacf5\uac04\uc758 \ucd1d\uccb4","ca":"totalitat de planetes, estrelles, gal\u00e0xies, espai intergal\u00e0ctic i tota la mat\u00e8ria i energia","fi":"avaruuden ja siin\u00e4 olevan aineen ja energian muodostama 
kokonaisuus","ru":"\u0444\u0443\u043d\u0434\u0430\u043c\u0435\u043d\u0442\u0430\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043d\u044f\u0442\u0438\u0435 \u0432 \u0430\u0441\u0442\u0440\u043e\u043d\u043e\u043c\u0438\u0438 \u0438 \u0444\u0438\u043b\u043e\u0441\u043e\u0444\u0438\u0438","zh-hans":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hant":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-cn":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-sg":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-my":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hk":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-tw":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-mo":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","ja":"\u60d1\u661f\u3001\u6052\u661f\u3001\u9280\u6cb3\u3001\u9280\u6cb3\u9593\u7a7a\u9593\u3001\u5168\u3066\u306e\u7269\u8cea\u3068\u30a8\u30cd\u30eb\u30ae\u30fc\u306e\u7dcf\u4f53","tr":"y\u0131ld\u0131zlar, gezegenler, gaz, toz, galaksileraras\u0131 madde ve k\u0131saca her \u015fey","uk":"\u0441\u0443\u043a\u0443\u043f\u043d\u0456\u0441\u0442\u044c \u0443\u0441\u044c\u043e\u0433\u043e, \u0449\u043e \u0456\u0441\u043d\u0443\u0454: \u0447\u0430\u0441, \u043f\u0440\u043e\u0441\u0442\u0456\u0440, \u043c\u0430\u0442\u0435\u0440\u0456\u044f, \u0435\u043d\u0435\u0440\u0433\u0456\u044f","pt-br":"Tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia.","ta":"\u0bb5\u0bc6\u0bb3\u0bbf \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0b95\u0bbe\u0bb2\u0bae\u0bcd \u0b86\u0b95\u0bbf\u0baf\u0bb5\u0bb1\u0bcd\u0bb1\u0bbf\u0ba9\u0bcd \u0bae\u0bc1\u0bb4\u0bc1\u0bae\u0bc8","ro":"totalitatea planetelor, stelelor, galaxiilor, spa\u0163iului intergalactic \u015fi al materiei \u015fi energiei","da":"Universet defineres almindeligvis som alt eksisterende, inklusiv planeter, stjerner, galakser, indholdet af det intergalaktiske rum, og alt stof og energi.","fa":"\u0645\u062c\u0645\u0648\u0639\u0647 \u0633\u06cc\u0627\u0631\u0647\u200c\u0647\u0627\u060c \u0633\u062a\u0627\u0631\u06af\u0627\u0646\u060c \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627\u060c \u0641\u0636\u0627\u06cc \u0645\u06cc\u0627\u0646 \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627 \u0648 \u0647\u0645\u0647 \u0645\u0627\u062f\u0647 \u0648 \u0627\u0646\u0631\u0698\u06cc","sv":"Det utrymme som per definition inneh\u00e5ller allting, det vill s\u00e4ga all materia, energi, rumtiden, naturlagarna och alla h\u00e4ndelser.","pt":"tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia","ml":"\u0d2d\u0d57\u0d24\u0d3f\u0d15\u0d2e\u0d3e\u0d2f\u0d3f \u0d28\u0d3f\u0d32\u0d28\u0d3f\u0d7d\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28 \u0d0e\u0d32\u0d4d\u0d32\u0d3e\u0d02 \u0d1a\u0d47\u0d7c\u0d28\u0d4d\u0d28\u0d24\u0d3e\u0d23\u0d4d 
\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","ilo":"totalidad iti pannakaparsua a mairaman dagiti planeta, dagiti bituen, dagiti ariwanas, dagiti linaon ti intergalaktiko a limbang, ken amin a banag ken enerhia","cs":"ve\u0161kerenstvo","tl":"planeta, mga bituin, mga galaksiya, mga nilalaman ng intergalaktikong kalawakan, at lahat ng materya at enerhiya","oc":"ensemble de tot \u00e7\u00f2 qu'exit\u00eds","af":"al die planete, sterre, sterrestelsels en intergalaktiese ruimtes, asook alle energie en materie","sr":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-ec":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-el":"sveukupnost planeta, zvezda, galaksija, intergalakti\u010dkog prostora, i sve materije i energije","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"},"aliases":{"pl":["Kosmos","\u015awiat","Natura","Uniwersum"],"en":["cosmos","The Universe","Space"],"es":["cosmos"],"de":["Weltall","All","Kosmos"],"fr":["Cosmos"],"eo":["Kosmo"],"it":["cosmo","spazio"],"nl":["universum","kosmos","cosmos"],"ca":["cosmos"],"fi":["universumi","kaikkeus"],"hu":["univerzum"],"sv":["Kosmos"],"nds":["Universum","Kosmos"],"fa":["\u062c\u0647\u0627\u0646","\u0639\u0627\u0644\u0645","\u0686\u0631\u062e \u06af\u0631\u062f\u0648\u0646","\u06a9\u06cc\u0647\u0627\u0646","\u06a9\u0627\u06cc\u0646\u0627\u062a","\u0647\u0633\u062a\u06cc"],"ta":["\u0baa\u0bbf\u0bb0\u0baa\u0b9e\u0bcd\u0b9a\u0bae\u0bcd","\u0baa\u0bc7\u0bb0\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0baa\u0bb2\u0bcd\u0bb2\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0b85\u0ba3\u0bcd\u0b9f\u0bb5\u0bc6\u0bb3\u0bbf"],"pt":["Universo","cosmos"],"ml":["\u0d32\u0d4b\u0d15\u0d02","\u0d05\u0d23\u0d4d\u0d21\u0d15\u0d1f\u0d3e\u0d39\u0d02","\u0d2c\u0d4d\u0d30\u0d39\u0d4d\u0d2e\u0d3e\u0d23\u0d4d\u0d21\u0d02"],"ilo":["uniberso","universo"],"cs":["ve\u0161kerenstvo","univerzum"],"my":["\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c (Q1) [edit] 
\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"]},"links":{"enwiki":{"name":"Universe","badges":[]},"dewiki":{"name":"Universum","badges":[]},"hrwiki":{"name":"Svemir","badges":[]},"frwiki":{"name":"Univers","badges":[]},"lawiki":{"name":"Universum","badges":[]},"ptwiki":{"name":"Universo","badges":[]},"fiwiki":{"name":"Maailmankaikkeus","badges":[]},"simplewiki":{"name":"Universe","badges":[]},"jawiki":{"name":"\u5b87\u5b99","badges":[]},"eswiki":{"name":"Universo","badges":[]},"itwiki":{"name":"Universo","badges":[]},"huwiki":{"name":"Vil\u00e1gegyetem","badges":[]},"eowiki":{"name":"Universo","badges":[]},"cawiki":{"name":"Univers","badges":[]},"nlwiki":{"name":"Heelal","badges":[]},"rowiki":{"name":"Univers","badges":[]},"svwiki":{"name":"Universum","badges":[]},"plwiki":{"name":"Wszech\u015bwiat","badges":[]},"glwiki":{"name":"Universo","badges":[]},"euwiki":{"name":"Unibertso","badges":[]},"mkwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"dawiki":{"name":"Universet","badges":[]},"brwiki":{"name":"Hollved","badges":[]},"etwiki":{"name":"Universum","badges":[]},"afwiki":{"name":"Heelal","badges":[]},"cywiki":{"name":"Bydysawd (seryddiaeth)","badges":[]},"iowiki":{"name":"Universo","badges":[]},"iawiki":{"name":"Universo","badges":[]},"iswiki":{"name":"Alheimurinn","badges":[]},"nnwiki":{"name":"Universet","badges":[]},"nowiki":{"name":"Universet","badges":[]},"trwiki":{"name":"Evren","badges":[]},"uzwiki":{"name":"Olam","badges":[]},"ruwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"cswiki":{"name":"Vesm\u00edr","badges":[]},"skwiki":{"name":"Vesm\u00edr","badges":[]},"ukwiki":{"name":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","badges":[]},"anwiki":{"name":"Universo","badges":[]},"azwiki":{"name":"Kainat","badges":[]},"astwiki":{"name":"Universu","badges":[]},"gnwiki":{"name":"Arapy","badges":[]},"bswiki":{"name":"Svemir","badges":[]},"snwiki":{"name":"Rudunhumwe","badges":[]},"nvwiki":{"name":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","badges":[]},"dsbwiki":{"name":"Uniwersum","badges":[]},"hifwiki":{"name":"Sansaar","badges":[]},"fowiki":{"name":"Alheimurin","badges":[]},"fywiki":{"name":"Hielal","badges":[]},"gawiki":{"name":"An Chruinne","badges":[]},"hakwiki":{"name":"Y\u00ee-chhiu","badges":[]},"idwiki":{"name":"Alam semesta","badges":[]},"jvwiki":{"name":"Alam semesta","badges":[]},"pamwiki":{"name":"Sikluban","badges":[]},"csbwiki":{"name":"Swiatnica","badges":[]},"swwiki":{"name":"Ulimwengu","badges":[]},"htwiki":{"name":"Liniv\u00e8","badges":[]},"kuwiki":{"name":"Gerd\u00fbn","badges":[]},"lvwiki":{"name":"Visums","badges":[]},"ltwiki":{"name":"Visata","badges":[]},"liwiki":{"name":"Universum","badges":[]},"lmowiki":{"name":"\u00dcnivers","badges":[]},"mswiki":{"name":"Alam 
semesta","badges":[]},"mwlwiki":{"name":"Ouniberso","badges":[]},"nahwiki":{"name":"Cem\u0101n\u0101huac","badges":[]},"nds_nlwiki":{"name":"Hielal","badges":[]},"napwiki":{"name":"Annevierzo","badges":[]},"frrwiki":{"name":"\u00c5\u00e5l","badges":[]},"nrmwiki":{"name":"Eunivers","badges":[]},"novwiki":{"name":"Universe","badges":[]},"ocwiki":{"name":"Univ\u00e8rs","badges":[]},"pflwiki":{"name":"Weldall","badges":[]},"papwiki":{"name":"Universo","badges":[]},"ndswiki":{"name":"Weltruum","badges":[]},"quwiki":{"name":"Ch'askancha","badges":[]},"stqwiki":{"name":"Al","badges":[]},"sqwiki":{"name":"Gjith\u00ebsia","badges":[]},"scnwiki":{"name":"Universu","badges":[]},"slwiki":{"name":"Vesolje","badges":[]},"shwiki":{"name":"Svemir","badges":[]},"suwiki":{"name":"Jagat","badges":[]},"tlwiki":{"name":"Sansinukob","badges":[]},"warwiki":{"name":"Sangkalibutan","badges":[]},"bat_smgwiki":{"name":"V\u0117sata","badges":[]},"viwiki":{"name":"V\u0169 tr\u1ee5","badges":[]},"zh_min_nanwiki":{"name":"\u00da-ti\u016b","badges":[]},"bnwiki":{"name":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","badges":[]},"arwiki":{"name":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","badges":[]},"arcwiki":{"name":"\u072c\u0712\u071d\u0720","badges":[]},"arzwiki":{"name":"\u0643\u0648\u0646","badges":[]},"bewiki":{"name":"\u0421\u0443\u0441\u0432\u0435\u0442","badges":[]},"bgwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"ckbwiki":{"name":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","badges":[]},"cvwiki":{"name":"\u00c7\u0443\u0442 \u0422\u0115\u043d\u0447\u0435","badges":[]},"elwiki":{"name":"\u03a3\u03cd\u03bc\u03c0\u03b1\u03bd","badges":[]},"fawiki":{"name":"\u06af\u06cc\u062a\u06cc","badges":[]},"guwiki":{"name":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","badges":[]},"hewiki":{"name":"\u05d4\u05d9\u05e7\u05d5\u05dd","badges":[]},"hiwiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"kawiki":{"name":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","badges":[]},"kkwiki":{"name":"\u04d8\u043b\u0435\u043c","badges":[]},"knwiki":{"name":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","badges":[]},"kowiki":{"name":"\uc6b0\uc8fc","badges":[]},"lezwiki":{"name":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","badges":[]},"mlwiki":{"name":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","badges":[]},"mnwiki":{"name":"\u041e\u0440\u0447\u043b\u043e\u043d","badges":[]},"mrwiki":{"name":"\u0935\u093f\u0936\u094d\u0935","badges":[]},"mywiki":{"name":"\u1005\u1000\u103c\u101d\u1020\u102c","badges":[]},"newiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"pnbwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"ruewiki":{"name":"\u0412\u0435\u0441\u043c\u0456\u0440","badges":[]},"srwiki":{"name":"\u0421\u0432\u0435\u043c\u0438\u0440","badges":[]},"tawiki":{"name":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","badges":[]},"tewiki":{"name":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","badges":[]},"tgwiki":{"name":"\u041a\u043e\u0438\u043d\u043e\u0442","badges":[]},"thwiki":{"name":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","badges":[]},"ttwiki":{"name":"\u0413\u0430\u043b\u04d9\u043c","badges":[]},"urwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"xmfwiki":{"name":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","badges":[]},"yiwiki":{"name":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","badges":[]},"zhwiki":{"name":"\u5b87\u5b99","badges":[]},"zh_classicalwiki":{"name":
"\u5b87\u5b99","badges":[]},"zh_yuewiki":{"name":"\u5b87\u5b99","badges":[]},"be_x_oldwiki":{"name":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","badges":[]},"siwiki":{"name":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","badges":[]},"ilowiki":{"name":"Law-ang","badges":[]},"hywiki":{"name":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","badges":[]},"vepwiki":{"name":"Mir","badges":[]},"barwiki":{"name":"W\u00f6dall","badges":[]},"pmswiki":{"name":"Univers","badges":[]},"bawiki":{"name":"\u0492\u0430\u043b\u04d9\u043c","badges":[]},"scowiki":{"name":"Universe","badges":[]},"sowiki":{"name":"Koon","badges":[]},"commonswiki":{"name":"Univers","badges":[]}},"entity":["item",1],"claims":[{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":323}],"q":[],"g":"Q1$e70e289c-471e-36b8-50ff-25612cf24e70","rank":1,"refs":[]},{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":273508}],"q":[],"g":"Q1$7b881a36-4708-3c1e-f05d-fd4eb0322087","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":223557}],"q":[],"g":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":1454986}],"q":[],"g":"q1$442901db-4168-e229-2509-ec9d59d99531","rank":1,"refs":[]},{"m":["value",227,"string","4079154-3"],"q":[],"g":"q1$4E4479B7-920C-4AB3-A405-5F3A2168DE91","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":48183}]]]},{"m":["value",373,"string","Universe"],"q":[],"g":"q1$BD33C4D4-8E79-40FA-BB26-475CA5E732CE","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":328}]]]},{"m":["value",508,"string","7239"],"q":[],"g":"q1$766D285D-5EA2-49FA-BDDE-915E3851ECFD","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":460907}]]]},{"m":["value",18,"string","Hubble ultra deep field.jpg"],"q":[],"g":"q1$fd1de6d2-4522-5d35-5e15-e7e144452ba9","rank":1,"refs":[]},{"m":["value",910,"wikibase-entityid",{"entity-type":"item","numeric-id":5551050}],"q":[],"g":"Q1$41A4AA15-DF3F-49C9-842C-A2AF0BBCAAD0","rank":1,"refs":[]},{"m":["value",349,"string","00574074"],"q":[],"g":"Q1$E0551ECA-8ADE-46E0-AAE7-2C4685C91E89","rank":1,"refs":[]},{"m":["value",361,"wikibase-entityid",{"entity-type":"item","numeric-id":3327819}],"q":[["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":41719}]],"g":"q1$21f31f42-4f4d-79b0-0380-92039776e884","rank":0,"refs":[]},{"m":["value",580,"time",{"time":"-13800000000-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":1,"calendarmodel":"http:\/\/www.wikidata.org\/entity\/Q1985727"}],"q":[["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":15605}],["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":76250}],["value",805,"wikibase-entityid",{"entity-type":"item","numeric-id":500699}]],"g":"Q1$789eef0c-4108-cdda-1a63-505cdd324564","rank":1,"refs":[[["value",248,"wikibase-entityid",{"entity-type":"item","numeric-id":15217920}]]]}]} taomg84m7jjqd76wzqd0wewo7eo6ozm wikibase-item application/json 110669245 110653008 2014-02-20T01:44:07Z Izno 6810 Reverted edits by [[Special:Contributions/201.110.113.186|201.110.113.186]] ([[User talk:201.110.113.186|talk]]) to last revision by [[User:Thomas11|Thomas11]] 
{"label":{"en":"universe","fr":"Univers","la":"universum","uz":"Olam","ru":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","pl":"Wszech\u015bwiat","nb":"Universet","eo":"universo","it":"universo","es":"universo","de":"Universum","ca":"univers","en-gb":"Universe","de-ch":"Universum","fi":"maailmankaikkeus","nn":"Universet","ja":"\u5b87\u5b99","zh-hant":"\u5b87\u5b99","hr":"Svemir","pt":"universo","simple":"Universe","hu":"vil\u00e1gegyetem","nl":"heelal","ro":"univers","sv":"Universum","gl":"universo","eu":"Unibertso","mk":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","da":"Universet","br":"Hollved","et":"Universum","af":"heelal","cy":"Bydysawd","io":"Universo","ia":"Universo","is":"Alheimurinn","tr":"Evren","cs":"vesm\u00edr","sk":"Vesm\u00edr","uk":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","an":"Universo","az":"Kainat","ast":"Universu","gn":"Arapy","bs":"Svemir","sn":"Rudunhumwe","nv":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","dsb":"Uniwersum","hif":"Sansaar","fo":"Alheimurin","fy":"Hielal","ga":"An Chruinne","hak":"Y\u00ee-chhiu","id":"Alam semesta","jv":"Alam semesta","pam":"Sikluban","csb":"Swiatnica","sw":"Ulimwengu","ht":"Liniv\u00e8","ku":"Gerd\u00fbn","lv":"Visums","lt":"Visata","li":"Universum","lmo":"\u00dcnivers","ms":"Alam semesta","mwl":"Ouniberso","nah":"Cem\u0101n\u0101huac","nds-nl":"Hielal","nap":"Annevierzo","frr":"\u00c5\u00e5l","nrm":"Eunivers","nov":"Universe","oc":"Univ\u00e8rs","pfl":"Weldall","pap":"Universo","nds":"Weltruum","qu":"Ch'askancha","stq":"Al","sq":"Gjith\u00ebsia","scn":"Universu","sl":"Vesolje","sh":"Svemir","su":"Jagat","tl":"Uniberso","war":"Sangkalibutan","bat-smg":"V\u0117sata","vi":"v\u0169 tr\u1ee5","zh-min-nan":"\u00da-ti\u016b","bn":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","ar":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","arc":"\u072c\u0712\u071d\u0720","arz":"\u0643\u0648\u0646","be":"\u0421\u0443\u0441\u0432\u0435\u0442","bg":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","ckb":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","cv":"\u00c7\u0443\u0442 
\u0422\u0115\u043d\u0447\u0435","el":"\u03c3\u03cd\u03bc\u03c0\u03b1\u03bd","fa":"\u06af\u06cc\u062a\u06cc","gu":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","he":"\u05d4\u05d9\u05e7\u05d5\u05dd","hi":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","hy":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","ka":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","kk":"\u04d8\u043b\u0435\u043c","kn":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","ko":"\uc6b0\uc8fc","lez":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","ml":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","mn":"\u041e\u0440\u0447\u043b\u043e\u043d","mr":"\u0935\u093f\u0936\u094d\u0935","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c","ne":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","pnb":"\u06a9\u0627\u0626\u0646\u0627\u062a","rue":"\u0412\u0435\u0441\u043c\u0456\u0440","sr":"\u0421\u0432\u0435\u043c\u0438\u0440","ta":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","te":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","tg":"\u041a\u043e\u0438\u043d\u043e\u0442","th":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","tt":"\u0413\u0430\u043b\u04d9\u043c","ur":"\u06a9\u0627\u0626\u0646\u0627\u062a","xmf":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","yi":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","zh":"\u5b87\u5b99","zh-classical":"\u5b87\u5b99","zh-yue":"\u5b87\u5b99","en-ca":"Universe","pt-br":"universo","yue":"\u5b87\u5b99","zh-cn":"\u5b87\u5b99","zh-hans":"\u5b87\u5b99","zh-sg":"\u5b87\u5b99","zh-my":"\u5b87\u5b99","zh-hk":"\u5b87\u5b99","zh-tw":"\u5b87\u5b99","zh-mo":"\u5b87\u5b99","de-formal":"Universum","si":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","be-x-old":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","ilo":"law-ang","jbo":"munje","vep":"Mir","be-tarask":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","bar":"W\u00f6dall","pms":"Univers","sr-ec":"\u0421\u0432\u0435\u043c\u0438\u0440","sr-el":"Svemir","sco":"Universe","or":"\u0b2c\u0b4d\u0b30\u0b39\u0b4d\u0b2e\u0b3e\u0b23\u0b4d\u0b21"},"description":{"la":"res quae omnem materiam et spatium continet","en":"totality of planets, stars, galaxies, intergalactic space, and all matter and energy","fr":"ensemble des plan\u00e8tes, des \u00e9toiles, des galaxies, de l'espace intergalactique, ainsi que de toute la mati\u00e8re et de l'\u00e9nergie","pl":"Wszystko, co fizycznie istnieje: ca\u0142a przestrze\u0144, czas, wszystkie formy materii i energii oraz prawa fizyki i sta\u0142e fizyczne okre\u015blaj\u0105ce ich zachowanie.","es":"totalidad del espacio-tiempo, la materia y la energ\u00eda existentes","de":"Gesamtheit aller Planeten, Sterne, Galaxien, des intergalaktischen Raums, und aller Materie und Energie","it":"insieme di tutto ci\u00f2 che esiste","eo":"la tuto de \u0109io ekzistanta, steloj, spaco, materio, energio ...","no":"alt som eksisterer av rom, materie og str\u00e5ling","nb":"alt som eksisterer av rom, materie og str\u00e5ling.","nn":"alt som eksisterer, derfor all fysisk masse og energi, planetar, stjerner, galaksar, og alt i det intergalaktiske rommet","en-gb":"The totality of planets, stars, galaxies, intergalactic space, and all matter and energy","nl":"alle materie en energie binnen het gehele ruimte-tijdcontinu\u00fcm waarin wij bestaan","ko":"\ubb34\ud55c\ud55c \uc2dc\uac04\uacfc \ub9cc\ubb3c\uc744 \ud3ec\ud568\ud558\uace0 \uc788\ub294 \ub05d\uc5c6\ub294 \uacf5\uac04\uc758 \ucd1d\uccb4","ca":"totalitat de planetes, estrelles, gal\u00e0xies, espai intergal\u00e0ctic i tota la mat\u00e8ria i 
energia","fi":"avaruuden ja siin\u00e4 olevan aineen ja energian muodostama kokonaisuus","ru":"\u0444\u0443\u043d\u0434\u0430\u043c\u0435\u043d\u0442\u0430\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043d\u044f\u0442\u0438\u0435 \u0432 \u0430\u0441\u0442\u0440\u043e\u043d\u043e\u043c\u0438\u0438 \u0438 \u0444\u0438\u043b\u043e\u0441\u043e\u0444\u0438\u0438","zh-hans":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hant":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-cn":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-sg":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-my":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u7edf\u4e00\u4f53","zh-hk":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-tw":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","zh-mo":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7d71\u4e00\u9ad4","ja":"\u60d1\u661f\u3001\u6052\u661f\u3001\u9280\u6cb3\u3001\u9280\u6cb3\u9593\u7a7a\u9593\u3001\u5168\u3066\u306e\u7269\u8cea\u3068\u30a8\u30cd\u30eb\u30ae\u30fc\u306e\u7dcf\u4f53","tr":"y\u0131ld\u0131zlar, gezegenler, gaz, toz, galaksileraras\u0131 madde ve k\u0131saca her \u015fey","uk":"\u0441\u0443\u043a\u0443\u043f\u043d\u0456\u0441\u0442\u044c \u0443\u0441\u044c\u043e\u0433\u043e, \u0449\u043e \u0456\u0441\u043d\u0443\u0454: \u0447\u0430\u0441, \u043f\u0440\u043e\u0441\u0442\u0456\u0440, \u043c\u0430\u0442\u0435\u0440\u0456\u044f, \u0435\u043d\u0435\u0440\u0433\u0456\u044f","pt-br":"Tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia.","ta":"\u0bb5\u0bc6\u0bb3\u0bbf \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0b95\u0bbe\u0bb2\u0bae\u0bcd \u0b86\u0b95\u0bbf\u0baf\u0bb5\u0bb1\u0bcd\u0bb1\u0bbf\u0ba9\u0bcd \u0bae\u0bc1\u0bb4\u0bc1\u0bae\u0bc8","ro":"totalitatea planetelor, stelelor, galaxiilor, spa\u0163iului intergalactic \u015fi al materiei \u015fi energiei","da":"Universet defineres almindeligvis som alt eksisterende, inklusiv planeter, stjerner, galakser, indholdet af det intergalaktiske rum, og alt stof og energi.","fa":"\u0645\u062c\u0645\u0648\u0639\u0647 \u0633\u06cc\u0627\u0631\u0647\u200c\u0647\u0627\u060c \u0633\u062a\u0627\u0631\u06af\u0627\u0646\u060c \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627\u060c \u0641\u0636\u0627\u06cc \u0645\u06cc\u0627\u0646 \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627 \u0648 \u0647\u0645\u0647 \u0645\u0627\u062f\u0647 \u0648 \u0627\u0646\u0631\u0698\u06cc","sv":"Det utrymme som per definition inneh\u00e5ller allting, det vill s\u00e4ga all materia, energi, rumtiden, naturlagarna och alla h\u00e4ndelser.","pt":"tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia","ml":"\u0d2d\u0d57\u0d24\u0d3f\u0d15\u0d2e\u0d3e\u0d2f\u0d3f \u0d28\u0d3f\u0d32\u0d28\u0d3f\u0d7d\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28 \u0d0e\u0d32\u0d4d\u0d32\u0d3e\u0d02 
\u0d1a\u0d47\u0d7c\u0d28\u0d4d\u0d28\u0d24\u0d3e\u0d23\u0d4d \u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","ilo":"totalidad iti pannakaparsua a mairaman dagiti planeta, dagiti bituen, dagiti ariwanas, dagiti linaon ti intergalaktiko a limbang, ken amin a banag ken enerhia","cs":"ve\u0161kerenstvo","tl":"planeta, mga bituin, mga galaksiya, mga nilalaman ng intergalaktikong kalawakan, at lahat ng materya at enerhiya","oc":"ensemble de tot \u00e7\u00f2 qu'exit\u00eds","af":"al die planete, sterre, sterrestelsels en intergalaktiese ruimtes, asook alle energie en materie","sr":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-ec":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435","sr-el":"sveukupnost planeta, zvezda, galaksija, intergalakti\u010dkog prostora, i sve materije i energije","my":"\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"},"aliases":{"pl":["Kosmos","\u015awiat","Natura","Uniwersum"],"en":["cosmos","The Universe","Space"],"es":["cosmos"],"de":["Weltall","All","Kosmos"],"fr":["Cosmos"],"eo":["Kosmo"],"it":["cosmo","spazio"],"nl":["universum","kosmos","cosmos"],"ca":["cosmos"],"fi":["universumi","kaikkeus"],"hu":["univerzum"],"sv":["Kosmos"],"nds":["Universum","Kosmos"],"fa":["\u062c\u0647\u0627\u0646","\u0639\u0627\u0644\u0645","\u0686\u0631\u062e \u06af\u0631\u062f\u0648\u0646","\u06a9\u06cc\u0647\u0627\u0646","\u06a9\u0627\u06cc\u0646\u0627\u062a","\u0647\u0633\u062a\u06cc"],"ta":["\u0baa\u0bbf\u0bb0\u0baa\u0b9e\u0bcd\u0b9a\u0bae\u0bcd","\u0baa\u0bc7\u0bb0\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0baa\u0bb2\u0bcd\u0bb2\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","\u0b85\u0ba3\u0bcd\u0b9f\u0bb5\u0bc6\u0bb3\u0bbf"],"pt":["Universo","cosmos"],"ml":["\u0d32\u0d4b\u0d15\u0d02","\u0d05\u0d23\u0d4d\u0d21\u0d15\u0d1f\u0d3e\u0d39\u0d02","\u0d2c\u0d4d\u0d30\u0d39\u0d4d\u0d2e\u0d3e\u0d23\u0d4d\u0d21\u0d02"],"ilo":["uniberso","universo"],"cs":["ve\u0161kerenstvo","univerzum"],"my":["\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c (Q1) [edit] 
\u1019\u103c\u1014\u103a\u1019\u102c\u1018\u102c\u101e\u102c"]},"links":{"enwiki":{"name":"Universe","badges":[]},"dewiki":{"name":"Universum","badges":[]},"hrwiki":{"name":"Svemir","badges":[]},"frwiki":{"name":"Univers","badges":[]},"lawiki":{"name":"Universum","badges":[]},"ptwiki":{"name":"Universo","badges":[]},"fiwiki":{"name":"Maailmankaikkeus","badges":[]},"simplewiki":{"name":"Universe","badges":[]},"jawiki":{"name":"\u5b87\u5b99","badges":[]},"eswiki":{"name":"Universo","badges":[]},"itwiki":{"name":"Universo","badges":[]},"huwiki":{"name":"Vil\u00e1gegyetem","badges":[]},"eowiki":{"name":"Universo","badges":[]},"cawiki":{"name":"Univers","badges":[]},"nlwiki":{"name":"Heelal","badges":[]},"rowiki":{"name":"Univers","badges":[]},"svwiki":{"name":"Universum","badges":[]},"plwiki":{"name":"Wszech\u015bwiat","badges":[]},"glwiki":{"name":"Universo","badges":[]},"euwiki":{"name":"Unibertso","badges":[]},"mkwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"dawiki":{"name":"Universet","badges":[]},"brwiki":{"name":"Hollved","badges":[]},"etwiki":{"name":"Universum","badges":[]},"afwiki":{"name":"Heelal","badges":[]},"cywiki":{"name":"Bydysawd (seryddiaeth)","badges":[]},"iowiki":{"name":"Universo","badges":[]},"iawiki":{"name":"Universo","badges":[]},"iswiki":{"name":"Alheimurinn","badges":[]},"nnwiki":{"name":"Universet","badges":[]},"nowiki":{"name":"Universet","badges":[]},"trwiki":{"name":"Evren","badges":[]},"uzwiki":{"name":"Olam","badges":[]},"ruwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"cswiki":{"name":"Vesm\u00edr","badges":[]},"skwiki":{"name":"Vesm\u00edr","badges":[]},"ukwiki":{"name":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","badges":[]},"anwiki":{"name":"Universo","badges":[]},"azwiki":{"name":"Kainat","badges":[]},"astwiki":{"name":"Universu","badges":[]},"gnwiki":{"name":"Arapy","badges":[]},"bswiki":{"name":"Svemir","badges":[]},"snwiki":{"name":"Rudunhumwe","badges":[]},"nvwiki":{"name":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","badges":[]},"dsbwiki":{"name":"Uniwersum","badges":[]},"hifwiki":{"name":"Sansaar","badges":[]},"fowiki":{"name":"Alheimurin","badges":[]},"fywiki":{"name":"Hielal","badges":[]},"gawiki":{"name":"An Chruinne","badges":[]},"hakwiki":{"name":"Y\u00ee-chhiu","badges":[]},"idwiki":{"name":"Alam semesta","badges":[]},"jvwiki":{"name":"Alam semesta","badges":[]},"pamwiki":{"name":"Sikluban","badges":[]},"csbwiki":{"name":"Swiatnica","badges":[]},"swwiki":{"name":"Ulimwengu","badges":[]},"htwiki":{"name":"Liniv\u00e8","badges":[]},"kuwiki":{"name":"Gerd\u00fbn","badges":[]},"lvwiki":{"name":"Visums","badges":[]},"ltwiki":{"name":"Visata","badges":[]},"liwiki":{"name":"Universum","badges":[]},"lmowiki":{"name":"\u00dcnivers","badges":[]},"mswiki":{"name":"Alam 
semesta","badges":[]},"mwlwiki":{"name":"Ouniberso","badges":[]},"nahwiki":{"name":"Cem\u0101n\u0101huac","badges":[]},"nds_nlwiki":{"name":"Hielal","badges":[]},"napwiki":{"name":"Annevierzo","badges":[]},"frrwiki":{"name":"\u00c5\u00e5l","badges":[]},"nrmwiki":{"name":"Eunivers","badges":[]},"novwiki":{"name":"Universe","badges":[]},"ocwiki":{"name":"Univ\u00e8rs","badges":[]},"pflwiki":{"name":"Weldall","badges":[]},"papwiki":{"name":"Universo","badges":[]},"ndswiki":{"name":"Weltruum","badges":[]},"quwiki":{"name":"Ch'askancha","badges":[]},"stqwiki":{"name":"Al","badges":[]},"sqwiki":{"name":"Gjith\u00ebsia","badges":[]},"scnwiki":{"name":"Universu","badges":[]},"slwiki":{"name":"Vesolje","badges":[]},"shwiki":{"name":"Svemir","badges":[]},"suwiki":{"name":"Jagat","badges":[]},"tlwiki":{"name":"Sansinukob","badges":[]},"warwiki":{"name":"Sangkalibutan","badges":[]},"bat_smgwiki":{"name":"V\u0117sata","badges":[]},"viwiki":{"name":"V\u0169 tr\u1ee5","badges":[]},"zh_min_nanwiki":{"name":"\u00da-ti\u016b","badges":[]},"bnwiki":{"name":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","badges":[]},"arwiki":{"name":"\u0641\u0636\u0627\u0621 \u0643\u0648\u0646\u064a","badges":[]},"arcwiki":{"name":"\u072c\u0712\u071d\u0720","badges":[]},"arzwiki":{"name":"\u0643\u0648\u0646","badges":[]},"bewiki":{"name":"\u0421\u0443\u0441\u0432\u0435\u0442","badges":[]},"bgwiki":{"name":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"ckbwiki":{"name":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","badges":[]},"cvwiki":{"name":"\u00c7\u0443\u0442 \u0422\u0115\u043d\u0447\u0435","badges":[]},"elwiki":{"name":"\u03a3\u03cd\u03bc\u03c0\u03b1\u03bd","badges":[]},"fawiki":{"name":"\u06af\u06cc\u062a\u06cc","badges":[]},"guwiki":{"name":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","badges":[]},"hewiki":{"name":"\u05d4\u05d9\u05e7\u05d5\u05dd","badges":[]},"hiwiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"kawiki":{"name":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","badges":[]},"kkwiki":{"name":"\u04d8\u043b\u0435\u043c","badges":[]},"knwiki":{"name":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","badges":[]},"kowiki":{"name":"\uc6b0\uc8fc","badges":[]},"lezwiki":{"name":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","badges":[]},"mlwiki":{"name":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","badges":[]},"mnwiki":{"name":"\u041e\u0440\u0447\u043b\u043e\u043d","badges":[]},"mrwiki":{"name":"\u0935\u093f\u0936\u094d\u0935","badges":[]},"mywiki":{"name":"\u1005\u1000\u103c\u101d\u1020\u102c","badges":[]},"newiki":{"name":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"pnbwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"ruewiki":{"name":"\u0412\u0435\u0441\u043c\u0456\u0440","badges":[]},"srwiki":{"name":"\u0421\u0432\u0435\u043c\u0438\u0440","badges":[]},"tawiki":{"name":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","badges":[]},"tewiki":{"name":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","badges":[]},"tgwiki":{"name":"\u041a\u043e\u0438\u043d\u043e\u0442","badges":[]},"thwiki":{"name":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","badges":[]},"ttwiki":{"name":"\u0413\u0430\u043b\u04d9\u043c","badges":[]},"urwiki":{"name":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"xmfwiki":{"name":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","badges":[]},"yiwiki":{"name":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","badges":[]},"zhwiki":{"name":"\u5b87\u5b99","badges":[]},"zh_classicalwiki":{"name":
"\u5b87\u5b99","badges":[]},"zh_yuewiki":{"name":"\u5b87\u5b99","badges":[]},"be_x_oldwiki":{"name":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","badges":[]},"siwiki":{"name":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","badges":[]},"ilowiki":{"name":"Law-ang","badges":[]},"hywiki":{"name":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","badges":[]},"vepwiki":{"name":"Mir","badges":[]},"barwiki":{"name":"W\u00f6dall","badges":[]},"pmswiki":{"name":"Univers","badges":[]},"bawiki":{"name":"\u0492\u0430\u043b\u04d9\u043c","badges":[]},"scowiki":{"name":"Universe","badges":[]},"sowiki":{"name":"Koon","badges":[]},"commonswiki":{"name":"Univers","badges":[]}},"entity":["item",1],"claims":[{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":323}],"q":[],"g":"Q1$e70e289c-471e-36b8-50ff-25612cf24e70","rank":1,"refs":[]},{"m":["value",793,"wikibase-entityid",{"entity-type":"item","numeric-id":273508}],"q":[],"g":"Q1$7b881a36-4708-3c1e-f05d-fd4eb0322087","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":223557}],"q":[],"g":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","rank":1,"refs":[]},{"m":["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":1454986}],"q":[],"g":"q1$442901db-4168-e229-2509-ec9d59d99531","rank":1,"refs":[]},{"m":["value",227,"string","4079154-3"],"q":[],"g":"q1$4E4479B7-920C-4AB3-A405-5F3A2168DE91","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":48183}]]]},{"m":["value",373,"string","Universe"],"q":[],"g":"q1$BD33C4D4-8E79-40FA-BB26-475CA5E732CE","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":328}]]]},{"m":["value",508,"string","7239"],"q":[],"g":"q1$766D285D-5EA2-49FA-BDDE-915E3851ECFD","rank":1,"refs":[[["value",143,"wikibase-entityid",{"entity-type":"item","numeric-id":460907}]]]},{"m":["value",18,"string","Hubble ultra deep field.jpg"],"q":[],"g":"q1$fd1de6d2-4522-5d35-5e15-e7e144452ba9","rank":1,"refs":[]},{"m":["value",910,"wikibase-entityid",{"entity-type":"item","numeric-id":5551050}],"q":[],"g":"Q1$41A4AA15-DF3F-49C9-842C-A2AF0BBCAAD0","rank":1,"refs":[]},{"m":["value",349,"string","00574074"],"q":[],"g":"Q1$E0551ECA-8ADE-46E0-AAE7-2C4685C91E89","rank":1,"refs":[]},{"m":["value",361,"wikibase-entityid",{"entity-type":"item","numeric-id":3327819}],"q":[["value",31,"wikibase-entityid",{"entity-type":"item","numeric-id":41719}]],"g":"q1$21f31f42-4f4d-79b0-0380-92039776e884","rank":0,"refs":[]},{"m":["value",580,"time",{"time":"-13800000000-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":1,"calendarmodel":"http:\/\/www.wikidata.org\/entity\/Q1985727"}],"q":[["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":15605}],["value",459,"wikibase-entityid",{"entity-type":"item","numeric-id":76250}],["value",805,"wikibase-entityid",{"entity-type":"item","numeric-id":500699}]],"g":"Q1$789eef0c-4108-cdda-1a63-505cdd324564","rank":1,"refs":[[["value",248,"wikibase-entityid",{"entity-type":"item","numeric-id":15217920}]]]}]} j46104qiw7df9xv35qsyfg0uivlfm7b wikibase-item application/json Wikidata:Contact the development team 4 181 110689117 110096208 2014-02-20T06:42:16Z Yair rand 3490 /* What's the plan for heavy data? */ __NEWSECTIONLINK__ {{Shortcut|WD:DEV}} {{Box|background-color=#F4F8FA|border=#A1CEF5|style=color: #333; clear: both;|1= Wikidata development is ongoing. 
You can leave notes for the development team here, on {{Freenode|wikidata}} and on the [https://lists.wikimedia.org/mailman/listinfo/wikidata-l mailing list] or [//bugzilla.wikimedia.org/enter_bug.cgi?product=MediaWiki%20extensions&component=WikidataRepo report bugs on Bugzilla].<br> See the [//bugzilla.wikimedia.org/buglist.cgi?emailcc1=1&resolution=---&emailtype1=exact&emailassigned_to1=1&query_format=advanced&email1=wikidata-bugs@lists.wikimedia.org&list_id=155962 list of bugs on Bugzilla]. }}
Regarding the accounts of the Wikidata development team, we have decided on the following rules:
* Wikidata developers can have clearly marked staff accounts (in the form "Fullname (WMDE)"), and these can receive admin and bureaucrat rights.
* These staff accounts should be used ''only'' for development, testing, spam-fighting, and emergencies.
* The private accounts of staff members do ''not'' get admin and bureaucrat rights by default. If staff members desire admin and bureaucrat rights for their private accounts, those should be gained by going through the processes developed by the community.
* Every staff member is free to use their private account just as everyone else, obviously. Especially if they want to work on content in Wikidata, this is the account they should be using, not their staff account.
{{Archiving}} {{User:Hazard-Bot/Archiver |archiveheader = {{Archive||Contact the development team archive}} |algo = old(7d) |archive = Wikidata:Contact the development team/Archive/%(year)d/%(month)02d }}
<!-- Please write under this line! -->
== Search result limit ==
{{tracked|61021|fixed}} For a few days now, it has no longer been possible to get search result pages with more than 500 results using the <code>limit=xxx</code> parameter. That's especially annoying as the ranking of the search results changes when using the pagination. So if I go to page 2 of the search results, I am likely to see again some of the results from page 1, and therefore probably miss other ones that moved to some other page. Is this taking so many resources that it has to be turned off? (I use large search result pages a lot, especially as it's not possible to narrow your search, e.g. to the English descriptions. So if you search for a certain English description, you will get tons of results where your search term is in the English label, the French description, Spanish sitelinks and so on. Or if you'd like to search for all items that are related to e.g. the Rolling Stones to check for possible duplicates, you'd like to have all results on one page for a better overview.) --[[User:YMS|YMS]] ([[User talk:YMS|{{int:Talkpagelinktext}}]]) 09:56, 1 February 2014 (UTC)
: This might be something the Foundation changed recently when working on Cirrus Search. I will bring it up with the right people. --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 13:01, 3 February 2014 (UTC)
:: I just stumbled upon the change in question: [https://gerrit.wikimedia.org/r/#/c/106553/]. So it's nothing specific to Wikidata. It would be nice if you could address it anyway. You don't need to mention that "5000 search results in a single page is too many to be useful" is the most idiotic commit message I've ever heard, though. --[[User:YMS|YMS]] ([[User talk:YMS|{{int:Talkpagelinktext}}]]) 20:14, 6 February 2014 (UTC)
:::I've brought it up in the call last night and was asked to report a bug. It's at [[bugzilla:61021]]. --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 12:06, 7 February 2014 (UTC)
:::: Thank you very much, Lydia. --[[User:YMS|YMS]] ([[User talk:YMS|{{int:Talkpagelinktext}}]]) 12:59, 8 February 2014 (UTC)
The change has been reverted. So thanks again, Lydia, and also Ricorisamoa and of course Chad. --[[User:YMS|YMS]] ([[User talk:YMS|{{int:Talkpagelinktext}}]]) 12:04, 12 February 2014 (UTC)
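For anyone scripting around the cap discussed above: the limit applies per request, and the standard MediaWiki web API (<code>action=query&list=search</code>) exposes the same search together with a continuation offset (<code>sroffset</code>), so large result sets can still be collected by paging. A minimal Java sketch under that assumption; the class and method names are invented for illustration, and only the API parameters themselves are standard:
<pre>
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

// Hypothetical helper: fetches one page of search results from the
// MediaWiki API, using sroffset for continuation.
public class SearchPager {
    public static String fetchPage(String term, int offset) throws Exception {
        // 50 results per request for ordinary clients; the JSON reply
        // carries a continuation block while more results exist.
        String query = "https://www.wikidata.org/w/api.php?action=query&list=search"
                + "&srsearch=" + URLEncoder.encode(term, "UTF-8")
                + "&srlimit=50&sroffset=" + offset + "&format=json";
        HttpURLConnection conn = (HttpURLConnection) new URL(query).openConnection();
        conn.setRequestProperty("User-Agent", "search-pager-example/0.1");
        StringBuilder body = new StringBuilder();
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = in.readLine()) != null) {
                body.append(line).append('\n');
            }
        }
        return body.toString();
    }
}
</pre>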
== Bug or error? ==
I'm trying to change the labels of [[Q4167836]] with the labels tool, but it doesn't work, and if you look at the item history I think there are errors --[[User:Rippitippi|Rippitippi]] ([[User talk:Rippitippi|{{int:Talkpagelinktext}}]]) 03:37, 26 January 2014 (UTC)
: I unfortunately can't see what the issue was from the history. --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 10:15, 26 January 2014 (UTC)
:: The issue is that I can't change the label, and in the history you can see a partial restore and a DB error --[[User:Rippitippi|Rippitippi]] ([[User talk:Rippitippi|{{int:Talkpagelinktext}}]]) 16:37, 26 January 2014 (UTC)
:::Other problems with [[Q4167836]] are discussed [[#Can_not_link_zh:voy:Wikivoyage:.E5.88.86.E7.B1.BB_to_Q4167836_because_of_.22timeout.22|above]]. -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 17:12, 26 January 2014 (UTC)
::::And I want to add a statement to this......--[[User:GZWDer|GZWDer]] ([[User talk:GZWDer|{{int:Talkpagelinktext}}]]) 04:47, 27 January 2014 (UTC)
up --[[User:Rippitippi|Rippitippi]] ([[User talk:Rippitippi|{{int:Talkpagelinktext}}]]) 13:46, 3 February 2014 (UTC)
Has one of you tried restoring the version of Dec 27th? --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 12:11, 7 February 2014 (UTC)
:No --[[User:Rippitippi|Rippitippi]] ([[User talk:Rippitippi|{{int:Talkpagelinktext}}]]) 05:38, 11 February 2014 (UTC)
== Wikipedia Watchlists ==
Hi, in a discussion on the French WP ( [[:fr:Wikipédia:Sondage/Wikidata Phase 2‎]] ) a fair number of users are concerned about modification tracking. They seem OK with tracking through the Wikipedia watchlist, but have a number of concerns:
# The possible flood of data that is not really used in the article
# The lack of detail there is now.
I personally am quite confident that both points will get better once the performance issues around using other data items are solved, but that is a little short as an answer to the concern. Could Lydia explain here what is going on, so we have a formal link to put in those discussions? [[User:TomT0m|TomT0m]] ([[User talk:TomT0m|{{int:Talkpagelinktext}}]]) 13:35, 9 February 2014 (UTC)
: I do want to improve this because the trust in our data by the Wikipedians is obviously crucial. My issue is that I am not sure what exactly they'd like to see. So if we could gather some input on this, that'd be ideal and would move this forward. --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 18:19, 10 February 2014 (UTC)
: Hi, requests from {{U'|O.Taris}} (I translate):
::We should be notified in the watchlist of all Wikidata modifications that impact a watched Wikipedia article, and only those.
To achieve that, if we watch article ''X'' in Wikipedia, we should be informed by the watchlist:
:::* of a change to a datum ''a'' of the Wikidata item ''X'' if and only if ''a'' is actually used in the Wikipedia article on ''X'';
:::* of a change to a datum ''b'' on the item ''Y'' if and only if ''b'' is actually used in the article on another topic ''X''.
::: ''[snip: he thinks that's not possible - I reply that it will probably be possible once we are able to use any data in the article, because you will implement some kind of data usage tracking]'' [[User:TomT0m|TomT0m]] ([[User talk:TomT0m|{{int:Talkpagelinktext}}]]) 22:00, 10 February 2014 (UTC)
Hi, there are similar issues on the German and English WP. [https://de.wikipedia.org/wiki/Wikipedia_Diskussion:WikiProjekt_Wikidata_in_Wikipedia#Einbindung_von_Koordinaten][https://www.wikidata.org/wiki/Wikidata:Forum#Wikidata-Bearbeitung_in_Wikipedia][https://www.wikidata.org/wiki/Wikidata:Project_chat#Wikipedia_watchlist] There are 3 levels to the problem:
*1) "show wikidata" on the WP watchlist doesn't actually show wikidata changes. Doesn't work on deWP and enWP. Or maybe it sometimes shows them and sometimes not. Or it works for some users and not for others? "show wikidata" does work on WP recent changes though, it seems.
*2) "show wikidata" floods the WP watchlist / recent changes with cryptic changes.
** the wikidata edit comment shown on WP is quite useless; it's either "The wikidata object was changed" or "2 changes" (or 3, 4, etc.). You have to click on the diff to find out more on wikidata. And I really don't care if some wikidata label was changed in 1 of 250 languages - and since I feel like 99% of those "wikidata changes" are not relevant, I don't bother to click on those diffs.
** The [https://www.wikidata.org/w/index.php?title=Q11274&action=history wikidata history] shows better edit comments, a little less useless:
:::* Goldzahn (talk | contribs)‎ . . (11,457 bytes) (+284)‎ . . (‎Created claim: Property:P1101: 77±1) (undo | thank) (restore)
:::* OC Ripper (talk | contribs)‎ . . (11,173 bytes) (+74)‎ . . (‎Added link to [shwiki]: Chrysler Building) (undo | thank) (restore)
:::* KrBot (talk | contribs)‎ . . (11,099 bytes) (+141)‎ . . (‎Added reference to claim: Property:P227: 4390482-8) (undo) (restore)
:::* Legobot (talk | contribs)‎ . . (10,958 bytes) (+393)‎ . . (‎Added reference to claim: Property:P646: /m/01zmd) (undo) (restore)
:::But to understand those wikidata changes, I would still need to learn by heart what Property:P1101 is (and thousands of other properties) or click through to wikidata? That should be easier to understand. Those wikidata changes would still be too many for my watchlist convenience (wikidata bots!).
*3) "show relevant wikidata" (similar to the requests from O.Taris above) meaning: show only changes to wikidata that is used in the WP. Most important example now: if the interwiki is removed/vandalised on wikidata. Or maybe in the future, coordinates from wikidata, etc.
A fourth issue is flagged revisions. German Wikipedia reviews changes to commons files that are included in WP articles [https://de.wikipedia.org/wiki/Wikipedia_Diskussion:WikiProjekt_Wikidata_in_Wikipedia#Umgehung_von_Sichter-_und_Adminrechten]. A vandalised image isn't immediately shown live on deWP; a trusted user has to review the change. This could be a model for wikidata inclusion on deWP. Best, --[[User:Atlasowa|Atlasowa]] ([[User talk:Atlasowa|{{int:Talkpagelinktext}}]]) 10:23, 13 February 2014 (UTC)
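The "show relevant wikidata" request above boils down to consulting an index of which article uses which property before showing a change. A toy Java sketch of just that check (all type and field names are invented; this is not how Wikibase implements it):
<pre>
import java.util.Map;
import java.util.Set;

// Toy model of "show relevant wikidata": an index of the properties each
// article actually uses decides whether a Wikidata edit should appear in
// that article's watchlist. All names here are invented for illustration.
public class RelevantChangeFilter {
    // e.g. "Chrysler Building" -> {"P1101", "P227"}
    private final Map<String, Set<String>> usedPropertiesByArticle;

    public RelevantChangeFilter(Map<String, Set<String>> usedPropertiesByArticle) {
        this.usedPropertiesByArticle = usedPropertiesByArticle;
    }

    /** Show the change only if the edited property is used in the article. */
    public boolean isRelevant(String articleTitle, String editedPropertyId) {
        Set<String> used = usedPropertiesByArticle.get(articleTitle);
        return used != null && used.contains(editedPropertyId);
    }
}
</pre>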
== What's the plan for heavy data? ==
What's the development team's plan for dealing with very large amounts of data relating to a single item? Currently, if we were to import some tens of thousands of statements about the demographics of a place into an item, the page would probably become unusable due to load times. (For all I know, Lua might not be able to access it easily either.) Is there any plan to set up some way to have such data, perhaps by adding a table datatype, or having items not immediately load all statements when there are more than a certain number? Or are we going to have an effective maximum size for items, limiting the amount of data that can be added? --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 08:13, 11 February 2014 (UTC)
:How about a "demographic" property for items with a large set of such statements? We already have items like {{Q|3044234}}, but not yet a natural way to link to it from the item about California. -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 11:38, 11 February 2014 (UTC)
:: It is not very useful to split data about one topic across different items. And the topic of large data sets is not only about demographics but about economics and the sciences too.
:: A table or matrix property is an interesting suggestion. [[User:Snipre|Snipre]] ([[User talk:Snipre|{{int:Talkpagelinktext}}]]) 12:25, 11 February 2014 (UTC)
:::Wikipedia already splits the information into different ''articles'', and I'm afraid we have to follow, whether we like it or not. -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 13:41, 11 February 2014 (UTC)
:::: {{ping|Lavallen}} Which Wikipedia? The problem is that each wikipedia has its own principles about article splitting, so following the structure of some wikipedias and not that of others risks wikidata being rejected by wikipedias that would consider the wikidata structure a pressure to normalize their article structure according to the big wikipedias. I recently had this bad experience with the French wikipedia, where there is a strong feeling that wikidata will be a pressure to adopt the English data. Wikidata has to be as neutral as possible and to be careful about the specific features of each wikipedia in order to be accepted. The case of the references (bots sourcing data with "imported from") shows that we can easily lose the interest of some communities. A small survey on WP:fr shows quite large suspicion among the contributors about using data from wikidata. I know that WP:fr is often particular, but I am quite sure that other wikipedias have the same feeling. [[User:Snipre|Snipre]] ([[User talk:Snipre|{{int:Talkpagelinktext}}]]) 14:28, 11 February 2014 (UTC)
:::::The WPs that have articles like "Demographics of California", for example. Since California is English/Spanish-speaking, enwp and eswp are the most likely projects in which to find detailed articles about California. Just like Svwp has more articles about Stockholm than any other project. There are hundreds of articles about Stockholm, with different points of view, and most of them can only be found on svwp. And I expect to find more articles about Quebec and France in frwp than in any other project.
:::::We have to choose whether the article "Demographics of California" should deposit its basic information in the item for "California", or whether the projects that do not have such articles should collect such information from the "Demographics of California" item.
Both are possible, but a duplication of such information would not look good.
:::::Yes, there is a large suspicion against Wikidata on Svwp too. And such things as "imported from" will ''never, never'' be accepted as a source for anything other than very trivial things. Wikidata has been accepted as a repository for interwiki, and even ''I'' am still not pleased with how the infrastructure for statements looks, but I am sure it will improve. The fact is that I am only aware of two users from svwp (me and User:Esquilo) who are adding statements here. The rest are only adding sitelinks. -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 14:57, 11 February 2014 (UTC)
::Even if we did split off demographic data about every populated location in existence into its own item, that wouldn't actually fix the problem. We'd just have all the demographic items loaded with tens of thousands of statements, making those items impossible to load, use, or edit. I don't think splitting the data off is a good idea, but that's not really relevant to this issue in any case. --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 20:20, 11 February 2014 (UTC)
:If I had to guess, it's probably more of a UI/JavaScript issue than an internal issue. --'''[[User:Rschen7754|Rs]][[User talk:Rschen7754|chen]][[Special:Contributions/Rschen7754|7754]]''' 02:13, 12 February 2014 (UTC)
::Agree! I see no problem with collecting information from "slow" items in Wikipedia. -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 07:05, 12 February 2014 (UTC)
:::I don't understand this comment. --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 07:08, 12 February 2014 (UTC)
::::I see that it sometimes takes some time to "load" the page in the browser, but I see no problems of that kind when I use Wikidata statements in Wikipedia related to such items. -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 07:54, 12 February 2014 (UTC)
:::::Oh, I see. I'm not sure how well that scales, though. I imported some basic census data about languages spoken in a place and ran a bit of Lua to build a table out of it at [[w:Wikipedia:Wikidata/Wikidata Sandbox]], and it doesn't run very quickly. If the data were to be a hundred times that, I think it might be unusable at current speeds. --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 10:07, 12 February 2014 (UTC)
: Hey :) So as you know, we're already working on reducing the amount of JS considerably and moving more into the backend. This will improve performance in the near future. Additionally, we'll be loading more interface elements on demand. We'll be looking into making the default gadgets faster. We're moving more parts into caches where possible. This will improve the situation and allow workable items even if they are larger. I currently don't see us being able to handle items with thousands of data points, though. In those cases we'll either need to split them or think really hard about whether Wikidata is really the right place to store them. --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 11:41, 12 February 2014 (UTC)
:: I don't think splitting the data across items is really feasible in some cases. Simply omitting the data eliminates a rather large number of use cases for Wikidata.
For example, currently whenever a new census is done for a reasonably large country, as many as thousands of pages might need updating on various Wikipedias, as new data is published about the size of various linguistic, ethnic, religious, age, or economic groups in a country or subdivision. If we don't have space for anything more than a simple raw population number, then Wikidata won't be able to supply data to the relevant infoboxes and charts. There is a lot of data to handle per item, frequently requiring thousands of statements, especially if we keep all historical data from censuses in previous years. Perhaps the items could just not load or display certain statements until requested if there are, say, more than one hundred of a particular type? --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 22:13, 12 February 2014 (UTC)
:::"the items could just not load or display certain statements until requested...". To me, that looks like it can be solved simply by splitting the items? We know very little about how much time Scribunto needs to collect information from items other than the directly linked item, but I guess our software and hardware developers will work on such issues. -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 19:09, 13 February 2014 (UTC)
::::What would splitting the items look like, exactly? A whole bunch of separate items labelled things like "population statements for Oklahoma, part 1", "population statements for Oklahoma, part 2", "population statements for Oklahoma, part 149", etc? --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 21:58, 13 February 2014 (UTC)
:::::Why not something like:
<pre>
Census property: Oklahoma census 2010
P585: 2010
Census property: Oklahoma census 2005
P585: 2005
</pre>
:::::in the item about Oklahoma? Yes, it becomes many items, but I do not see any problem with that. This will probably only be done for entities that have heavy WP-notability (like Oklahoma). Statistics Whatever does not have such detailed information as the U.S. Census Bureau, and you will not have the same interest among the users in adding such information for ''Foo town'' with 276 inhabitants.
:::::A table in the item about Oklahoma would solve the UI problem (and it's a good idea so far), but I am not sure it solves the problem of access from Scribunto on the client. Maybe my idea will solve that? -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 08:06, 14 February 2014 (UTC)
::::::I'm not sure I understand your idea. Would there be separate items for, say, the data on Ontario in the Canadian Census? Or would all subdivisions reference the same census? Either way, the data wouldn't all easily fit in one item, assuming all of the census data goes on the item for the census (which I assume is what you're proposing?). A single census on a single division can have well over a thousand relevant statements. Can Scribunto or the UI handle that? --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 23:36, 16 February 2014 (UTC)
:::::::Yes, I propose that we should have one item for the Canada census, one for the Ontario census and one for the London, Ontario census. The alternative would be an extensive use of {{P|518}}, but I think that would make it more difficult for the bots that put the information here and for the Lua scripts that read it.
:::::::And it's our choice to divide the items by year or by "type" of census (census by language, ethnicity, sex, occupation, level of education, income level etc...). -- [[User:Lavallen|Lavallen]] ([[User talk:Lavallen|{{int:Talkpagelinktext}}]]) 07:16, 17 February 2014 (UTC)
::::::::Probably by both, and probably divided substantially further still. There is an enormous amount of data in these censuses, and excluding pretty much any part of them could be problematic.
::::::::Your proposal might work, but if we are at some point going to get a table datatype, that would really be preferable to hundreds of thousands of raw item statements. Similarly, if we do at some point have no limit on how many statements can be added to an item (perhaps via hiding some statements initially), I don't think we would need to split these things across items at all. {{Ping|Lydia Pintscher (WMDE)}} Are either of these things at all likely to happen? --[[User:Yair rand|Yair rand]] ([[User talk:Yair rand|{{int:Talkpagelinktext}}]]) 06:42, 20 February 2014 (UTC)
== Same Wikipedia link in two different Wikidata items? ==
[[:sk:Prometheus (časopis)]] is present in both [[Q12774688]] and [[Q13538701]]. How is this possible and how should it be fixed? This is the first time I have seen something like that. --[[User:Canyq|Canyq]] ([[User talk:Canyq|{{int:Talkpagelinktext}}]]) 05:47, 12 February 2014 (UTC)
: It happens sporadically, see [[Wikidata:True duplicates]]. --[[User:YMS|YMS]] ([[User talk:YMS|{{int:Talkpagelinktext}}]]) 06:56, 12 February 2014 (UTC)
:: I wasn't aware of that. Thank you very much for the info. --[[User:Canyq|Canyq]] ([[User talk:Canyq|{{int:Talkpagelinktext}}]]) 16:02, 12 February 2014 (UTC)
== Thought on the automated move function ==
(I hope this comes out sounding intelligible.) This may be a nice-to-have... On the client, when a page is moved, Wikibase (usually) updates the local link here to point to the new location. What would be nice is, if the item's previous title in the language of the affected page ''matches'' the previous article's title, that the item's title in that language also be changed. For example, a number of ship class items ''right after import'' into Wikidata went from (on enwiki) "classname class type" to "classname-class type". On Wikidata, their title had been imported as "classname class type", and this was not updated when the pages on the client were moved. Thoughts? --[[User:Izno|Izno]] ([[User talk:Izno|{{int:Talkpagelinktext}}]]) 15:08, 12 February 2014 (UTC)
: This sounds like an option. However, if pages are moved to disambiguate (for example from "Berlin" to "Berlin (Album)"), then the label shouldn't change to that. --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 15:16, 12 February 2014 (UTC)
::I think that might provide too many false positives to consider&mdash;there are more than a few pages with params as part of the article title. Hmm&hellip;. --[[User:Izno|Izno]] ([[User talk:Izno|{{int:Talkpagelinktext}}]]) 16:09, 12 February 2014 (UTC)
== Deleted item still in search results ==
I found that the deleted item [[Q9028640]] can still be found in search results ([https://www.wikidata.org/w/index.php?title=Special%3ASearch&profile=default&search=Q9028640&fulltext=Search try]). I don't know whether it is related to Wikidata, MediaWiki or CirrusSearch. --[[User:Stryn|Stryn]] ([[User talk:Stryn|{{int:Talkpagelinktext}}]]) 19:59, 16 February 2014 (UTC)
: I've reported it at [[bugzilla:61464]].
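Lavallen's per-census items would mean that a reader first resolves which item holds the data for a given region and year, and only then fetches statements from that much smaller item. A toy Java sketch of such a lookup (the class and its layout are illustrative only, not an implemented Wikidata feature):
<pre>
import java.util.HashMap;
import java.util.Map;

// Toy sketch of the proposed split: one item per region/year census,
// located through a small index, so the main item stays small.
public class CensusItemIndex {
    private final Map<String, String> itemByRegionAndYear = new HashMap<>();

    public void register(String region, int year, String itemId) {
        itemByRegionAndYear.put(region + "/" + year, itemId);
    }

    /** Returns the item that holds the census data, or null if none is known. */
    public String resolve(String region, int year) {
        return itemByRegionAndYear.get(region + "/" + year);
    }
}
</pre>
A bot or Lua module would then read statements only from the resolved item (say, an "Oklahoma census 2010" item) instead of from a single overloaded "Oklahoma" item.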
The search guys are usually very responsive so I expect a comment there soon. --[[User:Lydia Pintscher (WMDE)|Lydia Pintscher (WMDE)]] ([[User talk:Lydia Pintscher (WMDE)|{{int:Talkpagelinktext}}]]) 14:47, 17 February 2014 (UTC) 0d37v3n2u12iu9zhzuc1q0e0u6jxtsg wikitext text/x-wiki Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/wikidatawiki-20140210-index.html000066400000000000000000000355431444772566300311000ustar00rootroot00000000000000 wikidatawiki dump progress on 20140210


This is the Wikimedia dump service. Please read the copyrights information. See Meta:Data dumps for documentation on the provided data formats.

See all databases list.

Last dumped on 2014-01-23

Dump complete

Verify downloaded files against the MD5 checksums to check for corrupted files.
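A minimal, self-contained Java sketch of the verification step recommended above; the file name and expected hash are the first entry of the md5sums list below, and everything else is the Java standard library:
<pre>
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;

// Streams a downloaded dump file through MD5 and compares the digest
// with the value published in the md5sums list.
public class Md5Check {
    public static void main(String[] args) throws Exception {
        Path dump = Paths.get("wikidatawiki-20140210-site_stats.sql.gz");
        String expected = "4e7ecd8f9629618b841c873744626547"; // from md5sums.txt
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        try (InputStream in = Files.newInputStream(dump)) {
            byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                md5.update(buffer, 0, read);
            }
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : md5.digest()) {
            hex.append(String.format("%02x", b));
        }
        System.out.println(hex.toString().equals(expected) ? "OK" : "CORRUPTED");
    }
}
</pre>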

Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/wikidatawiki-20140210-md5sums.txt000066400000000000000000000050631444772566300312330ustar00rootroot000000000000004e7ecd8f9629618b841c873744626547 wikidatawiki-20140210-site_stats.sql.gz fcb2639180dc818c949afcf5176cd284 wikidatawiki-20140210-image.sql.gz 917ae6674cd916cda3c79a854d728f1e wikidatawiki-20140210-pagelinks.sql.gz 8e82bfc40a15647671c066a98cac4373 wikidatawiki-20140210-categorylinks.sql.gz 75999aac9c81c1e4950c9de1610b1b3c wikidatawiki-20140210-imagelinks.sql.gz 1e8bcc5550bd62749686a16643f560d1 wikidatawiki-20140210-templatelinks.sql.gz 3c1b62b14a422fb350a7380b9a12f207 wikidatawiki-20140210-externallinks.sql.gz 91ecc4cd65a7b780bca7a0dd98d74f4c wikidatawiki-20140210-langlinks.sql.gz 8ce95948d6ed6e0f187ba1980d55f97e wikidatawiki-20140210-interwiki.sql.gz a2daf44479ea7a785a500828d0267cee wikidatawiki-20140210-user_groups.sql.gz cc67805664ce9cbf1a5ac34c70677469 wikidatawiki-20140210-category.sql.gz eb38b901ce8ec83f068d12d6d893aecb wikidatawiki-20140210-page.sql.gz 64f640c5c1424dd26f14c27351b5477b wikidatawiki-20140210-page_restrictions.sql.gz 5770803589fbe08b8934f1af97cb1f72 wikidatawiki-20140210-page_props.sql.gz 889db73f61ef57afbd7b9b150517864c wikidatawiki-20140210-protected_titles.sql.gz eb8bae32805d960f96df319d5f5f241c wikidatawiki-20140210-redirect.sql.gz bc55dbe6be9fbfb763c4f5b5e92fd1ce wikidatawiki-20140210-iwlinks.sql.gz 413784399d0abf83d528abfee7a52bd0 wikidatawiki-20140210-all-titles-in-ns0.gz b52a2f4c1a315cf50312e5e4e474d456 wikidatawiki-20140210-all-titles.gz 1300e18c0da56e0c6056d62a95653460 wikidatawiki-20140210-abstract.xml b10a3d0bf9f13c004dd513960566fae1 wikidatawiki-20140210-stub-meta-history.xml.gz d7b204e5fa49a5d89e06fd75760cd1e4 wikidatawiki-20140210-stub-meta-current.xml.gz 0c900efd53580b3091f81517e12588be wikidatawiki-20140210-stub-articles.xml.gz e37f5153e5713b6729a953d17cd7358c wikidatawiki-20140210-pages-articles.xml.bz2 09f0c38c6409ac4765c19b7c771710ca wikidatawiki-20140210-pages-meta-current.xml.bz2 97a1f85d7f04b488e021ee6ef385c995 wikidatawiki-20140210-pages-logging.xml.gz c759d8bdb53dba9f57af5f66bfde0143 wikidatawiki-20140210-wb_items_per_site.sql.gz 895bab0b86d0da623682bc345bbba2f8 wikidatawiki-20140210-wb_terms.sql.gz 5f809b2394a3ce9773678e26107cbffc wikidatawiki-20140210-wb_entity_per_page.sql.gz 1a1eb045b5353b1fd901bd71a5d85b8f wikidatawiki-20140210-sites.sql.gz 56903a5d987bea0c93802f6bbb028009 wikidatawiki-20140210-pages-meta-history.xml.bz2 cfd325d2dc6a42932124f84e0e46d98b wikidatawiki-20140210-pages-meta-history.xml.7z a0214e1ad7797dee33177fbfc95fdffe wikidatawiki-20140210-pages-articles-multistream.xml.bz2 fa88ab8e2157decf0d2493d7a644f988 wikidatawiki-20140210-pages-articles-multistream-index.txt.bz2 Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/wikidatawiki-20140420-sites.sql000066400000000000000000005565611444772566300307660ustar00rootroot00000000000000-- MySQL dump 10.13 Distrib 5.5.35, for debian-linux-gnu (x86_64) -- -- Host: 10.64.0.9 Database: wikidatawiki -- ------------------------------------------------------ -- Server version 5.5.34-MariaDB-1~precise-log /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8 */; /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; /*!40103 SET TIME_ZONE='+00:00' */; /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; /*!40014 SET 
@OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- -- Table structure for table `sites` -- DROP TABLE IF EXISTS `sites`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `sites` ( `site_id` int(10) unsigned NOT NULL AUTO_INCREMENT, `site_global_key` varbinary(32) NOT NULL, `site_type` varbinary(32) NOT NULL, `site_group` varbinary(32) NOT NULL, `site_source` varbinary(32) NOT NULL, `site_language` varbinary(32) NOT NULL, `site_protocol` varbinary(32) NOT NULL, `site_domain` varbinary(255) NOT NULL, `site_data` blob NOT NULL, `site_forward` tinyint(1) NOT NULL, `site_config` blob NOT NULL, PRIMARY KEY (`site_id`), UNIQUE KEY `sites_global_key` (`site_global_key`), KEY `sites_type` (`site_type`), KEY `sites_group` (`site_group`), KEY `sites_source` (`site_source`), KEY `sites_language` (`site_language`), KEY `sites_protocol` (`site_protocol`), KEY `sites_domain` (`site_domain`), KEY `sites_forward` (`site_forward`) ) ENGINE=InnoDB AUTO_INCREMENT=881 DEFAULT CHARSET=binary; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Dumping data for table `sites` -- /*!40000 ALTER TABLE `sites` DISABLE KEYS */; INSERT INTO `sites` VALUES (1,'aawiki','mediawiki','wikipedia','local','aa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"http://aa.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"http://aa.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(2,'aawiktionary','mediawiki','wiktionary','local','aa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//aa.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//aa.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(3,'aawikibooks','mediawiki','wikibooks','local','aa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//aa.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//aa.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(4,'abwiki','mediawiki','wikipedia','local','ab','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ab.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ab.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(5,'abwiktionary','mediawiki','wiktionary','local','ab','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ab.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ab.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(6,'acewiki','mediawiki','wikipedia','local','ace','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ace.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ace.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(7,'afwiki','mediawiki','wikipedia','local','af','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//af.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//af.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(8,'afwiktionary','mediawiki','wiktionary','local','af','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//af.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//af.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(9,'afwikibooks','mediawiki','wikibooks','local','af','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//af.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//af.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(10,'afwikiquote','mediawiki','wikiquote','local','af','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//af.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//af.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(11,'akwiki','mediawiki','wikipedia','local','ak','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\
";s:23:\"//ak.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ak.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(12,'akwiktionary','mediawiki','wiktionary','local','ak','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ak.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ak.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(13,'akwikibooks','mediawiki','wikibooks','local','ak','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ak.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ak.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(14,'alswiki','mediawiki','wikipedia','local','als','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//als.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//als.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(15,'alswiktionary','mediawiki','wiktionary','local','als','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//als.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//als.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(16,'alswikibooks','mediawiki','wikibooks','local','als','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//als.wikibooks.org/w/$1\";s:9:\"page_path\";s:27:\"//als.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(17,'alswikiquote','mediawiki','wikiquote','local','als','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//als.wikiquote.org/w/$1\";s:9:\"page_path\";s:27:\"//als.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(18,'amwiki','mediawiki','wikipedia','local','am','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//am.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//am.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(19,'amwiktionary','mediawiki','wiktionary','local','am','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//am.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//am.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(20,'amwikiquote','mediawiki','wikiquote','local','am','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//am.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//am.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(21,'anwiki','mediawiki','wikipedia','local','an','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//an.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//an.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(22,'anwiktionary','mediawiki','wiktionary','local','an','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//an.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//an.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(23,'angwiki','mediawiki','wikipedia','local','ang','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ang.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ang.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(24,'angwiktionary','mediawiki','wiktionary','local','ang','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//ang.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//ang.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(25,'angwikibooks','mediawiki','wikibooks','local','ang','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ang.wikibooks.org/w/$1\";s:9:\"page_path\";s:27:\"//ang.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(26,'angwikiquote','mediawiki','wikiquote','local','ang','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ang.wikiquote.org/w/$1\";s:9:\"page_path\";s:27:\"//ang.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(27,'angwikisource','mediawiki','wikisource','local','ang','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//ang.wikisource.org/w/$1\";s:9:\"page_path\";s:28:\"//ang.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(28,'arwiki','mediawiki','wikipedia','local','ar','','.','a:1:{s:5:\"paths\";a:2:{
s:9:\"file_path\";s:23:\"//ar.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ar.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(29,'arwiktionary','mediawiki','wiktionary','local','ar','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ar.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ar.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(30,'arwikibooks','mediawiki','wikibooks','local','ar','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ar.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ar.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(31,'arwikinews','mediawiki','wikinews','local','ar','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//ar.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//ar.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(32,'arwikiquote','mediawiki','wikiquote','local','ar','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ar.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ar.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(33,'arwikisource','mediawiki','wikisource','local','ar','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ar.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ar.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(34,'arwikiversity','mediawiki','wikiversity','local','ar','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//ar.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//ar.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(35,'arcwiki','mediawiki','wikipedia','local','arc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//arc.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//arc.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(36,'arzwiki','mediawiki','wikipedia','local','arz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//arz.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//arz.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(37,'aswiki','mediawiki','wikipedia','local','as','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//as.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//as.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(38,'aswiktionary','mediawiki','wiktionary','local','as','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//as.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//as.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(39,'aswikibooks','mediawiki','wikibooks','local','as','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//as.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//as.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(40,'astwiki','mediawiki','wikipedia','local','ast','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ast.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ast.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(41,'astwiktionary','mediawiki','wiktionary','local','ast','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//ast.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//ast.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(42,'astwikibooks','mediawiki','wikibooks','local','ast','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ast.wikibooks.org/w/$1\";s:9:\"page_path\";s:27:\"//ast.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(43,'astwikiquote','mediawiki','wikiquote','local','ast','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ast.wikiquote.org/w/$1\";s:9:\"page_path\";s:27:\"//ast.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(44,'avwiki','mediawiki','wikipedia','local','av','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//av.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//av.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(45,'avwiktionary','mediawiki','wiktionary','local','av','','.','a:1:{s:5:\"paths\";a:2
:{s:9:\"file_path\";s:24:\"//av.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//av.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(46,'aywiki','mediawiki','wikipedia','local','ay','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ay.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ay.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(47,'aywiktionary','mediawiki','wiktionary','local','ay','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ay.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ay.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(48,'aywikibooks','mediawiki','wikibooks','local','ay','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ay.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ay.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(49,'azwiki','mediawiki','wikipedia','local','az','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//az.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//az.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(50,'azwiktionary','mediawiki','wiktionary','local','az','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//az.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//az.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(51,'azwikibooks','mediawiki','wikibooks','local','az','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//az.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//az.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(52,'azwikiquote','mediawiki','wikiquote','local','az','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//az.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//az.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(53,'azwikisource','mediawiki','wikisource','local','az','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//az.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//az.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(54,'bawiki','mediawiki','wikipedia','local','ba','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ba.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ba.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(55,'bawikibooks','mediawiki','wikibooks','local','ba','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ba.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ba.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(56,'barwiki','mediawiki','wikipedia','local','bar','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bar.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//bar.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(57,'bat_smgwiki','mediawiki','wikipedia','local','bat-smg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//bat-smg.wikipedia.org/w/$1\";s:9:\"page_path\";s:31:\"//bat-smg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(58,'bclwiki','mediawiki','wikipedia','local','bcl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bcl.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//bcl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(59,'bewiki','mediawiki','wikipedia','local','be','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//be.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//be.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(60,'bewiktionary','mediawiki','wiktionary','local','be','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//be.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//be.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(61,'bewikibooks','mediawiki','wikibooks','local','be','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//be.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//be.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(62,'bewikiquote','mediawiki','wikiquote','local','be','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"
file_path\";s:23:\"//be.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//be.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(63,'bewikisource','mediawiki','wikisource','local','be','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//be.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//be.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(64,'be_x_oldwiki','mediawiki','wikipedia','local','be-x-old','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//be-x-old.wikipedia.org/w/$1\";s:9:\"page_path\";s:32:\"//be-x-old.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(65,'bgwiki','mediawiki','wikipedia','local','bg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bg.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(66,'bgwiktionary','mediawiki','wiktionary','local','bg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bg.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//bg.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(67,'bgwikibooks','mediawiki','wikibooks','local','bg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bg.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//bg.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(68,'bgwikinews','mediawiki','wikinews','local','bg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//bg.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//bg.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(69,'bgwikiquote','mediawiki','wikiquote','local','bg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bg.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//bg.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(70,'bgwikisource','mediawiki','wikisource','local','bg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bg.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//bg.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(71,'bhwiki','mediawiki','wikipedia','local','bh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bh.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bh.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(72,'bhwiktionary','mediawiki','wiktionary','local','bh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bh.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//bh.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(73,'biwiki','mediawiki','wikipedia','local','bi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bi.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(74,'biwiktionary','mediawiki','wiktionary','local','bi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bi.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//bi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(75,'biwikibooks','mediawiki','wikibooks','local','bi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bi.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//bi.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(76,'bjnwiki','mediawiki','wikipedia','local','bjn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bjn.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//bjn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(77,'bmwiki','mediawiki','wikipedia','local','bm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bm.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bm.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(78,'bmwiktionary','mediawiki','wiktionary','local','bm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bm.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//bm.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(79,'bmwikibooks','mediawiki','wikibooks','local','bm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"f
ile_path\";s:23:\"//bm.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//bm.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(80,'bmwikiquote','mediawiki','wikiquote','local','bm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bm.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//bm.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(81,'bnwiki','mediawiki','wikipedia','local','bn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(82,'bnwiktionary','mediawiki','wiktionary','local','bn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//bn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(83,'bnwikibooks','mediawiki','wikibooks','local','bn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bn.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//bn.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(84,'bnwikisource','mediawiki','wikisource','local','bn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bn.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//bn.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(85,'bowiki','mediawiki','wikipedia','local','bo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(86,'bowiktionary','mediawiki','wiktionary','local','bo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bo.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//bo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(87,'bowikibooks','mediawiki','wikibooks','local','bo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bo.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//bo.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(88,'bpywiki','mediawiki','wikipedia','local','bpy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bpy.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//bpy.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(89,'brwiki','mediawiki','wikipedia','local','br','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//br.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//br.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(90,'brwiktionary','mediawiki','wiktionary','local','br','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//br.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//br.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(91,'brwikiquote','mediawiki','wikiquote','local','br','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//br.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//br.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(92,'brwikisource','mediawiki','wikisource','local','br','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//br.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//br.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(93,'bswiki','mediawiki','wikipedia','local','bs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bs.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bs.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(94,'bswiktionary','mediawiki','wiktionary','local','bs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bs.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//bs.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(95,'bswikibooks','mediawiki','wikibooks','local','bs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bs.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//bs.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(96,'bswikinews','mediawiki','wikinews','local','bs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"
//bs.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//bs.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(97,'bswikiquote','mediawiki','wikiquote','local','bs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bs.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//bs.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(98,'bswikisource','mediawiki','wikisource','local','bs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bs.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//bs.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(99,'bugwiki','mediawiki','wikipedia','local','bug','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bug.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//bug.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(100,'bxrwiki','mediawiki','wikipedia','local','bxr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//bxr.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//bxr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(101,'cawiki','mediawiki','wikipedia','local','ca','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ca.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ca.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(102,'cawiktionary','mediawiki','wiktionary','local','ca','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ca.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ca.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(103,'cawikibooks','mediawiki','wikibooks','local','ca','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ca.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ca.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(104,'cawikinews','mediawiki','wikinews','local','ca','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//ca.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//ca.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(105,'cawikiquote','mediawiki','wikiquote','local','ca','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ca.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ca.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(106,'cawikisource','mediawiki','wikisource','local','ca','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ca.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ca.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(107,'cbk_zamwiki','mediawiki','wikipedia','local','cbk-zam','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//cbk-zam.wikipedia.org/w/$1\";s:9:\"page_path\";s:31:\"//cbk-zam.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(108,'cdowiki','mediawiki','wikipedia','local','cdo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//cdo.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//cdo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(109,'cewiki','mediawiki','wikipedia','local','ce','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ce.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ce.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(110,'cebwiki','mediawiki','wikipedia','local','ceb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ceb.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ceb.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(111,'chwiki','mediawiki','wikipedia','local','ch','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ch.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ch.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(112,'chwiktionary','mediawiki','wiktionary','local','ch','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ch.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ch.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(113,'chwikibooks','mediawiki','wikibooks','local','ch','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\
"//ch.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ch.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(114,'chowiki','mediawiki','wikipedia','local','cho','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//cho.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//cho.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(115,'chrwiki','mediawiki','wikipedia','local','chr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//chr.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//chr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(116,'chrwiktionary','mediawiki','wiktionary','local','chr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//chr.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//chr.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(117,'chywiki','mediawiki','wikipedia','local','chy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//chy.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//chy.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(118,'ckbwiki','mediawiki','wikipedia','local','ckb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ckb.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ckb.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(119,'cowiki','mediawiki','wikipedia','local','co','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//co.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//co.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(120,'cowiktionary','mediawiki','wiktionary','local','co','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//co.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//co.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(121,'cowikibooks','mediawiki','wikibooks','local','co','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//co.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//co.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(122,'cowikiquote','mediawiki','wikiquote','local','co','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//co.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//co.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(123,'crwiki','mediawiki','wikipedia','local','cr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cr.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//cr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(124,'crwiktionary','mediawiki','wiktionary','local','cr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//cr.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//cr.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(125,'crwikiquote','mediawiki','wikiquote','local','cr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cr.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//cr.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(126,'crhwiki','mediawiki','wikipedia','local','crh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//crh.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//crh.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(127,'cswiki','mediawiki','wikipedia','local','cs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cs.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//cs.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(128,'cswiktionary','mediawiki','wiktionary','local','cs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//cs.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//cs.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(129,'cswikibooks','mediawiki','wikibooks','local','cs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cs.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//cs.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(130,'cswikinews','mediawiki','wikinews','local','cs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//c
s.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//cs.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(131,'cswikiquote','mediawiki','wikiquote','local','cs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cs.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//cs.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(132,'cswikisource','mediawiki','wikisource','local','cs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//cs.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//cs.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(133,'cswikiversity','mediawiki','wikiversity','local','cs','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//cs.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//cs.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(134,'csbwiki','mediawiki','wikipedia','local','csb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//csb.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//csb.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(135,'csbwiktionary','mediawiki','wiktionary','local','csb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//csb.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//csb.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(136,'cuwiki','mediawiki','wikipedia','local','cu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cu.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//cu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(137,'cvwiki','mediawiki','wikipedia','local','cv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//cv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(138,'cvwikibooks','mediawiki','wikibooks','local','cv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cv.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//cv.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(139,'cywiki','mediawiki','wikipedia','local','cy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cy.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//cy.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(140,'cywiktionary','mediawiki','wiktionary','local','cy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//cy.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//cy.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(141,'cywikibooks','mediawiki','wikibooks','local','cy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cy.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//cy.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(142,'cywikiquote','mediawiki','wikiquote','local','cy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//cy.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//cy.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(143,'cywikisource','mediawiki','wikisource','local','cy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//cy.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//cy.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(144,'dawiki','mediawiki','wikipedia','local','da','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//da.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//da.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(145,'dawiktionary','mediawiki','wiktionary','local','da','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//da.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//da.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(146,'dawikibooks','mediawiki','wikibooks','local','da','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//da.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//da.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(147,'dawikiquote','mediawiki','wikiquote','local','da','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:2
3:\"//da.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//da.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(148,'dawikisource','mediawiki','wikisource','local','da','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//da.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//da.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(149,'dewiki','mediawiki','wikipedia','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//de.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//de.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(150,'dewiktionary','mediawiki','wiktionary','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//de.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//de.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(151,'dewikibooks','mediawiki','wikibooks','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//de.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//de.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(152,'dewikinews','mediawiki','wikinews','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//de.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//de.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(153,'dewikiquote','mediawiki','wikiquote','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//de.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//de.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(154,'dewikisource','mediawiki','wikisource','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//de.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//de.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(155,'dewikiversity','mediawiki','wikiversity','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//de.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//de.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(156,'dewikivoyage','mediawiki','wikivoyage','local','de','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//de.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//de.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(157,'diqwiki','mediawiki','wikipedia','local','diq','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//diq.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//diq.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(158,'dsbwiki','mediawiki','wikipedia','local','dsb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//dsb.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//dsb.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(159,'dvwiki','mediawiki','wikipedia','local','dv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//dv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//dv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(160,'dvwiktionary','mediawiki','wiktionary','local','dv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//dv.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//dv.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(161,'dzwiki','mediawiki','wikipedia','local','dz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//dz.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//dz.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(162,'dzwiktionary','mediawiki','wiktionary','local','dz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//dz.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//dz.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(163,'eewiki','mediawiki','wikipedia','local','ee','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ee.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ee.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(164,'elwiki','mediawiki','wikipedia','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23
:\"//el.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//el.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(165,'elwiktionary','mediawiki','wiktionary','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//el.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//el.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(166,'elwikibooks','mediawiki','wikibooks','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//el.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//el.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(167,'elwikinews','mediawiki','wikinews','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//el.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//el.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(168,'elwikiquote','mediawiki','wikiquote','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//el.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//el.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(169,'elwikisource','mediawiki','wikisource','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//el.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//el.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(170,'elwikiversity','mediawiki','wikiversity','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//el.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//el.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(171,'emlwiki','mediawiki','wikipedia','local','eml','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//eml.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//eml.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(172,'enwiki','mediawiki','wikipedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//en.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//en.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(173,'enwiktionary','mediawiki','wiktionary','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//en.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//en.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(174,'enwikibooks','mediawiki','wikibooks','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//en.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//en.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(175,'enwikinews','mediawiki','wikinews','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//en.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//en.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(176,'enwikiquote','mediawiki','wikiquote','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//en.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//en.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(177,'enwikisource','mediawiki','wikisource','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//en.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//en.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(178,'enwikiversity','mediawiki','wikiversity','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//en.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//en.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(179,'enwikivoyage','mediawiki','wikivoyage','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//en.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//en.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(180,'eowiki','mediawiki','wikipedia','local','eo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//eo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//eo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(181,'eowiktionary','mediawiki','wiktionary','local','eo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\
"file_path\";s:24:\"//eo.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//eo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(182,'eowikibooks','mediawiki','wikibooks','local','eo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//eo.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//eo.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(183,'eowikinews','mediawiki','wikinews','local','eo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//eo.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//eo.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(184,'eowikiquote','mediawiki','wikiquote','local','eo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//eo.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//eo.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(185,'eowikisource','mediawiki','wikisource','local','eo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//eo.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//eo.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(186,'eswiki','mediawiki','wikipedia','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//es.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//es.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(187,'eswiktionary','mediawiki','wiktionary','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//es.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//es.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(188,'eswikibooks','mediawiki','wikibooks','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//es.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//es.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(189,'eswikinews','mediawiki','wikinews','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//es.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//es.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(190,'eswikiquote','mediawiki','wikiquote','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//es.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//es.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(191,'eswikisource','mediawiki','wikisource','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//es.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//es.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(192,'eswikiversity','mediawiki','wikiversity','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//es.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//es.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(193,'etwiki','mediawiki','wikipedia','local','et','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//et.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//et.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(194,'etwiktionary','mediawiki','wiktionary','local','et','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//et.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//et.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(195,'etwikibooks','mediawiki','wikibooks','local','et','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//et.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//et.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(196,'etwikiquote','mediawiki','wikiquote','local','et','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//et.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//et.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(197,'etwikisource','mediawiki','wikisource','local','et','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//et.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//et.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(198,'euwiki','mediawiki','wikipedia','local','eu','','.','a:1:{s:5:\"paths\";a:2:{
s:9:\"file_path\";s:23:\"//eu.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//eu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(199,'euwiktionary','mediawiki','wiktionary','local','eu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//eu.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//eu.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(200,'euwikibooks','mediawiki','wikibooks','local','eu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//eu.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//eu.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(201,'euwikiquote','mediawiki','wikiquote','local','eu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//eu.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//eu.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(202,'extwiki','mediawiki','wikipedia','local','ext','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ext.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ext.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(203,'fawiki','mediawiki','wikipedia','local','fa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fa.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//fa.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(204,'fawiktionary','mediawiki','wiktionary','local','fa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fa.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//fa.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(205,'fawikibooks','mediawiki','wikibooks','local','fa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fa.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//fa.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(206,'fawikinews','mediawiki','wikinews','local','fa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//fa.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//fa.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(207,'fawikiquote','mediawiki','wikiquote','local','fa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fa.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//fa.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(208,'fawikisource','mediawiki','wikisource','local','fa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fa.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//fa.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(209,'ffwiki','mediawiki','wikipedia','local','ff','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ff.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ff.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(210,'fiwiki','mediawiki','wikipedia','local','fi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fi.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//fi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(211,'fiwiktionary','mediawiki','wiktionary','local','fi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fi.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//fi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(212,'fiwikibooks','mediawiki','wikibooks','local','fi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fi.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//fi.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(213,'fiwikinews','mediawiki','wikinews','local','fi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//fi.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//fi.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(214,'fiwikiquote','mediawiki','wikiquote','local','fi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fi.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//fi.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(215,'fiwikisource','mediawiki','wikisource','local','fi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"fi
le_path\";s:24:\"//fi.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//fi.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(216,'fiwikiversity','mediawiki','wikiversity','local','fi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//fi.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//fi.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(217,'fiu_vrowiki','mediawiki','wikipedia','local','fiu-vro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//fiu-vro.wikipedia.org/w/$1\";s:9:\"page_path\";s:31:\"//fiu-vro.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(218,'fjwiki','mediawiki','wikipedia','local','fj','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fj.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//fj.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(219,'fjwiktionary','mediawiki','wiktionary','local','fj','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fj.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//fj.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(220,'fowiki','mediawiki','wikipedia','local','fo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//fo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(221,'fowiktionary','mediawiki','wiktionary','local','fo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fo.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//fo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(222,'fowikisource','mediawiki','wikisource','local','fo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fo.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//fo.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(223,'frwiki','mediawiki','wikipedia','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fr.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//fr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(224,'frwiktionary','mediawiki','wiktionary','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fr.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//fr.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(225,'frwikibooks','mediawiki','wikibooks','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fr.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//fr.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(226,'frwikinews','mediawiki','wikinews','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//fr.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//fr.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(227,'frwikiquote','mediawiki','wikiquote','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fr.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//fr.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(228,'frwikisource','mediawiki','wikisource','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fr.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//fr.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(229,'frwikiversity','mediawiki','wikiversity','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//fr.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//fr.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(230,'frwikivoyage','mediawiki','wikivoyage','local','fr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fr.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//fr.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(231,'frpwiki','mediawiki','wikipedia','local','frp','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//frp.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//frp.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(232,'frrwiki','mediawiki','wikipedia','local','frr','','.','
a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//frr.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//frr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(233,'furwiki','mediawiki','wikipedia','local','fur','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fur.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//fur.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(234,'fywiki','mediawiki','wikipedia','local','fy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fy.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//fy.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(235,'fywiktionary','mediawiki','wiktionary','local','fy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fy.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//fy.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(236,'fywikibooks','mediawiki','wikibooks','local','fy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fy.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//fy.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(237,'gawiki','mediawiki','wikipedia','local','ga','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ga.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ga.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(238,'gawiktionary','mediawiki','wiktionary','local','ga','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ga.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ga.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(239,'gawikibooks','mediawiki','wikibooks','local','ga','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ga.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ga.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(240,'gawikiquote','mediawiki','wikiquote','local','ga','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ga.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ga.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(241,'gagwiki','mediawiki','wikipedia','local','gag','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gag.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//gag.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(242,'ganwiki','mediawiki','wikipedia','local','gan','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gan.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//gan.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(243,'gdwiki','mediawiki','wikipedia','local','gd','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gd.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//gd.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(244,'gdwiktionary','mediawiki','wiktionary','local','gd','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gd.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//gd.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(245,'glwiki','mediawiki','wikipedia','local','gl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gl.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//gl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(246,'glwiktionary','mediawiki','wiktionary','local','gl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gl.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//gl.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(247,'glwikibooks','mediawiki','wikibooks','local','gl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gl.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//gl.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(248,'glwikiquote','mediawiki','wikiquote','local','gl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gl.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//gl.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(249,'glwikisource','mediawiki','wikisource','local','gl','','.','a:1:{
s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gl.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//gl.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(250,'glkwiki','mediawiki','wikipedia','local','glk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//glk.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//glk.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(251,'gnwiki','mediawiki','wikipedia','local','gn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//gn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(252,'gnwiktionary','mediawiki','wiktionary','local','gn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//gn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(253,'gnwikibooks','mediawiki','wikibooks','local','gn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gn.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//gn.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(254,'gotwiki','mediawiki','wikipedia','local','got','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//got.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//got.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(255,'gotwikibooks','mediawiki','wikibooks','local','got','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//got.wikibooks.org/w/$1\";s:9:\"page_path\";s:27:\"//got.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(256,'guwiki','mediawiki','wikipedia','local','gu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gu.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//gu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(257,'guwiktionary','mediawiki','wiktionary','local','gu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gu.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//gu.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(258,'guwikibooks','mediawiki','wikibooks','local','gu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gu.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//gu.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(259,'guwikiquote','mediawiki','wikiquote','local','gu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gu.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//gu.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(260,'guwikisource','mediawiki','wikisource','local','gu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gu.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//gu.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(261,'gvwiki','mediawiki','wikipedia','local','gv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//gv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//gv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(262,'gvwiktionary','mediawiki','wiktionary','local','gv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//gv.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//gv.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(263,'hawiki','mediawiki','wikipedia','local','ha','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ha.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ha.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(264,'hawiktionary','mediawiki','wiktionary','local','ha','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ha.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ha.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(265,'hakwiki','mediawiki','wikipedia','local','hak','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hak.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//hak.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(266,'hawwiki','mediawiki','wikipedia','local','haw','','.','a:1:{s:
5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//haw.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//haw.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(267,'hewiki','mediawiki','wikipedia','local','he','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//he.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//he.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(268,'hewiktionary','mediawiki','wiktionary','local','he','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//he.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//he.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(269,'hewikibooks','mediawiki','wikibooks','local','he','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//he.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//he.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(270,'hewikinews','mediawiki','wikinews','local','he','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//he.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//he.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(271,'hewikiquote','mediawiki','wikiquote','local','he','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//he.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//he.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(272,'hewikisource','mediawiki','wikisource','local','he','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//he.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//he.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(273,'hiwiki','mediawiki','wikipedia','local','hi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hi.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//hi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(274,'hiwiktionary','mediawiki','wiktionary','local','hi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hi.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//hi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(275,'hiwikibooks','mediawiki','wikibooks','local','hi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hi.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//hi.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(276,'hiwikiquote','mediawiki','wikiquote','local','hi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hi.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//hi.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(277,'hifwiki','mediawiki','wikipedia','local','hif','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hif.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//hif.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(278,'howiki','mediawiki','wikipedia','local','ho','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ho.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ho.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(279,'hrwiki','mediawiki','wikipedia','local','hr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hr.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//hr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(280,'hrwiktionary','mediawiki','wiktionary','local','hr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hr.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//hr.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(281,'hrwikibooks','mediawiki','wikibooks','local','hr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hr.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//hr.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(282,'hrwikiquote','mediawiki','wikiquote','local','hr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hr.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//hr.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(283,'hrwikisource','mediawiki','wikisource','local','hr','','.','a:1:{s:5:\"pat
hs\";a:2:{s:9:\"file_path\";s:24:\"//hr.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//hr.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(284,'hsbwiki','mediawiki','wikipedia','local','hsb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hsb.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//hsb.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(285,'hsbwiktionary','mediawiki','wiktionary','local','hsb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//hsb.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//hsb.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(286,'htwiki','mediawiki','wikipedia','local','ht','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ht.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ht.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(287,'htwikisource','mediawiki','wikisource','local','ht','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ht.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ht.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(288,'huwiki','mediawiki','wikipedia','local','hu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hu.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//hu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(289,'huwiktionary','mediawiki','wiktionary','local','hu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hu.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//hu.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(290,'huwikibooks','mediawiki','wikibooks','local','hu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hu.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//hu.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(291,'huwikinews','mediawiki','wikinews','local','hu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//hu.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//hu.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(292,'huwikiquote','mediawiki','wikiquote','local','hu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hu.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//hu.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(293,'huwikisource','mediawiki','wikisource','local','hu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hu.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//hu.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(294,'hywiki','mediawiki','wikipedia','local','hy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hy.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//hy.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(295,'hywiktionary','mediawiki','wiktionary','local','hy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hy.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//hy.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(296,'hywikibooks','mediawiki','wikibooks','local','hy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hy.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//hy.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(297,'hywikiquote','mediawiki','wikiquote','local','hy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hy.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//hy.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(298,'hywikisource','mediawiki','wikisource','local','hy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//hy.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//hy.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(299,'hzwiki','mediawiki','wikipedia','local','hz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//hz.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//hz.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(300,'iawiki','mediawiki','wikipedia','local','ia','','.','a:1:{s:5:\"paths
\";a:2:{s:9:\"file_path\";s:23:\"//ia.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ia.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(301,'iawiktionary','mediawiki','wiktionary','local','ia','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ia.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ia.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(302,'iawikibooks','mediawiki','wikibooks','local','ia','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ia.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ia.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(303,'idwiki','mediawiki','wikipedia','local','id','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//id.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//id.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(304,'idwiktionary','mediawiki','wiktionary','local','id','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//id.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//id.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(305,'idwikibooks','mediawiki','wikibooks','local','id','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//id.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//id.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(306,'idwikiquote','mediawiki','wikiquote','local','id','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//id.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//id.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(307,'idwikisource','mediawiki','wikisource','local','id','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//id.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//id.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(308,'iewiki','mediawiki','wikipedia','local','ie','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ie.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ie.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(309,'iewiktionary','mediawiki','wiktionary','local','ie','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ie.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ie.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(310,'iewikibooks','mediawiki','wikibooks','local','ie','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ie.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ie.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(311,'igwiki','mediawiki','wikipedia','local','ig','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ig.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ig.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(312,'iiwiki','mediawiki','wikipedia','local','ii','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ii.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ii.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(313,'ikwiki','mediawiki','wikipedia','local','ik','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ik.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ik.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(314,'ikwiktionary','mediawiki','wiktionary','local','ik','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ik.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ik.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(315,'ilowiki','mediawiki','wikipedia','local','ilo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ilo.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ilo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(316,'iowiki','mediawiki','wikipedia','local','io','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//io.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//io.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(317,'iowiktionary','mediawiki','wiktionary','local','io','','.','a:1:{s:5:\"paths\";a:2:{s:9
:\"file_path\";s:24:\"//io.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//io.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(318,'iswiki','mediawiki','wikipedia','local','is','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//is.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//is.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(319,'iswiktionary','mediawiki','wiktionary','local','is','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//is.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//is.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(320,'iswikibooks','mediawiki','wikibooks','local','is','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//is.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//is.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(321,'iswikiquote','mediawiki','wikiquote','local','is','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//is.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//is.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(322,'iswikisource','mediawiki','wikisource','local','is','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//is.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//is.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(323,'itwiki','mediawiki','wikipedia','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//it.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//it.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(324,'itwiktionary','mediawiki','wiktionary','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//it.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//it.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(325,'itwikibooks','mediawiki','wikibooks','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//it.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//it.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(326,'itwikinews','mediawiki','wikinews','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//it.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//it.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(327,'itwikiquote','mediawiki','wikiquote','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//it.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//it.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(328,'itwikisource','mediawiki','wikisource','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//it.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//it.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(329,'itwikiversity','mediawiki','wikiversity','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//it.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//it.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(330,'itwikivoyage','mediawiki','wikivoyage','local','it','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//it.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//it.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(331,'iuwiki','mediawiki','wikipedia','local','iu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//iu.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//iu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(332,'iuwiktionary','mediawiki','wiktionary','local','iu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//iu.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//iu.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(333,'jawiki','mediawiki','wikipedia','local','ja','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ja.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ja.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(334,'jawiktionary','mediawiki','wiktionary','local','ja','','.','a:1:{s:5:\"paths\
";a:2:{s:9:\"file_path\";s:24:\"//ja.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ja.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(335,'jawikibooks','mediawiki','wikibooks','local','ja','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ja.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ja.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(336,'jawikinews','mediawiki','wikinews','local','ja','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//ja.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//ja.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(337,'jawikiquote','mediawiki','wikiquote','local','ja','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ja.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ja.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(338,'jawikisource','mediawiki','wikisource','local','ja','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ja.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ja.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(339,'jawikiversity','mediawiki','wikiversity','local','ja','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//ja.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//ja.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(340,'jbowiki','mediawiki','wikipedia','local','jbo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//jbo.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//jbo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(341,'jbowiktionary','mediawiki','wiktionary','local','jbo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//jbo.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//jbo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(342,'jvwiki','mediawiki','wikipedia','local','jv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//jv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//jv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(343,'jvwiktionary','mediawiki','wiktionary','local','jv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//jv.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//jv.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(344,'kawiki','mediawiki','wikipedia','local','ka','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ka.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ka.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(345,'kawiktionary','mediawiki','wiktionary','local','ka','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ka.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ka.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(346,'kawikibooks','mediawiki','wikibooks','local','ka','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ka.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ka.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(347,'kawikiquote','mediawiki','wikiquote','local','ka','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ka.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ka.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(348,'kaawiki','mediawiki','wikipedia','local','kaa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kaa.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//kaa.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(349,'kabwiki','mediawiki','wikipedia','local','kab','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kab.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//kab.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(350,'kbdwiki','mediawiki','wikipedia','local','kbd','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kbd.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//kbd.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(351,'kgwiki','mediawiki','wikipedia','local','kg','','.','a:1:{s:5:\"p
aths\";a:2:{s:9:\"file_path\";s:23:\"//kg.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(352,'kiwiki','mediawiki','wikipedia','local','ki','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ki.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ki.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(353,'kjwiki','mediawiki','wikipedia','local','kj','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kj.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kj.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(354,'kkwiki','mediawiki','wikipedia','local','kk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kk.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kk.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(355,'kkwiktionary','mediawiki','wiktionary','local','kk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kk.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//kk.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(356,'kkwikibooks','mediawiki','wikibooks','local','kk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kk.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//kk.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(357,'kkwikiquote','mediawiki','wikiquote','local','kk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kk.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//kk.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(358,'klwiki','mediawiki','wikipedia','local','kl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kl.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(359,'klwiktionary','mediawiki','wiktionary','local','kl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kl.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//kl.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(360,'kmwiki','mediawiki','wikipedia','local','km','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//km.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//km.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(361,'kmwiktionary','mediawiki','wiktionary','local','km','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//km.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//km.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(362,'kmwikibooks','mediawiki','wikibooks','local','km','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//km.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//km.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(363,'knwiki','mediawiki','wikipedia','local','kn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(364,'knwiktionary','mediawiki','wiktionary','local','kn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//kn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(365,'knwikibooks','mediawiki','wikibooks','local','kn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kn.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//kn.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(366,'knwikiquote','mediawiki','wikiquote','local','kn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kn.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//kn.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(367,'knwikisource','mediawiki','wikisource','local','kn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kn.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//kn.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(368,'kowiki','mediawiki','wikipedia','local','ko','','.','a:1:{s:5:\"paths\";a:2:{s:9:\
"file_path\";s:23:\"//ko.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ko.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(369,'kowiktionary','mediawiki','wiktionary','local','ko','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ko.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ko.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(370,'kowikibooks','mediawiki','wikibooks','local','ko','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ko.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ko.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(371,'kowikinews','mediawiki','wikinews','local','ko','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//ko.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//ko.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(372,'kowikiquote','mediawiki','wikiquote','local','ko','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ko.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ko.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(373,'kowikisource','mediawiki','wikisource','local','ko','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ko.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ko.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(374,'koiwiki','mediawiki','wikipedia','local','koi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//koi.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//koi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(375,'krwiki','mediawiki','wikipedia','local','kr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kr.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(376,'krwikiquote','mediawiki','wikiquote','local','kr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kr.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//kr.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(377,'krcwiki','mediawiki','wikipedia','local','krc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//krc.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//krc.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(378,'kswiki','mediawiki','wikipedia','local','ks','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ks.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ks.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(379,'kswiktionary','mediawiki','wiktionary','local','ks','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ks.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ks.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(380,'kswikibooks','mediawiki','wikibooks','local','ks','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ks.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ks.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(381,'kswikiquote','mediawiki','wikiquote','local','ks','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ks.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ks.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(382,'kshwiki','mediawiki','wikipedia','local','ksh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ksh.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ksh.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(383,'kuwiki','mediawiki','wikipedia','local','ku','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ku.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ku.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(384,'kuwiktionary','mediawiki','wiktionary','local','ku','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ku.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ku.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(385,'kuwikibooks','mediawiki','wikibooks','local','ku','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_pa
th\";s:23:\"//ku.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ku.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(386,'kuwikiquote','mediawiki','wikiquote','local','ku','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ku.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ku.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(387,'kvwiki','mediawiki','wikipedia','local','kv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(388,'kwwiki','mediawiki','wikipedia','local','kw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kw.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//kw.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(389,'kwwiktionary','mediawiki','wiktionary','local','kw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//kw.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//kw.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(390,'kwwikiquote','mediawiki','wikiquote','local','kw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//kw.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//kw.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(391,'kywiki','mediawiki','wikipedia','local','ky','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ky.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ky.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(392,'kywiktionary','mediawiki','wiktionary','local','ky','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ky.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ky.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(393,'kywikibooks','mediawiki','wikibooks','local','ky','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ky.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ky.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(394,'kywikiquote','mediawiki','wikiquote','local','ky','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ky.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ky.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(395,'lawiki','mediawiki','wikipedia','local','la','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//la.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//la.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(396,'lawiktionary','mediawiki','wiktionary','local','la','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//la.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//la.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(397,'lawikibooks','mediawiki','wikibooks','local','la','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//la.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//la.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(398,'lawikiquote','mediawiki','wikiquote','local','la','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//la.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//la.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(399,'lawikisource','mediawiki','wikisource','local','la','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//la.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//la.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(400,'ladwiki','mediawiki','wikipedia','local','lad','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lad.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//lad.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(401,'lbwiki','mediawiki','wikipedia','local','lb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lb.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//lb.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(402,'lbwiktionary','mediawiki','wiktionary','local','lb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:
\"//lb.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//lb.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(403,'lbwikibooks','mediawiki','wikibooks','local','lb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lb.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//lb.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(404,'lbwikiquote','mediawiki','wikiquote','local','lb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lb.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//lb.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(405,'lbewiki','mediawiki','wikipedia','local','lbe','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lbe.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//lbe.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(406,'lezwiki','mediawiki','wikipedia','local','lez','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lez.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//lez.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(407,'lgwiki','mediawiki','wikipedia','local','lg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lg.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//lg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(408,'liwiki','mediawiki','wikipedia','local','li','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//li.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//li.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(409,'liwiktionary','mediawiki','wiktionary','local','li','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//li.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//li.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(410,'liwikibooks','mediawiki','wikibooks','local','li','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//li.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//li.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(411,'liwikiquote','mediawiki','wikiquote','local','li','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//li.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//li.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(412,'liwikisource','mediawiki','wikisource','local','li','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//li.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//li.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(413,'lijwiki','mediawiki','wikipedia','local','lij','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lij.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//lij.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(414,'lmowiki','mediawiki','wikipedia','local','lmo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lmo.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//lmo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(415,'lnwiki','mediawiki','wikipedia','local','ln','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ln.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ln.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(416,'lnwiktionary','mediawiki','wiktionary','local','ln','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ln.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ln.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(417,'lnwikibooks','mediawiki','wikibooks','local','ln','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ln.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ln.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(418,'lowiki','mediawiki','wikipedia','local','lo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//lo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(419,'lowiktionary','mediawiki','wiktionary','local','lo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lo.wik
tionary.org/w/$1\";s:9:\"page_path\";s:27:\"//lo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(420,'ltwiki','mediawiki','wikipedia','local','lt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lt.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//lt.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(421,'ltwiktionary','mediawiki','wiktionary','local','lt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lt.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//lt.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(422,'ltwikibooks','mediawiki','wikibooks','local','lt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lt.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//lt.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(423,'ltwikiquote','mediawiki','wikiquote','local','lt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lt.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//lt.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(424,'ltwikisource','mediawiki','wikisource','local','lt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lt.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//lt.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(425,'ltgwiki','mediawiki','wikipedia','local','ltg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ltg.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//ltg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(426,'lvwiki','mediawiki','wikipedia','local','lv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//lv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(427,'lvwiktionary','mediawiki','wiktionary','local','lv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//lv.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//lv.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(428,'lvwikibooks','mediawiki','wikibooks','local','lv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//lv.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//lv.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(429,'map_bmswiki','mediawiki','wikipedia','local','map-bms','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//map-bms.wikipedia.org/w/$1\";s:9:\"page_path\";s:31:\"//map-bms.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(430,'mdfwiki','mediawiki','wikipedia','local','mdf','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mdf.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//mdf.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(431,'mgwiki','mediawiki','wikipedia','local','mg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mg.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(432,'mgwiktionary','mediawiki','wiktionary','local','mg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mg.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mg.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(433,'mgwikibooks','mediawiki','wikibooks','local','mg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mg.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//mg.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(434,'mhwiki','mediawiki','wikipedia','local','mh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mh.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mh.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(435,'mhwiktionary','mediawiki','wiktionary','local','mh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mh.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mh.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(436,'mhrwiki','mediawiki','wikipedia','local','mhr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:
\"//mhr.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//mhr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(437,'miwiki','mediawiki','wikipedia','local','mi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mi.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(438,'miwiktionary','mediawiki','wiktionary','local','mi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mi.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(439,'miwikibooks','mediawiki','wikibooks','local','mi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mi.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//mi.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(440,'mkwiki','mediawiki','wikipedia','local','mk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mk.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mk.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(441,'mkwiktionary','mediawiki','wiktionary','local','mk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mk.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mk.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(442,'mkwikibooks','mediawiki','wikibooks','local','mk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mk.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//mk.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(443,'mkwikisource','mediawiki','wikisource','local','mk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mk.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//mk.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(444,'mlwiki','mediawiki','wikipedia','local','ml','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ml.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ml.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(445,'mlwiktionary','mediawiki','wiktionary','local','ml','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ml.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ml.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(446,'mlwikibooks','mediawiki','wikibooks','local','ml','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ml.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ml.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(447,'mlwikiquote','mediawiki','wikiquote','local','ml','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ml.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ml.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(448,'mlwikisource','mediawiki','wikisource','local','ml','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ml.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ml.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(449,'mnwiki','mediawiki','wikipedia','local','mn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(450,'mnwiktionary','mediawiki','wiktionary','local','mn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(451,'mnwikibooks','mediawiki','wikibooks','local','mn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mn.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//mn.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(452,'mowiki','mediawiki','wikipedia','local','mo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(453,'mowiktionary','mediawiki','wiktionary','local','mo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24
:\"//mo.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(454,'mrwiki','mediawiki','wikipedia','local','mr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mr.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(455,'mrwiktionary','mediawiki','wiktionary','local','mr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mr.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mr.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(456,'mrwikibooks','mediawiki','wikibooks','local','mr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mr.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//mr.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(457,'mrwikiquote','mediawiki','wikiquote','local','mr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mr.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//mr.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(458,'mrwikisource','mediawiki','wikisource','local','mr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mr.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//mr.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(459,'mrjwiki','mediawiki','wikipedia','local','mrj','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mrj.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//mrj.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(460,'mswiki','mediawiki','wikipedia','local','ms','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ms.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ms.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(461,'mswiktionary','mediawiki','wiktionary','local','ms','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ms.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ms.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(462,'mswikibooks','mediawiki','wikibooks','local','ms','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ms.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ms.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(463,'mtwiki','mediawiki','wikipedia','local','mt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mt.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mt.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(464,'mtwiktionary','mediawiki','wiktionary','local','mt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mt.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//mt.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(465,'muswiki','mediawiki','wikipedia','local','mus','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mus.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//mus.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(466,'mwlwiki','mediawiki','wikipedia','local','mwl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mwl.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//mwl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(467,'mywiki','mediawiki','wikipedia','local','my','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//my.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//my.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(468,'mywiktionary','mediawiki','wiktionary','local','my','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//my.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//my.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(469,'mywikibooks','mediawiki','wikibooks','local','my','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//my.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//my.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(470,'myvwiki','mediawiki','wikipedia','local','myv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//m
yv.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//myv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(471,'mznwiki','mediawiki','wikipedia','local','mzn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//mzn.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//mzn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(472,'nawiki','mediawiki','wikipedia','local','na','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//na.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//na.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(473,'nawiktionary','mediawiki','wiktionary','local','na','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//na.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//na.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(474,'nawikibooks','mediawiki','wikibooks','local','na','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//na.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//na.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(475,'nawikiquote','mediawiki','wikiquote','local','na','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//na.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//na.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(476,'nahwiki','mediawiki','wikipedia','local','nah','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nah.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//nah.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(477,'nahwiktionary','mediawiki','wiktionary','local','nah','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//nah.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//nah.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(478,'nahwikibooks','mediawiki','wikibooks','local','nah','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nah.wikibooks.org/w/$1\";s:9:\"page_path\";s:27:\"//nah.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(479,'napwiki','mediawiki','wikipedia','local','nap','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nap.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//nap.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(480,'ndswiki','mediawiki','wikipedia','local','nds','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nds.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//nds.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(481,'ndswiktionary','mediawiki','wiktionary','local','nds','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//nds.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//nds.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(482,'ndswikibooks','mediawiki','wikibooks','local','nds','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nds.wikibooks.org/w/$1\";s:9:\"page_path\";s:27:\"//nds.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(483,'ndswikiquote','mediawiki','wikiquote','local','nds','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nds.wikiquote.org/w/$1\";s:9:\"page_path\";s:27:\"//nds.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(484,'nds_nlwiki','mediawiki','wikipedia','local','nds-nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//nds-nl.wikipedia.org/w/$1\";s:9:\"page_path\";s:30:\"//nds-nl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(485,'newiki','mediawiki','wikipedia','local','ne','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ne.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ne.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(486,'newiktionary','mediawiki','wiktionary','local','ne','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ne.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ne.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(487,'newikibooks','mediawiki','wikibooks','local','ne','','.','a:1:{s:5:\"paths\";
a:2:{s:9:\"file_path\";s:23:\"//ne.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ne.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(488,'newwiki','mediawiki','wikipedia','local','new','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//new.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//new.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(489,'ngwiki','mediawiki','wikipedia','local','ng','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ng.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ng.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(490,'nlwiki','mediawiki','wikipedia','local','nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nl.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//nl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(491,'nlwiktionary','mediawiki','wiktionary','local','nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nl.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//nl.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(492,'nlwikibooks','mediawiki','wikibooks','local','nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nl.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//nl.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(493,'nlwikinews','mediawiki','wikinews','local','nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//nl.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//nl.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(494,'nlwikiquote','mediawiki','wikiquote','local','nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nl.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//nl.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(495,'nlwikisource','mediawiki','wikisource','local','nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nl.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//nl.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(496,'nlwikivoyage','mediawiki','wikivoyage','local','nl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nl.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//nl.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(497,'nnwiki','mediawiki','wikipedia','local','nn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//nn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(498,'nnwiktionary','mediawiki','wiktionary','local','nn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//nn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(499,'nnwikiquote','mediawiki','wikiquote','local','nn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nn.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//nn.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(500,'nowiki','mediawiki','wikipedia','local','no','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//no.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//no.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(501,'nowiktionary','mediawiki','wiktionary','local','no','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//no.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//no.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(502,'nowikibooks','mediawiki','wikibooks','local','no','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//no.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//no.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(503,'nowikinews','mediawiki','wikinews','local','no','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//no.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//no.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(504,'nowikiquote','mediawiki','wikiquote','local','no','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"
file_path\";s:23:\"//no.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//no.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(505,'nowikisource','mediawiki','wikisource','local','no','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//no.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//no.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(506,'novwiki','mediawiki','wikipedia','local','nov','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nov.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//nov.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(507,'nrmwiki','mediawiki','wikipedia','local','nrm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nrm.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//nrm.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(508,'nsowiki','mediawiki','wikipedia','local','nso','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nso.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//nso.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(509,'nvwiki','mediawiki','wikipedia','local','nv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//nv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(510,'nywiki','mediawiki','wikipedia','local','ny','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ny.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ny.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(511,'ocwiki','mediawiki','wikipedia','local','oc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//oc.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//oc.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(512,'ocwiktionary','mediawiki','wiktionary','local','oc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//oc.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//oc.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(513,'ocwikibooks','mediawiki','wikibooks','local','oc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//oc.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//oc.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(514,'omwiki','mediawiki','wikipedia','local','om','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//om.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//om.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(515,'omwiktionary','mediawiki','wiktionary','local','om','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//om.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//om.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(516,'orwiki','mediawiki','wikipedia','local','or','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//or.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//or.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(517,'orwiktionary','mediawiki','wiktionary','local','or','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//or.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//or.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(518,'oswiki','mediawiki','wikipedia','local','os','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//os.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//os.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(519,'pawiki','mediawiki','wikipedia','local','pa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pa.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//pa.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(520,'pawiktionary','mediawiki','wiktionary','local','pa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pa.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//pa.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(521,'pawikibooks','mediawiki','wikibooks','local','pa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"/
/pa.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//pa.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(522,'pagwiki','mediawiki','wikipedia','local','pag','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pag.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pag.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(523,'pamwiki','mediawiki','wikipedia','local','pam','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pam.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pam.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(524,'papwiki','mediawiki','wikipedia','local','pap','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pap.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pap.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(525,'pcdwiki','mediawiki','wikipedia','local','pcd','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pcd.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pcd.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(526,'pdcwiki','mediawiki','wikipedia','local','pdc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pdc.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pdc.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(527,'pflwiki','mediawiki','wikipedia','local','pfl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pfl.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pfl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(528,'piwiki','mediawiki','wikipedia','local','pi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pi.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//pi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(529,'piwiktionary','mediawiki','wiktionary','local','pi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pi.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//pi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(530,'pihwiki','mediawiki','wikipedia','local','pih','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pih.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pih.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(531,'plwiki','mediawiki','wikipedia','local','pl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pl.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//pl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(532,'plwiktionary','mediawiki','wiktionary','local','pl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pl.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//pl.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(533,'plwikibooks','mediawiki','wikibooks','local','pl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pl.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//pl.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(534,'plwikinews','mediawiki','wikinews','local','pl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//pl.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//pl.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(535,'plwikiquote','mediawiki','wikiquote','local','pl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pl.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//pl.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(536,'plwikisource','mediawiki','wikisource','local','pl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pl.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//pl.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(537,'pmswiki','mediawiki','wikipedia','local','pms','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pms.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pms.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(538,'pnbwiki','mediawiki','wikipedia','local','pnb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pnb.wikipedia.
org/w/$1\";s:9:\"page_path\";s:27:\"//pnb.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(539,'pnbwiktionary','mediawiki','wiktionary','local','pnb','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//pnb.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//pnb.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(540,'pntwiki','mediawiki','wikipedia','local','pnt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pnt.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//pnt.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(541,'pswiki','mediawiki','wikipedia','local','ps','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ps.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ps.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(542,'pswiktionary','mediawiki','wiktionary','local','ps','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ps.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ps.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(543,'pswikibooks','mediawiki','wikibooks','local','ps','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ps.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ps.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(544,'ptwiki','mediawiki','wikipedia','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pt.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//pt.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(545,'ptwiktionary','mediawiki','wiktionary','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pt.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//pt.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(546,'ptwikibooks','mediawiki','wikibooks','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pt.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//pt.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(547,'ptwikinews','mediawiki','wikinews','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//pt.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//pt.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(548,'ptwikiquote','mediawiki','wikiquote','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pt.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//pt.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(549,'ptwikisource','mediawiki','wikisource','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pt.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//pt.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(550,'ptwikiversity','mediawiki','wikiversity','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//pt.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//pt.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(551,'quwiki','mediawiki','wikipedia','local','qu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//qu.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//qu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(552,'quwiktionary','mediawiki','wiktionary','local','qu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//qu.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//qu.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(553,'quwikibooks','mediawiki','wikibooks','local','qu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//qu.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//qu.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(554,'quwikiquote','mediawiki','wikiquote','local','qu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//qu.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//qu.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(555,'rmwiki','mediawiki','wikipedia','local','rm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//rm.wiki
pedia.org/w/$1\";s:9:\"page_path\";s:26:\"//rm.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(556,'rmwiktionary','mediawiki','wiktionary','local','rm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//rm.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//rm.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(557,'rmwikibooks','mediawiki','wikibooks','local','rm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//rm.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//rm.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(558,'rmywiki','mediawiki','wikipedia','local','rmy','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//rmy.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//rmy.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(559,'rnwiki','mediawiki','wikipedia','local','rn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//rn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//rn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(560,'rnwiktionary','mediawiki','wiktionary','local','rn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//rn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//rn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(561,'rowiki','mediawiki','wikipedia','local','ro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ro.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ro.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(562,'rowiktionary','mediawiki','wiktionary','local','ro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ro.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ro.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(563,'rowikibooks','mediawiki','wikibooks','local','ro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ro.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ro.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(564,'rowikinews','mediawiki','wikinews','local','ro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//ro.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//ro.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(565,'rowikiquote','mediawiki','wikiquote','local','ro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ro.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ro.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(566,'rowikisource','mediawiki','wikisource','local','ro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ro.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ro.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(567,'roa_rupwiki','mediawiki','wikipedia','local','roa-rup','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//roa-rup.wikipedia.org/w/$1\";s:9:\"page_path\";s:31:\"//roa-rup.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(568,'roa_rupwiktionary','mediawiki','wiktionary','local','roa-rup','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//roa-rup.wiktionary.org/w/$1\";s:9:\"page_path\";s:32:\"//roa-rup.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(569,'roa_tarawiki','mediawiki','wikipedia','local','roa-tara','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//roa-tara.wikipedia.org/w/$1\";s:9:\"page_path\";s:32:\"//roa-tara.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(570,'ruwiki','mediawiki','wikipedia','local','ru','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ru.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ru.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(571,'ruwiktionary','mediawiki','wiktionary','local','ru','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ru.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ru.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(572,'ruwikibooks','mediawiki','wikibooks','local','ru','','.','a:
1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ru.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ru.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(573,'ruwikinews','mediawiki','wikinews','local','ru','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//ru.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//ru.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(574,'ruwikiquote','mediawiki','wikiquote','local','ru','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ru.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ru.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(575,'ruwikisource','mediawiki','wikisource','local','ru','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ru.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ru.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(576,'ruwikiversity','mediawiki','wikiversity','local','ru','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//ru.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//ru.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(577,'ruwikivoyage','mediawiki','wikivoyage','local','ru','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ru.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//ru.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(578,'ruewiki','mediawiki','wikipedia','local','rue','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//rue.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//rue.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(579,'rwwiki','mediawiki','wikipedia','local','rw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//rw.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//rw.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(580,'rwwiktionary','mediawiki','wiktionary','local','rw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//rw.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//rw.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(581,'sawiki','mediawiki','wikipedia','local','sa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sa.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sa.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(582,'sawiktionary','mediawiki','wiktionary','local','sa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sa.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sa.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(583,'sawikibooks','mediawiki','wikibooks','local','sa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sa.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//sa.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(584,'sawikisource','mediawiki','wikisource','local','sa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sa.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//sa.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(585,'sahwiki','mediawiki','wikipedia','local','sah','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sah.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//sah.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(586,'sahwikisource','mediawiki','wikisource','local','sah','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//sah.wikisource.org/w/$1\";s:9:\"page_path\";s:28:\"//sah.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(587,'scwiki','mediawiki','wikipedia','local','sc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sc.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sc.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(588,'scwiktionary','mediawiki','wiktionary','local','sc','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sc.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sc.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(589,'scnwiki','mediawiki','wikipedia','local','s
cn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//scn.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//scn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(590,'scnwiktionary','mediawiki','wiktionary','local','scn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//scn.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//scn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(591,'scowiki','mediawiki','wikipedia','local','sco','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sco.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//sco.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(592,'sdwiki','mediawiki','wikipedia','local','sd','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sd.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sd.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(593,'sdwiktionary','mediawiki','wiktionary','local','sd','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sd.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sd.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(594,'sdwikinews','mediawiki','wikinews','local','sd','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//sd.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//sd.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(595,'sewiki','mediawiki','wikipedia','local','se','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//se.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//se.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(596,'sewikibooks','mediawiki','wikibooks','local','se','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//se.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//se.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(597,'sgwiki','mediawiki','wikipedia','local','sg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sg.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(598,'sgwiktionary','mediawiki','wiktionary','local','sg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sg.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sg.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(599,'shwiki','mediawiki','wikipedia','local','sh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sh.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sh.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(600,'shwiktionary','mediawiki','wiktionary','local','sh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sh.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sh.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(601,'siwiki','mediawiki','wikipedia','local','si','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//si.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//si.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(602,'siwiktionary','mediawiki','wiktionary','local','si','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//si.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//si.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(603,'siwikibooks','mediawiki','wikibooks','local','si','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//si.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//si.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(604,'simplewiki','mediawiki','wikipedia','local','simple','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//simple.wikipedia.org/w/$1\";s:9:\"page_path\";s:30:\"//simple.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(605,'simplewiktionary','mediawiki','wiktionary','local','simple','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//simple.wiktionary.org/w/$1\";s:9:\"page_path\";s:31:\"//simple.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(606,'simplewikibooks','med
iawiki','wikibooks','local','simple','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//simple.wikibooks.org/w/$1\";s:9:\"page_path\";s:30:\"//simple.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(607,'simplewikiquote','mediawiki','wikiquote','local','simple','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//simple.wikiquote.org/w/$1\";s:9:\"page_path\";s:30:\"//simple.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(608,'skwiki','mediawiki','wikipedia','local','sk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sk.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sk.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(609,'skwiktionary','mediawiki','wiktionary','local','sk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sk.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sk.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(610,'skwikibooks','mediawiki','wikibooks','local','sk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sk.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//sk.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(611,'skwikiquote','mediawiki','wikiquote','local','sk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sk.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//sk.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(612,'skwikisource','mediawiki','wikisource','local','sk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sk.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//sk.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(613,'slwiki','mediawiki','wikipedia','local','sl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sl.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(614,'slwiktionary','mediawiki','wiktionary','local','sl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sl.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sl.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(615,'slwikibooks','mediawiki','wikibooks','local','sl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sl.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//sl.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(616,'slwikiquote','mediawiki','wikiquote','local','sl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sl.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//sl.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(617,'slwikisource','mediawiki','wikisource','local','sl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sl.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//sl.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(618,'slwikiversity','mediawiki','wikiversity','local','sl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//sl.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//sl.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(619,'smwiki','mediawiki','wikipedia','local','sm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sm.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sm.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(620,'smwiktionary','mediawiki','wiktionary','local','sm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sm.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sm.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(621,'snwiki','mediawiki','wikipedia','local','sn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(622,'snwiktionary','mediawiki','wiktionary','local','sn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sn.wiktionary.org/wiki/$1\";}
}',0,'a:0:{}'),(623,'sowiki','mediawiki','wikipedia','local','so','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//so.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//so.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(624,'sowiktionary','mediawiki','wiktionary','local','so','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//so.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//so.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(625,'sqwiki','mediawiki','wikipedia','local','sq','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sq.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sq.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(626,'sqwiktionary','mediawiki','wiktionary','local','sq','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sq.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sq.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(627,'sqwikibooks','mediawiki','wikibooks','local','sq','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sq.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//sq.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(628,'sqwikinews','mediawiki','wikinews','local','sq','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//sq.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//sq.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(629,'sqwikiquote','mediawiki','wikiquote','local','sq','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sq.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//sq.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(630,'srwiki','mediawiki','wikipedia','local','sr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sr.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(631,'srwiktionary','mediawiki','wiktionary','local','sr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sr.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sr.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(632,'srwikibooks','mediawiki','wikibooks','local','sr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sr.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//sr.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(633,'srwikinews','mediawiki','wikinews','local','sr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//sr.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//sr.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(634,'srwikiquote','mediawiki','wikiquote','local','sr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sr.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//sr.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(635,'srwikisource','mediawiki','wikisource','local','sr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sr.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//sr.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(636,'srnwiki','mediawiki','wikipedia','local','srn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//srn.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//srn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(637,'sswiki','mediawiki','wikipedia','local','ss','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ss.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ss.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(638,'sswiktionary','mediawiki','wiktionary','local','ss','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ss.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ss.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(639,'stwiki','mediawiki','wikipedia','local','st','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//st.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//st.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(
640,'stwiktionary','mediawiki','wiktionary','local','st','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//st.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//st.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(641,'stqwiki','mediawiki','wikipedia','local','stq','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//stq.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//stq.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(642,'suwiki','mediawiki','wikipedia','local','su','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//su.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//su.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(643,'suwiktionary','mediawiki','wiktionary','local','su','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//su.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//su.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(644,'suwikibooks','mediawiki','wikibooks','local','su','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//su.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//su.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(645,'suwikiquote','mediawiki','wikiquote','local','su','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//su.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//su.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(646,'svwiki','mediawiki','wikipedia','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sv.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(647,'svwiktionary','mediawiki','wiktionary','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sv.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sv.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(648,'svwikibooks','mediawiki','wikibooks','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sv.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//sv.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(649,'svwikinews','mediawiki','wikinews','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//sv.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//sv.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(650,'svwikiquote','mediawiki','wikiquote','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sv.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//sv.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(651,'svwikisource','mediawiki','wikisource','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sv.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//sv.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(652,'svwikiversity','mediawiki','wikiversity','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//sv.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//sv.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(653,'svwikivoyage','mediawiki','wikivoyage','local','sv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sv.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//sv.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(654,'swwiki','mediawiki','wikipedia','local','sw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sw.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//sw.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(655,'swwiktionary','mediawiki','wiktionary','local','sw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//sw.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//sw.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(656,'swwikibooks','mediawiki','wikibooks','local','sw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sw.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//sw.wikibooks.org/wiki/$1\";}}',0,'
a:0:{}'),(657,'szlwiki','mediawiki','wikipedia','local','szl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//szl.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//szl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(658,'tawiki','mediawiki','wikipedia','local','ta','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ta.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ta.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(659,'tawiktionary','mediawiki','wiktionary','local','ta','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ta.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ta.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(660,'tawikibooks','mediawiki','wikibooks','local','ta','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ta.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ta.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(661,'tawikinews','mediawiki','wikinews','local','ta','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//ta.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//ta.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(662,'tawikiquote','mediawiki','wikiquote','local','ta','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ta.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ta.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(663,'tawikisource','mediawiki','wikisource','local','ta','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ta.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//ta.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(664,'tewiki','mediawiki','wikipedia','local','te','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//te.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//te.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(665,'tewiktionary','mediawiki','wiktionary','local','te','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//te.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//te.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(666,'tewikibooks','mediawiki','wikibooks','local','te','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//te.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//te.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(667,'tewikiquote','mediawiki','wikiquote','local','te','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//te.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//te.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(668,'tewikisource','mediawiki','wikisource','local','te','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//te.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//te.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(669,'tetwiki','mediawiki','wikipedia','local','tet','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tet.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//tet.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(670,'tgwiki','mediawiki','wikipedia','local','tg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tg.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tg.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(671,'tgwiktionary','mediawiki','wiktionary','local','tg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tg.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//tg.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(672,'tgwikibooks','mediawiki','wikibooks','local','tg','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tg.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//tg.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(673,'thwiki','mediawiki','wikipedia','local','th','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//th.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//th.wikipedia.org/wiki/$1\";}}',0,'a:0:{}')
,(674,'thwiktionary','mediawiki','wiktionary','local','th','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//th.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//th.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(675,'thwikibooks','mediawiki','wikibooks','local','th','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//th.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//th.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(676,'thwikinews','mediawiki','wikinews','local','th','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//th.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//th.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(677,'thwikiquote','mediawiki','wikiquote','local','th','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//th.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//th.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(678,'thwikisource','mediawiki','wikisource','local','th','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//th.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//th.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(679,'tiwiki','mediawiki','wikipedia','local','ti','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ti.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ti.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(680,'tiwiktionary','mediawiki','wiktionary','local','ti','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ti.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ti.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(681,'tkwiki','mediawiki','wikipedia','local','tk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tk.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tk.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(682,'tkwiktionary','mediawiki','wiktionary','local','tk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tk.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//tk.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(683,'tkwikibooks','mediawiki','wikibooks','local','tk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tk.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//tk.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(684,'tkwikiquote','mediawiki','wikiquote','local','tk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tk.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//tk.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(685,'tlwiki','mediawiki','wikipedia','local','tl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tl.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(686,'tlwiktionary','mediawiki','wiktionary','local','tl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tl.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//tl.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(687,'tlwikibooks','mediawiki','wikibooks','local','tl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tl.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//tl.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(688,'tnwiki','mediawiki','wikipedia','local','tn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tn.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tn.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(689,'tnwiktionary','mediawiki','wiktionary','local','tn','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tn.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//tn.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(690,'towiki','mediawiki','wikipedia','local','to','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//to.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//to.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(691,'
towiktionary','mediawiki','wiktionary','local','to','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//to.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//to.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(692,'tpiwiki','mediawiki','wikipedia','local','tpi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tpi.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//tpi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(693,'tpiwiktionary','mediawiki','wiktionary','local','tpi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//tpi.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//tpi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(694,'trwiki','mediawiki','wikipedia','local','tr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tr.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tr.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(695,'trwiktionary','mediawiki','wiktionary','local','tr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tr.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//tr.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(696,'trwikibooks','mediawiki','wikibooks','local','tr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tr.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//tr.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(697,'trwikinews','mediawiki','wikinews','local','tr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//tr.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//tr.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(698,'trwikiquote','mediawiki','wikiquote','local','tr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tr.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//tr.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(699,'trwikisource','mediawiki','wikisource','local','tr','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tr.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//tr.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(700,'tswiki','mediawiki','wikipedia','local','ts','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ts.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ts.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(701,'tswiktionary','mediawiki','wiktionary','local','ts','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ts.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ts.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(702,'ttwiki','mediawiki','wikipedia','local','tt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tt.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tt.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(703,'ttwiktionary','mediawiki','wiktionary','local','tt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tt.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//tt.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(704,'ttwikibooks','mediawiki','wikibooks','local','tt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tt.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//tt.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(705,'ttwikiquote','mediawiki','wikiquote','local','tt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tt.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//tt.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(706,'tumwiki','mediawiki','wikipedia','local','tum','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tum.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//tum.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(707,'twwiki','mediawiki','wikipedia','local','tw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tw.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tw.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(708,'
twwiktionary','mediawiki','wiktionary','local','tw','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tw.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//tw.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(709,'tywiki','mediawiki','wikipedia','local','ty','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ty.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ty.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(710,'udmwiki','mediawiki','wikipedia','local','udm','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//udm.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//udm.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(711,'ugwiki','mediawiki','wikipedia','local','ug','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ug.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ug.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(712,'ugwiktionary','mediawiki','wiktionary','local','ug','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ug.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ug.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(713,'ugwikibooks','mediawiki','wikibooks','local','ug','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ug.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ug.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(714,'ugwikiquote','mediawiki','wikiquote','local','ug','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ug.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ug.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(715,'ukwiki','mediawiki','wikipedia','local','uk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//uk.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//uk.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(716,'ukwiktionary','mediawiki','wiktionary','local','uk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//uk.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//uk.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(717,'ukwikibooks','mediawiki','wikibooks','local','uk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//uk.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//uk.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(718,'ukwikinews','mediawiki','wikinews','local','uk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//uk.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//uk.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(719,'ukwikiquote','mediawiki','wikiquote','local','uk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//uk.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//uk.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(720,'ukwikisource','mediawiki','wikisource','local','uk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//uk.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//uk.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(721,'urwiki','mediawiki','wikipedia','local','ur','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ur.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ur.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(722,'urwiktionary','mediawiki','wiktionary','local','ur','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ur.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//ur.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(723,'urwikibooks','mediawiki','wikibooks','local','ur','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ur.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//ur.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(724,'urwikiquote','mediawiki','wikiquote','local','ur','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ur.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//ur.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(725,'uzwiki'
,'mediawiki','wikipedia','local','uz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//uz.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//uz.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(726,'uzwiktionary','mediawiki','wiktionary','local','uz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//uz.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//uz.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(727,'uzwikibooks','mediawiki','wikibooks','local','uz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//uz.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//uz.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(728,'uzwikiquote','mediawiki','wikiquote','local','uz','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//uz.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//uz.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(729,'vewiki','mediawiki','wikipedia','local','ve','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ve.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ve.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(730,'vecwiki','mediawiki','wikipedia','local','vec','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//vec.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//vec.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(731,'vecwikisource','mediawiki','wikisource','local','vec','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//vec.wikisource.org/w/$1\";s:9:\"page_path\";s:28:\"//vec.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(732,'vepwiki','mediawiki','wikipedia','local','vep','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//vep.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//vep.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(733,'viwiki','mediawiki','wikipedia','local','vi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//vi.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//vi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(734,'viwiktionary','mediawiki','wiktionary','local','vi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//vi.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//vi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(735,'viwikibooks','mediawiki','wikibooks','local','vi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//vi.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//vi.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(736,'viwikiquote','mediawiki','wikiquote','local','vi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//vi.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//vi.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(737,'viwikisource','mediawiki','wikisource','local','vi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//vi.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//vi.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(738,'vlswiki','mediawiki','wikipedia','local','vls','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//vls.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//vls.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(739,'vowiki','mediawiki','wikipedia','local','vo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//vo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//vo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(740,'vowiktionary','mediawiki','wiktionary','local','vo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//vo.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//vo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(741,'vowikibooks','mediawiki','wikibooks','local','vo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//vo.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//vo.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(742,'vowikiqu
ote','mediawiki','wikiquote','local','vo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//vo.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//vo.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(743,'wawiki','mediawiki','wikipedia','local','wa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//wa.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//wa.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(744,'wawiktionary','mediawiki','wiktionary','local','wa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//wa.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//wa.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(745,'wawikibooks','mediawiki','wikibooks','local','wa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//wa.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//wa.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(746,'warwiki','mediawiki','wikipedia','local','war','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//war.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//war.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(747,'wowiki','mediawiki','wikipedia','local','wo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//wo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//wo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(748,'wowiktionary','mediawiki','wiktionary','local','wo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//wo.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//wo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(749,'wowikiquote','mediawiki','wikiquote','local','wo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//wo.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//wo.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(750,'wuuwiki','mediawiki','wikipedia','local','wuu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//wuu.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//wuu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(751,'xalwiki','mediawiki','wikipedia','local','xal','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//xal.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//xal.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(752,'xhwiki','mediawiki','wikipedia','local','xh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//xh.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//xh.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(753,'xhwiktionary','mediawiki','wiktionary','local','xh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//xh.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//xh.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(754,'xhwikibooks','mediawiki','wikibooks','local','xh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//xh.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//xh.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(755,'xmfwiki','mediawiki','wikipedia','local','xmf','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//xmf.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//xmf.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(756,'yiwiki','mediawiki','wikipedia','local','yi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//yi.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//yi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(757,'yiwiktionary','mediawiki','wiktionary','local','yi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//yi.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//yi.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(758,'yiwikisource','mediawiki','wikisource','local','yi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//yi.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//yi.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(759,'yowiki','media
wiki','wikipedia','local','yo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//yo.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//yo.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(760,'yowiktionary','mediawiki','wiktionary','local','yo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//yo.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//yo.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(761,'yowikibooks','mediawiki','wikibooks','local','yo','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//yo.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//yo.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(762,'zawiki','mediawiki','wikipedia','local','za','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//za.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//za.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(763,'zawiktionary','mediawiki','wiktionary','local','za','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//za.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//za.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(764,'zawikibooks','mediawiki','wikibooks','local','za','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//za.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//za.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(765,'zawikiquote','mediawiki','wikiquote','local','za','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//za.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//za.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(766,'zeawiki','mediawiki','wikipedia','local','zea','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//zea.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//zea.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(767,'zhwiki','mediawiki','wikipedia','local','zh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//zh.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//zh.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(768,'zhwiktionary','mediawiki','wiktionary','local','zh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//zh.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//zh.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(769,'zhwikibooks','mediawiki','wikibooks','local','zh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//zh.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//zh.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(770,'zhwikinews','mediawiki','wikinews','local','zh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:22:\"//zh.wikinews.org/w/$1\";s:9:\"page_path\";s:25:\"//zh.wikinews.org/wiki/$1\";}}',0,'a:0:{}'),(771,'zhwikiquote','mediawiki','wikiquote','local','zh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//zh.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//zh.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(772,'zhwikisource','mediawiki','wikisource','local','zh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//zh.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//zh.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(773,'zh_classicalwiki','mediawiki','wikipedia','local','zh-classical','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:33:\"//zh-classical.wikipedia.org/w/$1\";s:9:\"page_path\";s:36:\"//zh-classical.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(774,'zh_min_nanwiki','mediawiki','wikipedia','local','zh-min-nan','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:31:\"//zh-min-nan.wikipedia.org/w/$1\";s:9:\"page_path\";s:34:\"//zh-min-nan.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(775,'zh_min_nanwiktionary','mediawiki','wiktionary','local','zh-min-nan','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:32:\"//zh-min-nan.wiktionary.org/w/$1\";s:9:\"p
age_path\";s:35:\"//zh-min-nan.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(776,'zh_min_nanwikibooks','mediawiki','wikibooks','local','zh-min-nan','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:31:\"//zh-min-nan.wikibooks.org/w/$1\";s:9:\"page_path\";s:34:\"//zh-min-nan.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(777,'zh_min_nanwikiquote','mediawiki','wikiquote','local','zh-min-nan','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:31:\"//zh-min-nan.wikiquote.org/w/$1\";s:9:\"page_path\";s:34:\"//zh-min-nan.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(778,'zh_min_nanwikisource','mediawiki','wikisource','local','zh-min-nan','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:32:\"//zh-min-nan.wikisource.org/w/$1\";s:9:\"page_path\";s:35:\"//zh-min-nan.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(779,'zh_yuewiki','mediawiki','wikipedia','local','zh-yue','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//zh-yue.wikipedia.org/w/$1\";s:9:\"page_path\";s:30:\"//zh-yue.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(780,'zuwiki','mediawiki','wikipedia','local','zu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//zu.wikipedia.org/w/$1\";s:9:\"page_path\";s:26:\"//zu.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(781,'zuwiktionary','mediawiki','wiktionary','local','zu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//zu.wiktionary.org/w/$1\";s:9:\"page_path\";s:27:\"//zu.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(782,'zuwikibooks','mediawiki','wikibooks','local','zu','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//zu.wikibooks.org/w/$1\";s:9:\"page_path\";s:26:\"//zu.wikibooks.org/wiki/$1\";}}',0,'a:0:{}'),(783,'aswikisource','mediawiki','wikisource','local','as','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//as.wikisource.org/w/$1\";s:9:\"page_path\";s:27:\"//as.wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(784,'eswikivoyage','mediawiki','wikivoyage','local','es','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//es.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//es.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(785,'kowikiversity','mediawiki','wikiversity','local','ko','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//ko.wikiversity.org/w/$1\";s:9:\"page_path\";s:28:\"//ko.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(786,'minwiki','mediawiki','wikipedia','local','min','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//min.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//min.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(787,'plwikivoyage','mediawiki','wikivoyage','local','pl','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pl.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//pl.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(788,'ptwikivoyage','mediawiki','wikivoyage','local','pt','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//pt.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//pt.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(789,'rowikivoyage','mediawiki','wikivoyage','local','ro','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ro.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//ro.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(790,'sawikiquote','mediawiki','wikiquote','local','sa','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//sa.wikiquote.org/w/$1\";s:9:\"page_path\";s:26:\"//sa.wikiquote.org/wiki/$1\";}}',0,'a:0:{}'),(791,'elwikivoyage','mediawiki','wikivoyage','local','el','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//el.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//el.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'
),(792,'hewikivoyage','mediawiki','wikivoyage','local','he','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//he.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//he.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(793,'ukwikivoyage','mediawiki','wikivoyage','local','uk','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//uk.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//uk.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(794,'vecwiktionary','mediawiki','wiktionary','local','vec','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//vec.wiktionary.org/w/$1\";s:9:\"page_path\";s:28:\"//vec.wiktionary.org/wiki/$1\";}}',0,'a:0:{}'),(795,'wikidatawiki','mediawiki','wikidata','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//www.wikidata.org/w/$1\";s:9:\"page_path\";s:26:\"//www.wikidata.org/wiki/$1\";}}',0,'a:0:{}'),(796,'tyvwiki','mediawiki','wikipedia','local','tyv','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//tyv.wikipedia.org/w/$1\";s:9:\"page_path\";s:27:\"//tyv.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(797,'viwikivoyage','mediawiki','wikivoyage','local','vi','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//vi.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//vi.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'),(798,'advisorywiki','mediawiki','advisory','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//advisory.wikimedia.org/w/$1\";s:9:\"page_path\";s:32:\"//advisory.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(799,'arwikimedia','mediawiki','arwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ar.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ar.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(800,'arbcom_dewiki','mediawiki','arbcom-de','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//arbcom.de.wikipedia.org/w/$1\";s:9:\"page_path\";s:33:\"//arbcom.de.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(801,'arbcom_enwiki','mediawiki','arbcom-en','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//arbcom.en.wikipedia.org/w/$1\";s:9:\"page_path\";s:33:\"//arbcom.en.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(802,'arbcom_fiwiki','mediawiki','arbcom-fi','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//arbcom.fi.wikipedia.org/w/$1\";s:9:\"page_path\";s:33:\"//arbcom.fi.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(803,'arbcom_nlwiki','mediawiki','arbcom-nl','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//arbcom.nl.wikipedia.org/w/$1\";s:9:\"page_path\";s:33:\"//arbcom.nl.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(804,'auditcomwiki','mediawiki','auditcom','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//auditcom.wikimedia.org/w/$1\";s:9:\"page_path\";s:32:\"//auditcom.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(805,'bdwikimedia','mediawiki','bdwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//bd.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//bd.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(806,'bewikimedia','mediawiki','bewikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//be.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//be.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(807,'betawikiversity','mediawiki','betawikiversity','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//beta.wikiversity.org/w/$1\";s:9:\"page_path\";s:30:\"//beta.wikiversity.org/wiki/$1\";}}',0,'a:0:{}'),(808,'boardwiki','mediawiki','board','local','en','','.','a:1:{s:5:\"paths\";a
:2:{s:9:\"file_path\";s:26:\"//board.wikimedia.org/w/$1\";s:9:\"page_path\";s:29:\"//board.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(809,'boardgovcomwiki','mediawiki','boardgovcom','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:32:\"//boardgovcom.wikimedia.org/w/$1\";s:9:\"page_path\";s:35:\"//boardgovcom.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(810,'brwikimedia','mediawiki','brwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//br.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//br.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(811,'chairwiki','mediawiki','chair','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:26:\"//chair.wikimedia.org/w/$1\";s:9:\"page_path\";s:29:\"//chair.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(812,'chapcomwiki','mediawiki','chapcom','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//chapcom.wikimedia.org/w/$1\";s:9:\"page_path\";s:31:\"//chapcom.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(813,'checkuserwiki','mediawiki','checkuser','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//checkuser.wikimedia.org/w/$1\";s:9:\"page_path\";s:33:\"//checkuser.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(814,'cowikimedia','mediawiki','cowikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//co.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//co.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(815,'collabwiki','mediawiki','collab','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//collab.wikimedia.org/w/$1\";s:9:\"page_path\";s:30:\"//collab.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(816,'commonswiki','mediawiki','commons','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//commons.wikimedia.org/w/$1\";s:9:\"page_path\";s:31:\"//commons.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(817,'dkwikimedia','mediawiki','dkwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//dk.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//dk.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(818,'donatewiki','mediawiki','donate','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//donate.wikimedia.org/w/$1\";s:9:\"page_path\";s:30:\"//donate.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(819,'etwikimedia','mediawiki','etwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//et.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//et.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(820,'execwiki','mediawiki','exec','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//exec.wikimedia.org/w/$1\";s:9:\"page_path\";s:28:\"//exec.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(821,'fdcwiki','mediawiki','fdc','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//fdc.wikimedia.org/w/$1\";s:9:\"page_path\";s:27:\"//fdc.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(822,'fiwikimedia','mediawiki','fiwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//fi.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//fi.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(823,'foundationwiki','mediawiki','foundation','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//wikimediafoundation.org/w/$1\";s:9:\"page_path\";s:33:\"//wikimediafoundation.org/wiki/$1\";}}',0,'a:0:{}'),(824,'grantswiki','mediawiki','grants','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//grants.wikimedia.org/w/$1\";s:9:\"page_path\";s:30:\"//grants.wikimedia.org/wiki/$1\";}}',0,'a:0:{
}'),(825,'iegcomwiki','mediawiki','iegcom','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//iegcom.wikimedia.org/w/$1\";s:9:\"page_path\";s:30:\"//iegcom.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(826,'ilwikimedia','mediawiki','ilwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//il.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//il.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(827,'incubatorwiki','mediawiki','incubator','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//incubator.wikimedia.org/w/$1\";s:9:\"page_path\";s:33:\"//incubator.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(828,'internalwiki','mediawiki','internal','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//internal.wikimedia.org/w/$1\";s:9:\"page_path\";s:32:\"//internal.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(829,'loginwiki','mediawiki','login','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:26:\"//login.wikimedia.org/w/$1\";s:9:\"page_path\";s:29:\"//login.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(830,'mediawikiwiki','mediawiki','mediawiki','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//www.mediawiki.org/w/$1\";s:9:\"page_path\";s:27:\"//www.mediawiki.org/wiki/$1\";}}',0,'a:0:{}'),(831,'metawiki','mediawiki','meta','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//meta.wikimedia.org/w/$1\";s:9:\"page_path\";s:28:\"//meta.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(832,'mkwikimedia','mediawiki','mkwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mk.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mk.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(833,'movementroleswiki','mediawiki','movementroles','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//movementroles.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//movementroles.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(834,'mxwikimedia','mediawiki','mxwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//mx.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//mx.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(835,'nlwikimedia','mediawiki','nlwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nl.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//nl.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(836,'nowikimedia','mediawiki','nowikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//no.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//no.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(837,'noboard_chapterswikimedia','mediawiki','noboard-chapterswikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:37:\"//noboard.chapters.wikimedia.org/w/$1\";s:9:\"page_path\";s:40:\"//noboard.chapters.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(838,'nostalgiawiki','mediawiki','nostalgia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//nostalgia.wikipedia.org/w/$1\";s:9:\"page_path\";s:33:\"//nostalgia.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(839,'nycwikimedia','mediawiki','nycwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//nyc.wikimedia.org/w/$1\";s:9:\"page_path\";s:27:\"//nyc.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(840,'nzwikimedia','mediawiki','nzwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//nz.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//nz.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(841,'officewiki','mediawiki','office','local','en'
,'','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:27:\"//office.wikimedia.org/w/$1\";s:9:\"page_path\";s:30:\"//office.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(842,'ombudsmenwiki','mediawiki','ombudsmen','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//ombudsmen.wikimedia.org/w/$1\";s:9:\"page_path\";s:33:\"//ombudsmen.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(843,'otrs_wikiwiki','mediawiki','otrs-wiki','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//otrs-wiki.wikimedia.org/w/$1\";s:9:\"page_path\";s:33:\"//otrs-wiki.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(844,'outreachwiki','mediawiki','outreach','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//outreach.wikimedia.org/w/$1\";s:9:\"page_path\";s:32:\"//outreach.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(845,'pa_uswikimedia','mediawiki','pa-uswikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:26:\"//pa-us.wikimedia.org/w/$1\";s:9:\"page_path\";s:29:\"//pa-us.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(846,'plwikimedia','mediawiki','plwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//pl.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//pl.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(847,'qualitywiki','mediawiki','quality','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//quality.wikimedia.org/w/$1\";s:9:\"page_path\";s:31:\"//quality.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(848,'rswikimedia','mediawiki','rswikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//rs.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//rs.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(849,'ruwikimedia','mediawiki','ruwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ru.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ru.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(850,'sewikimedia','mediawiki','sewikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//se.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//se.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(851,'searchcomwiki','mediawiki','searchcom','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//searchcom.wikimedia.org/w/$1\";s:9:\"page_path\";s:33:\"//searchcom.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(852,'sourceswiki','mediawiki','sources','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:21:\"//wikisource.org/w/$1\";s:9:\"page_path\";s:24:\"//wikisource.org/wiki/$1\";}}',0,'a:0:{}'),(853,'spcomwiki','mediawiki','spcom','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:26:\"//spcom.wikimedia.org/w/$1\";s:9:\"page_path\";s:29:\"//spcom.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(854,'specieswiki','mediawiki','species','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//species.wikimedia.org/w/$1\";s:9:\"page_path\";s:31:\"//species.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(855,'stewardwiki','mediawiki','steward','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:28:\"//steward.wikimedia.org/w/$1\";s:9:\"page_path\";s:31:\"//steward.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(856,'strategywiki','mediawiki','strategy','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:29:\"//strategy.wikimedia.org/w/$1\";s:9:\"page_path\";s:32:\"//strategy.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(857,'tenwiki','mediawiki','ten','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//ten.wikipedia.org/w/$1\";s:9:\"page_path\
";s:27:\"//ten.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(858,'testwiki','mediawiki','test','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//test.wikipedia.org/w/$1\";s:9:\"page_path\";s:28:\"//test.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(859,'test2wiki','mediawiki','test2','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:26:\"//test2.wikipedia.org/w/$1\";s:9:\"page_path\";s:29:\"//test2.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(860,'testwikidatawiki','mediawiki','testwikidata','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//test.wikidata.org/w/$1\";s:9:\"page_path\";s:27:\"//test.wikidata.org/wiki/$1\";}}',0,'a:0:{}'),(861,'trwikimedia','mediawiki','trwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//tr.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//tr.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(862,'transitionteamwiki','mediawiki','transitionteam','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:35:\"//transitionteam.wikimedia.org/w/$1\";s:9:\"page_path\";s:38:\"//transitionteam.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(863,'uawikimedia','mediawiki','uawikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ua.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ua.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(864,'ukwikimedia','mediawiki','ukwikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//uk.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//uk.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(865,'usabilitywiki','mediawiki','usability','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:30:\"//usability.wikimedia.org/w/$1\";s:9:\"page_path\";s:33:\"//usability.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(866,'vewikimedia','mediawiki','vewikimedia','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:23:\"//ve.wikimedia.org/w/$1\";s:9:\"page_path\";s:26:\"//ve.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(867,'votewiki','mediawiki','vote','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:25:\"//vote.wikimedia.org/w/$1\";s:9:\"page_path\";s:28:\"//vote.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(868,'wg_enwiki','mediawiki','wg-en','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:26:\"//wg.en.wikipedia.org/w/$1\";s:9:\"page_path\";s:29:\"//wg.en.wikipedia.org/wiki/$1\";}}',0,'a:0:{}'),(869,'wikimania2005wiki','mediawiki','wikimania2005','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2005.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2005.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(870,'wikimania2006wiki','mediawiki','wikimania2006','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2006.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2006.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(871,'wikimania2007wiki','mediawiki','wikimania2007','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2007.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2007.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(872,'wikimania2008wiki','mediawiki','wikimania2008','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2008.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2008.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(873,'wikimania2009wiki','mediawiki','wikimania2009','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2009.wikimedia.org/w/$1\";s:9:\"page_path
\";s:37:\"//wikimania2009.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(874,'wikimania2010wiki','mediawiki','wikimania2010','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2010.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2010.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(875,'wikimania2011wiki','mediawiki','wikimania2011','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2011.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2011.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(876,'wikimania2012wiki','mediawiki','wikimania2012','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2012.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2012.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(877,'wikimania2013wiki','mediawiki','wikimania2013','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2013.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2013.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(878,'wikimania2014wiki','mediawiki','wikimania2014','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimania2014.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimania2014.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(879,'wikimaniateamwiki','mediawiki','wikimaniateam','local','en','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:34:\"//wikimaniateam.wikimedia.org/w/$1\";s:9:\"page_path\";s:37:\"//wikimaniateam.wikimedia.org/wiki/$1\";}}',0,'a:0:{}'),(880,'zhwikivoyage','mediawiki','wikivoyage','local','zh','','.','a:1:{s:5:\"paths\";a:2:{s:9:\"file_path\";s:24:\"//zh.wikivoyage.org/w/$1\";s:9:\"page_path\";s:27:\"//zh.wikivoyage.org/wiki/$1\";}}',0,'a:0:{}'); /*!40000 ALTER TABLE `sites` ENABLE KEYS */; /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; -- Dump completed on 2014-04-22 1:57:49 Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/wikidatawiki-20140508-index.html000066400000000000000000000400601444772566300311000ustar00rootroot00000000000000 wikidatawiki dump progress on 20140508

wikidatawiki dump progress on 20140508

This is the Wikimedia dump service. Please read the copyrights information. See Meta:Data dumps for documentation on the provided data formats.

See all databases list.

Last dumped on 2014-04-20

Dump complete

Verify downloaded files against the MD5 checksums to check for corrupted files.

Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/wikidatawiki-index-old.html000066400000000000000000000041501444772566300307530ustar00rootroot00000000000000 Index of /wikidatawiki/

Index of /wikidatawiki/

Name                 Last Modified           Size   Type
Parent Directory/                            -      Directory
20131221/            2013-Dec-27 01:19:05    -      Directory
20140106/            2014-Jan-12 21:10:01    -      Directory
20140123/            2014-Jan-29 03:39:23    -      Directory
20140210/            2014-Feb-16 16:38:40    -      Directory
latest/              2014-Feb-16 16:38:40    -      Directory

lighttpd/1.4.26
Wikidata-Toolkit-0.14.6/wdtk-dumpfiles/src/test/resources/wikidatawiki-index.html000066400000000000000000000033531444772566300302030ustar00rootroot00000000000000 Index of /wikidatawiki/

Index of /wikidatawiki/


../
20140508/                                          16-May-2014 03:50                   -
20140526/                                          03-Jun-2014 13:30                   -
20140612/                                          21-Jun-2014 02:16                   -
20140705/                                          16-Jul-2014 18:51                   -
20140731/                                          04-Aug-2014 04:57                   -
20140804/                                          16-Aug-2014 19:04                   -
20140823/                                          05-Sep-2014 12:17                   -
20140912/                                          25-Sep-2014 23:37                   -
20141009/                                          24-Oct-2014 07:27                   -
20141106/                                          22-Nov-2014 00:28                   -
20141205/                                          08-Dec-2014 04:25                   -
20141208/                                          10-Dec-2014 17:59                   -
20150113/                                          18-Jan-2015 11:16                   -
latest/                                            18-Jan-2015 11:16                   -

Wikidata-Toolkit-0.14.6/wdtk-examples/000077500000000000000000000000001444772566300175715ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/LICENSE.txt000066400000000000000000000261351444772566300214230ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Wikidata-Toolkit-0.14.6/wdtk-examples/README.md000066400000000000000000000174361444772566300210630ustar00rootroot00000000000000Wikidata Toolkit Examples ========================= This module contains example programs that show some of the features of Wikidata Toolkit. Overview and Settings --------------------- A detailed guide to each of the examples is given below. Many examples process data dumps exported by Wikidata. In most cases, the example only contains the actual processing code that does something interesting. The code for downloading dumps and iterating over them is in the ExampleHelpers.java class, which is used in many examples for common tasks. You can edit the static members in ExampleHelpers to select which dumps should be used (the data is available in several formats which may be more or less recent and more or less comprehensive). You can also switch to offline mode there: then only the files downloaded previously will be used. This is convenient for testing to avoid downloading new files when you don't really need absolutely current data. By default, the code will fetch the most recent JSON dumps from the Web. Some examples write their output to files. These files are put into the subdirectory "results" under the directory from where the application is run. Files in CSV format can be loaded in any spreadsheet tool to make diagrams, for example. Guide to the Available Examples ------------------------------- Ordered roughly from basic to advanced/specific. #### EntityStatisticsProcessor.java #### This program processes entities (items and properties) to collect some basic statistics. It counts how many items and properties there are, the number of labels, descriptions, and aliases, and the number of statements. This code might be useful to get to know the basic data structures where these things are stored. The example also counts the usage of each property in more details: its use in the main part of statements, in qualifiers, and in references is counted separately. The results for this are written into a CSV file in the end. #### FetchOnlineDataExample.java #### This program shows how to fetch live data from wikidata.org via the Web API. This can be used with any other Wikibase site as well. It is not practical to fetch all data in this way, but it can be very convenient to get some data directly even when processing a dump (since the dump can only be read in sequence). #### EditOnlineDataExample.java #### This program shows how to create and modify live data on test.wikidata.org via the Web API. This can be used with any other Wikibase site as well. The example first creates a new item with some starting data, then adds some additional statements, and finally modifies and deletes existing statements. All data modifications automatically use revision ids to make sure that no edit conflicts occur (and we don't modify/delete data that is different from what we expect). #### LocalDumpFileExample.java #### This program shows how to process a data dump that is available in a local file, rather than being automatically downloaded (and possibly cached) from the Wikimedia site. 
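In outline, the local-file case needs only a few calls on the dump-processing API. The following is a minimal, hedged sketch of the pattern that LocalDumpFileExample demonstrates (the class name `LocalDumpSketch` and the dump path are made up for illustration; the sketch also assumes that the `EntityDocumentProcessor` interface provides default no-op methods, so that only `processItemDocument` has to be overridden):

```java
import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.dumpfiles.DumpProcessingController;
import org.wikidata.wdtk.dumpfiles.MwLocalDumpFile;

public class LocalDumpSketch {
	public static void main(String[] args) {
		// Controller for the project that the dump belongs to:
		DumpProcessingController controller = new DumpProcessingController("wikidatawiki");
		// Never download anything; we only read the local file:
		controller.setOfflineMode(true);

		// Any EntityDocumentProcessor can be registered; this one merely
		// prints the id of every item it sees:
		EntityDocumentProcessor processor = new EntityDocumentProcessor() {
			@Override
			public void processItemDocument(ItemDocument itemDocument) {
				System.out.println(itemDocument.getEntityId().getId());
			}
		};
		// null = any content model; true = only current revisions:
		controller.registerEntityDocumentProcessor(processor, null, true);

		// Hypothetical file name; point this at your own local dump:
		controller.processDump(new MwLocalDumpFile(
				"dumpfiles/wikidatawiki-20140508.json.gz"));
	}
}
```

The file format is guessed from the file name here, so a path ending in `.json.gz` is treated as a compressed JSON dump.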
#### GreatestNumberProcessor.java ####

This simple program looks at all values of a number property to find the item
with the greatest value. It will print the result to the console. In most
cases, the item with the greatest number appears fairly early in the data
export, so watching the program work is not too exciting, but it shows how to
read a single property value and do something with it. The property that is
used is defined by a constant in the code and can be changed to see some other
greatest values.

#### LifeExpectancyProcessor.java ####

This program processes items to compute the average life expectancy of people
on Wikidata. It shows how to get details (here: year numbers) of specific
statement values for specific properties (here we use Wikidata's P569 "birth
date" and P570 "death date"). The results are stored in a CSV file that shows
average life expectancy by year of birth. The overall average is also printed
to the output.

#### WorldMapProcessor.java ####

This program generates images of world maps based on the locations of Wikidata
items, and stores the result in PNG files. The example builds several maps,
for Wikidata as a whole and for several big Wikipedias (counting only items
that have an article there). The code offers easy-to-adjust parameters for the
size of the output images, the Wikimedia projects to consider, and the scale
of the color values.

[Wikidata world maps for June 2015](https://ddll.inf.tu-dresden.de/web/Wikidata/Maps-06-2015/en)

#### GenderRatioProcessor.java ####

This program uses Wikidata to analyse the number of articles that exist on
certain topics in different Wikimedia projects (esp. in Wikipedias). In
particular, it counts the number of articles about humans and humans of a
specific gender (female, male, etc.). It can be used to estimate the gender
balance of various Wikipedias. The results are stored in a CSV file (all
projects x all genders), but for the largest projects they are also printed
to the output. This example is inspired by Max Klein's work on this topic.

[Related blog post by Max Klein](http://notconfusing.com/sex-ratios-in-wikidata-part-iii/)

#### JsonSerializationProcessor.java ####

This program creates a JSON file that contains English language terms,
birthdate, occupation, and image for all people on Wikidata who were born in
Dresden (the code can easily be modified to make a different selection). The
example shows how to serialize Wikidata Toolkit objects in JSON, how to select
item documents by a property, and how to filter documents to ignore some of
the data. The resulting file is small (less than 1M).

#### SitelinksExample.java ####

This program shows how to get information about the site links that are used
in Wikidata dumps. The links to Wikimedia projects use keys like "enwiki" for
English Wikipedia or "hewikivoyage" for Hebrew WikiVoyage. To find out the
meaning of these codes, and to create URLs for the articles on these projects,
Wikidata Toolkit includes some simple functions that download and process the
site links information for a given project. This example shows how to use this
functionality.

#### ClassPropertyUsageAnalyzer.java ####

This advanced program analyses the use of properties and classes on Wikidata,
and creates output that can be used in the [Miga data browser](http://migadv.com/).
You can see the result online at http://tools.wmflabs.org/wikidata-exports/miga/.
The program is slightly more complex, involving several processing steps and
additional code for formatting output for CSV files. A sketch of its core
counting pattern follows below.
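The promised sketch: stripped of CSV output, label handling, and class hierarchies, the analyzer's central step is just a tally of statement groups per property. This is a rough illustration, not the actual program; the class name `StatementCountSketch` is invented, and the default methods of `EntityDocumentProcessor` are again assumed:

```java
import java.util.HashMap;
import java.util.Map;

import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;

public class StatementCountSketch implements EntityDocumentProcessor {

	final Map<PropertyIdValue, Integer> statementCounts = new HashMap<>();

	@Override
	public void processItemDocument(ItemDocument itemDocument) {
		// Each StatementGroup collects all statements for one property:
		for (StatementGroup sg : itemDocument.getStatementGroups()) {
			statementCounts.merge(sg.getProperty(), sg.size(), Integer::sum);
		}
	}
}
```

Handing such a processor to `ExampleHelpers.processEntitiesFromWikidataDump(...)` yields per-property statement counts of the kind the real analyzer writes to `properties.csv`.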
#### RdfSerializationExample.java #### This program creates an RDF export. You can also do this directly using the command line client. The purpose of this program is just to show how this could be done in code, e.g., to implement additional pre-processing before the RDF serialisation. Other Helper Code ----------------- #### ExampleHelpers.java #### This class provides static helper methods to iterate through dumps, to configure the desired logging behaviour, and to write files to the "results" directory. It also allows you to change some global settings that will affect most examples. The code is of interest if you want to find out how to build a standalone application that includes all aspects without relying on the example module. #### EntityTimerProcessor.java #### This is a helper class that is used in all examples to print basic timer information and to provide support for having a timeout (cleanly abort processing after a fixed time, even if the dump would take much longer to complete; useful for testing). It should not be of primary interest for learning how to use Wikidata Toolkit, but you can have a look to find out how to use our Timer class. Additional Resources -------------------- * [Wikidata Toolkit homepage](https://www.mediawiki.org/wiki/Wikidata_Toolkit) * [Wikidata Toolkit Javadocs](http://wikidata.github.io/Wikidata-Toolkit/) Wikidata-Toolkit-0.14.6/wdtk-examples/pom.xml000066400000000000000000000044551444772566300211160ustar00rootroot00000000000000 4.0.0 org.wikidata.wdtk wdtk-parent 0.14.6 wdtk-examples jar Wikidata Toolkit Examples This module contains examples to use the Wikidata Toolkit. This module also contains classes that are used to run benchmarks. ${project.groupId} wdtk-datamodel ${project.version} ${project.groupId} wdtk-dumpfiles ${project.version} ${project.groupId} wdtk-rdf ${project.version} ${project.groupId} wdtk-util ${project.version} ${project.groupId} wdtk-wikibaseapi ${project.version} ${project.groupId} wdtk-storage ${project.version} org.slf4j slf4j-log4j12 ${slf4jVersion} org.apache.commons commons-compress ${apacheCommonsCompressVersion} org.codehaus.mojo cobertura-maven-plugin 2.7 true true Wikidata-Toolkit-0.14.6/wdtk-examples/src/000077500000000000000000000000001444772566300203605ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/000077500000000000000000000000001444772566300213045ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/000077500000000000000000000000001444772566300222255ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/000077500000000000000000000000001444772566300230145ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/000077500000000000000000000000001444772566300246115ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300255625ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/000077500000000000000000000000001444772566300274005ustar00rootroot00000000000000ClassPropertyUsageAnalyzer.java000066400000000000000000000704361444772566300355030ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use 
this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.io.PrintStream; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map.Entry; import org.apache.commons.lang3.tuple.ImmutablePair; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.TermedDocument; import org.wikidata.wdtk.datamodel.interfaces.Value; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; /** * This advanced example analyses the use of properties and classes in a dump * file, and stores the results in two CSV files. These files can be used with * the Miga data viewer to create the Wikidata Class and * Properties browser. You can view the settings for configuring Miga in the * Miga * directory for this app. *

* However, you can also view the files in any other tool that processes CSV. * The only peculiarity is that some fields in CSV contain lists of items as * values, with items separated by "@". This is not supported by most * applications since it does not fit into the tabular data model of CSV. *
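 * For example, a single "All superclasses" cell in classes.csv might hold a
 * value like "human@person@entity" (a made-up illustration of the encoding,
 * not actual output).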

* The code is somewhat complex and not always clean. It should be considered as * an advanced example, not as a first introduction. * * @author Markus Kroetzsch * */ public class ClassPropertyUsageAnalyzer implements EntityDocumentProcessor { /** * Set of top-level classes (without a superclass) that should be considered * during processing. *

	 * We use this list since our one-pass processing may fail to collect labels
	 * for some classes, if they are used as classes only after they occur in
	 * the dump. This can only occur for top-level classes (since a
	 * "subclass of" statement would already be a use as a class). This list
	 * tries to make sure that some more labels are collected for known
	 * top-level classes. It is not a problem if some of these classes are not
	 * really "top level" in the current dump.
	 */
	private static final HashSet<String> TOP_LEVEL_CLASSES = new HashSet<>();
	static {
		TOP_LEVEL_CLASSES.add("Q35120"); // Entity
		TOP_LEVEL_CLASSES.add("Q14897293"); // Fictional entity
		TOP_LEVEL_CLASSES.add("Q726"); // horse
		TOP_LEVEL_CLASSES.add("Q12567"); // Vikings
		TOP_LEVEL_CLASSES.add("Q32099");
		TOP_LEVEL_CLASSES.add("Q47883");
		TOP_LEVEL_CLASSES.add("Q188913");
		TOP_LEVEL_CLASSES.add("Q236209");
		TOP_LEVEL_CLASSES.add("Q459297");
		TOP_LEVEL_CLASSES.add("Q786014");
		TOP_LEVEL_CLASSES.add("Q861951");
		TOP_LEVEL_CLASSES.add("Q7045");
		TOP_LEVEL_CLASSES.add("Q31579");
		TOP_LEVEL_CLASSES.add("Q35054");
		TOP_LEVEL_CLASSES.add("Q39825");
		TOP_LEVEL_CLASSES.add("Q81513");
		TOP_LEVEL_CLASSES.add("Q102496");
		TOP_LEVEL_CLASSES.add("Q159661");
		TOP_LEVEL_CLASSES.add("Q1130491");
		TOP_LEVEL_CLASSES.add("Q2022036");
		TOP_LEVEL_CLASSES.add("Q2198291");
		TOP_LEVEL_CLASSES.add("Q3034652");
		TOP_LEVEL_CLASSES.add("Q3505845");
	}

	/**
	 * Class to record the use of some class item or property.
	 *
	 * @author Markus Kroetzsch
	 *
	 */
	private abstract static class UsageRecord {
		/**
		 * Number of items using this entity. For properties, this is the number
		 * of items with such a property. For class items, this is the number of
		 * instances of this class.
		 */
		public int itemCount = 0;
		/**
		 * Map that records how many times certain properties are used on items
		 * that use this entity (where "use" has the meaning explained for
		 * {@link UsageRecord#itemCount}).
		 */
		public HashMap<PropertyIdValue, Integer> propertyCoCounts = new HashMap<>();
	}

	/**
	 * Class to record the usage of a property in the data.
	 *
	 * @author Markus Kroetzsch
	 *
	 */
	private static class PropertyRecord extends UsageRecord {
		/**
		 * Number of statements with this property.
		 */
		public int statementCount = 0;
		/**
		 * Number of qualified statements that use this property.
		 */
		public int statementWithQualifierCount = 0;
		/**
		 * Number of statement qualifiers that use this property.
		 */
		public int qualifierCount = 0;
		/**
		 * Number of uses of this property in references. Multiple uses in the
		 * same references will be counted.
		 */
		public int referenceCount = 0;
		/**
		 * {@link PropertyDocument} for this property.
		 */
		public PropertyDocument propertyDocument = null;
	}

	/**
	 * Class to record the usage of a class item in the data.
	 *
	 * @author Markus Kroetzsch
	 *
	 */
	private static class ClassRecord extends UsageRecord {
		/**
		 * Number of subclasses of this class item.
		 */
		public int subclassCount = 0;
		/**
		 * {@link ItemDocument} of this class.
		 */
		public ItemDocument itemDocument = null;
		/**
		 * List of all super classes of this class.
		 */
		public ArrayList<EntityIdValue> superClasses = new ArrayList<>();
	}

	/**
	 * Comparator to order class items by their number of instances and direct
	 * subclasses.
	 *
	 * @author Markus Kroetzsch
	 *
	 */
	private static class ClassUsageRecordComparator implements
			Comparator<Entry<EntityIdValue, ClassRecord>> {
		@Override
		public int compare(Entry<EntityIdValue, ClassRecord> o1,
				Entry<EntityIdValue, ClassRecord> o2) {
			return o2.getValue().subclassCount + o2.getValue().itemCount
					- (o1.getValue().subclassCount + o1.getValue().itemCount);
		}
	}

	/**
	 * Comparator to order property records by their total number of uses, in
	 * statements, qualifiers, and references.
	 *
	 * @author Markus Kroetzsch
	 *
	 */
	private static class UsageRecordComparator implements
			Comparator<Entry<PropertyIdValue, PropertyRecord>> {
		@Override
		public int compare(Entry<PropertyIdValue, PropertyRecord> o1,
				Entry<PropertyIdValue, PropertyRecord> o2) {
			return (o2.getValue().itemCount + o2.getValue().qualifierCount + o2
					.getValue().referenceCount)
					- (o1.getValue().itemCount + o1.getValue().qualifierCount + o1
							.getValue().referenceCount);
		}
	}

	/**
	 * Total number of items processed.
	 */
	long countItems = 0;
	/**
	 * Total number of items that have some statement.
	 */
	long countPropertyItems = 0;
	/**
	 * Total number of properties processed.
	 */
	long countProperties = 0;
	/**
	 * Total number of items that are used as classes.
	 */
	long countClasses = 0;

	/**
	 * Collection of all property records.
	 */
	final HashMap<PropertyIdValue, PropertyRecord> propertyRecords = new HashMap<>();
	/**
	 * Collection of all item records of items used as classes.
	 */
	final HashMap<EntityIdValue, ClassRecord> classRecords = new HashMap<>();

	/**
	 * Map used during serialization to ensure that every label is used only
	 * once. The Map assigns an item to each label. If another item wants to use
	 * a label that is already assigned, it will use a label with an added Q-ID
	 * for disambiguation.
	 */
	final HashMap<String, EntityIdValue> labels = new HashMap<>();

	/**
	 * Main method. Processes the whole dump using this processor. To change
	 * which dump file to use and whether to run in offline mode, modify the
	 * settings in {@link ExampleHelpers}.
	 */
	public static void main(String[] args) {
		ExampleHelpers.configureLogging();
		ClassPropertyUsageAnalyzer.printDocumentation();

		ClassPropertyUsageAnalyzer processor = new ClassPropertyUsageAnalyzer();
		ExampleHelpers.processEntitiesFromWikidataDump(processor);
		processor.writeFinalReports();
	}

	@Override
	public void processItemDocument(ItemDocument itemDocument) {
		this.countItems++;
		if (itemDocument.getStatementGroups().size() > 0) {
			this.countPropertyItems++;
		}

		ClassRecord classRecord = null;
		if (TOP_LEVEL_CLASSES.contains(itemDocument.getEntityId().getId())
				|| this.classRecords.containsKey(itemDocument.getEntityId())) {
			classRecord = getClassRecord(itemDocument.getEntityId());
		}

		for (StatementGroup sg : itemDocument.getStatementGroups()) {
			PropertyRecord propertyRecord = getPropertyRecord(sg.getProperty());
			propertyRecord.itemCount++;
			propertyRecord.statementCount += sg.size();

			boolean isInstanceOf = "P31".equals(sg.getProperty().getId());
			boolean isSubclassOf = "P279".equals(sg.getProperty().getId());

			if (isSubclassOf && classRecord == null) {
				classRecord = getClassRecord(itemDocument.getEntityId());
			}

			for (Statement s : sg) {
				// Count uses of properties in qualifiers
				for (SnakGroup q : s.getQualifiers()) {
					countPropertyQualifier(q.getProperty(), q.size());
				}
				// Count statements with qualifiers
				if (s.getQualifiers().size() > 0) {
					propertyRecord.statementWithQualifierCount++;
				}
				// Count uses of properties in references
				for (Reference r : s.getReferences()) {
					for (SnakGroup snakGroup : r.getSnakGroups()) {
						countPropertyReference(snakGroup.getProperty(),
								snakGroup.size());
					}
				}
				// Process value of instance of/subclass of:
				if ((isInstanceOf || isSubclassOf)
						&& s.getMainSnak() instanceof ValueSnak) {
					Value value = s.getValue();
					if (value instanceof EntityIdValue) {
						ClassRecord otherClassRecord = getClassRecord((EntityIdValue) value);
						if (isInstanceOf) {
							otherClassRecord.itemCount++;
							countCooccurringProperties(itemDocument,
									otherClassRecord, null);
						} else {
							otherClassRecord.subclassCount++;
							classRecord.superClasses.add((EntityIdValue) value);
						}
					}
				}
			}
			countCooccurringProperties(itemDocument, propertyRecord,
					sg.getProperty());
		}

		if (classRecord != null) {
			this.countClasses++;
			classRecord.itemDocument = itemDocument;
		}

		// print a report once in a while:
		if (this.countItems % 100000 == 0) {
			printReport();
		}
		// if (this.countItems % 100000 == 0) {
		// writePropertyData();
		// writeClassData();
		// }
	}

	@Override
	public void processPropertyDocument(PropertyDocument propertyDocument) {
		this.countProperties++;
		PropertyRecord propertyRecord = getPropertyRecord(propertyDocument
				.getEntityId());
		propertyRecord.propertyDocument = propertyDocument;
	}

	/**
	 * Creates the final file output of the analysis.
	 */
	public void writeFinalReports() {
		writePropertyData();
		writeClassData();
	}

	/**
	 * Print some basic documentation about this program.
	 */
	public static void printDocumentation() {
		System.out
				.println("********************************************************************");
		System.out
				.println("*** Wikidata Toolkit: Class and Property Usage Analyzer");
		System.out.println("*** ");
		System.out
				.println("*** This program will download and process dumps from Wikidata.");
		System.out
				.println("*** It will create a CSV file with statistics about class and");
		System.out
				.println("*** property usage. These files can be used with the Miga data");
		System.out
				.println("*** viewer to create the browser seen at ");
		System.out
				.println("*** http://tools.wmflabs.org/wikidata-exports/miga/");
		System.out
				.println("********************************************************************");
	}

	/**
	 * Returns record where statistics about a class should be stored.
	 *
	 * @param entityIdValue
	 *            the class to initialize
	 * @return the class record
	 */
	private ClassRecord getClassRecord(EntityIdValue entityIdValue) {
		if (!this.classRecords.containsKey(entityIdValue)) {
			ClassRecord classRecord = new ClassRecord();
			this.classRecords.put(entityIdValue, classRecord);
			return classRecord;
		} else {
			return this.classRecords.get(entityIdValue);
		}
	}

	/**
	 * Returns record where statistics about a property should be stored.
	 *
	 * @param property
	 *            the property to initialize
	 * @return the property record
	 */
	private PropertyRecord getPropertyRecord(PropertyIdValue property) {
		if (!this.propertyRecords.containsKey(property)) {
			PropertyRecord propertyRecord = new PropertyRecord();
			this.propertyRecords.put(property, propertyRecord);
			return propertyRecord;
		} else {
			return this.propertyRecords.get(property);
		}
	}

	private void countCooccurringProperties(ItemDocument itemDocument,
			UsageRecord usageRecord, PropertyIdValue thisPropertyIdValue) {
		for (StatementGroup sg : itemDocument.getStatementGroups()) {
			if (!sg.getProperty().equals(thisPropertyIdValue)) {
				if (!usageRecord.propertyCoCounts.containsKey(sg.getProperty())) {
					usageRecord.propertyCoCounts.put(sg.getProperty(), 1);
				} else {
					usageRecord.propertyCoCounts
							.put(sg.getProperty(), usageRecord.propertyCoCounts
									.get(sg.getProperty()) + 1);
				}
			}
		}
	}

	/**
	 * Counts additional occurrences of a property as qualifier property of
	 * statements.
	 *
	 * @param property
	 *            the property to count
	 * @param count
	 *            the number of times to count the property
	 */
	private void countPropertyQualifier(PropertyIdValue property, int count) {
		PropertyRecord propertyRecord = getPropertyRecord(property);
		propertyRecord.qualifierCount = propertyRecord.qualifierCount + count;
	}

	/**
	 * Counts additional occurrences of a property as property in references.
	 *
	 * @param property
	 *            the property to count
	 * @param count
	 *            the number of times to count the property
	 */
	private void countPropertyReference(PropertyIdValue property, int count) {
		PropertyRecord propertyRecord = getPropertyRecord(property);
		propertyRecord.referenceCount = propertyRecord.referenceCount + count;
	}

	/**
	 * Prints a report about the statistics gathered so far.
	 */
	private void printReport() {
		System.out.println("Processed " + this.countItems + " items:");
		System.out.println(" * Properties encountered: "
				+ this.propertyRecords.size());
		System.out.println(" * Property documents: " + this.countProperties);
		System.out.println(" * Classes encountered: "
				+ this.classRecords.size());
		System.out.println(" * Class documents: " + this.countClasses);
	}

	/**
	 * Writes the data collected about properties to a file.
	 */
	private void writePropertyData() {
		try (PrintStream out = new PrintStream(
				ExampleHelpers.openExampleFileOuputStream("properties.csv"))) {

			out.println("Id" + ",Label" + ",Description" + ",URL" + ",Datatype"
					+ ",Uses in statements" + ",Items with such statements"
					+ ",Uses in statements with qualifiers"
					+ ",Uses in qualifiers" + ",Uses in references"
					+ ",Uses total" + ",Related properties");

			List<Entry<PropertyIdValue, PropertyRecord>> list = new ArrayList<>(
					this.propertyRecords.entrySet());
			list.sort(new UsageRecordComparator());
			for (Entry<PropertyIdValue, PropertyRecord> entry : list) {
				printPropertyRecord(out, entry.getValue(), entry.getKey());
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Writes the data collected about classes to a file.
	 */
	private void writeClassData() {
		try (PrintStream out = new PrintStream(
				ExampleHelpers.openExampleFileOuputStream("classes.csv"))) {

			out.println("Id" + ",Label" + ",Description" + ",URL" + ",Image"
					+ ",Number of direct instances"
					+ ",Number of direct subclasses" + ",Direct superclasses"
					+ ",All superclasses" + ",Related properties");

			List<Entry<EntityIdValue, ClassRecord>> list = new ArrayList<>(
					this.classRecords.entrySet());
			list.sort(new ClassUsageRecordComparator());
			for (Entry<EntityIdValue, ClassRecord> entry : list) {
				if (entry.getValue().itemCount > 0
						|| entry.getValue().subclassCount > 0) {
					printClassRecord(out, entry.getValue(), entry.getKey());
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Prints the data for a single class to the given stream. This will be a
	 * single line in CSV.
	 *
	 * @param out
	 *            the output to write to
	 * @param classRecord
	 *            the class record to write
	 * @param entityIdValue
	 *            the item id that this class record belongs to
	 */
	private void printClassRecord(PrintStream out, ClassRecord classRecord,
			EntityIdValue entityIdValue) {
		printTerms(out, classRecord.itemDocument, entityIdValue, "\""
				+ getClassLabel(entityIdValue) + "\"");
		printImage(out, classRecord.itemDocument);

		out.print("," + classRecord.itemCount + "," + classRecord.subclassCount);

		printClassList(out, classRecord.superClasses);

		HashSet<EntityIdValue> superClasses = new HashSet<>();
		for (EntityIdValue superClass : classRecord.superClasses) {
			addSuperClasses(superClass, superClasses);
		}

		printClassList(out, superClasses);

		printRelatedProperties(out, classRecord);

		out.println();
	}

	/**
	 * Prints a list of classes to the given output. The list is encoded as a
	 * single CSV value, using "@" as a separator. Miga can decode this.
	 * Standard CSV processors do not support lists of entries as values,
	 * however.
	 *
	 * @param out
	 *            the output to write to
	 * @param classes
	 *            the list of class items
	 */
	private void printClassList(PrintStream out, Iterable<EntityIdValue> classes) {
		out.print(",\"");
		boolean first = true;
		for (EntityIdValue superClass : classes) {
			if (first) {
				first = false;
			} else {
				out.print("@");
			}
			// makeshift escaping for Miga:
			out.print(getClassLabel(superClass).replace("@", "@"));
		}
		out.print("\"");
	}

	private void addSuperClasses(EntityIdValue itemIdValue,
			HashSet<EntityIdValue> superClasses) {
		if (superClasses.contains(itemIdValue)) {
			return;
		}
		superClasses.add(itemIdValue);
		ClassRecord classRecord = this.classRecords.get(itemIdValue);
		if (classRecord == null) {
			return;
		}

		for (EntityIdValue superClass : classRecord.superClasses) {
			addSuperClasses(superClass, superClasses);
		}
	}

	/**
	 * Prints the terms (label, etc.) of one entity to the given stream. This
	 * will lead to several values in the CSV file, which are the same for
	 * properties and class items.
	 *
	 * @param out
	 *            the output to write to
	 * @param termedDocument
	 *            the document that provides the terms to write
	 * @param entityIdValue
	 *            the entity that the data refers to.
	 * @param specialLabel
	 *            special label to use (rather than the label string in the
	 *            document) or null if not using; used by classes, which need to
	 *            support disambiguation in their labels
	 */
	private void printTerms(PrintStream out, TermedDocument termedDocument,
			EntityIdValue entityIdValue, String specialLabel) {
		String label = specialLabel;
		String description = "-";

		if (termedDocument != null) {
			if (label == null) {
				MonolingualTextValue labelValue = termedDocument.getLabels()
						.get("en");
				if (labelValue != null) {
					label = csvStringEscape(labelValue.getText());
				}
			}

			MonolingualTextValue descriptionValue = termedDocument
					.getDescriptions().get("en");
			if (descriptionValue != null) {
				description = csvStringEscape(descriptionValue.getText());
			}
		}

		if (label == null) {
			label = entityIdValue.getId();
		}

		out.print(entityIdValue.getId() + "," + label + "," + description + ","
				+ entityIdValue.getIri());
	}

	/**
	 * Prints the URL of a thumbnail for the given item document to the output,
	 * or a default image if no image is given for the item.
	 *
	 * @param out
	 *            the output to write to
	 * @param itemDocument
	 *            the document that may provide the image information
	 */
	private void printImage(PrintStream out, ItemDocument itemDocument) {
		String imageFile = null;

		if (itemDocument != null) {
			for (StatementGroup sg : itemDocument.getStatementGroups()) {
				boolean isImage = "P18".equals(sg.getProperty().getId());
				if (!isImage) {
					continue;
				}
				for (Statement s : sg) {
					if (s.getMainSnak() instanceof ValueSnak) {
						Value value = ((ValueSnak) s.getMainSnak()).getValue();
						if (value instanceof StringValue) {
							imageFile = ((StringValue) value).getString();
							break;
						}
					}
				}
				if (imageFile != null) {
					break;
				}
			}
		}

		if (imageFile == null) {
			out.print(",\"http://commons.wikimedia.org/w/thumb.php?f=MA_Route_blank.svg&w=50\"");
		} else {
			try {
				String imageFileEncoded;
				imageFileEncoded = URLEncoder.encode(
						imageFile.replace(" ", "_"), "utf-8");
				// Keep special title symbols unescaped:
				imageFileEncoded = imageFileEncoded.replace("%3A", ":")
						.replace("%2F", "/");
				// Keep the whole URL, including the width parameter, inside
				// the quoted CSV value:
				out.print(","
						+ csvStringEscape("http://commons.wikimedia.org/w/thumb.php?f="
								+ imageFileEncoded + "&w=50"));
			} catch (UnsupportedEncodingException e) {
				throw new RuntimeException(
						"Your JRE does not support UTF-8 encoding. Srsly?!", e);
			}
		}
	}

	/**
	 * Prints the data of one property to the given output. This will be a
	 * single line in CSV.
	 *
	 * @param out
	 *            the output to write to
	 * @param propertyRecord
	 *            the data to write
	 * @param propertyIdValue
	 *            the property that the data refers to
	 */
	private void printPropertyRecord(PrintStream out,
			PropertyRecord propertyRecord, PropertyIdValue propertyIdValue) {

		printTerms(out, propertyRecord.propertyDocument, propertyIdValue, null);

		String datatype = "Unknown";
		if (propertyRecord.propertyDocument != null) {
			datatype = getDatatypeLabel(propertyRecord.propertyDocument
					.getDatatype());
		}

		out.print(","
				+ datatype
				+ ","
				+ propertyRecord.statementCount
				+ ","
				+ propertyRecord.itemCount
				+ ","
				+ propertyRecord.statementWithQualifierCount
				+ ","
				+ propertyRecord.qualifierCount
				+ ","
				+ propertyRecord.referenceCount
				+ ","
				+ (propertyRecord.statementCount
						+ propertyRecord.qualifierCount + propertyRecord.referenceCount));

		printRelatedProperties(out, propertyRecord);

		out.println();
	}

	/**
	 * Returns an English label for a given datatype.
	 *
	 * @param datatype
	 *            the datatype to label
	 * @return the label
	 */
	private String getDatatypeLabel(DatatypeIdValue datatype) {
		if (datatype.getIri() == null) { // TODO should be redundant once the
											// JSON parsing works
			return "Unknown";
		}

		switch (datatype.getJsonString()) {
		case DatatypeIdValue.JSON_DT_COMMONS_MEDIA:
			return "Commons media";
		case DatatypeIdValue.JSON_DT_GLOBE_COORDINATES:
			return "Globe coordinates";
		case DatatypeIdValue.JSON_DT_ITEM:
			return "Item";
		case DatatypeIdValue.JSON_DT_QUANTITY:
			return "Quantity";
		case DatatypeIdValue.JSON_DT_STRING:
			return "String";
		case DatatypeIdValue.JSON_DT_TIME:
			return "Time";
		case DatatypeIdValue.JSON_DT_URL:
			return "URL";
		case DatatypeIdValue.JSON_DT_PROPERTY:
			return "Property";
		case DatatypeIdValue.JSON_DT_EXTERNAL_ID:
			return "External identifier";
		case DatatypeIdValue.JSON_DT_MATH:
			return "Math";
		case DatatypeIdValue.JSON_DT_MONOLINGUAL_TEXT:
			return "Monolingual Text";
		default:
			throw new RuntimeException("Unknown datatype "
					+ datatype.getJsonString());
		}
	}

	/**
	 * Prints a list of related properties to the output. The list is encoded
	 * as a single CSV value, using "@" as a separator. Miga can decode this.
	 * Standard CSV processors do not support lists of entries as values,
	 * however.
	 *
	 * @param out
	 *            the output to write to
	 * @param usageRecord
	 *            the data to write
	 */
	private void printRelatedProperties(PrintStream out, UsageRecord usageRecord) {

		List<ImmutablePair<PropertyIdValue, Double>> list = new ArrayList<>(
				usageRecord.propertyCoCounts.size());
		for (Entry<PropertyIdValue, Integer> coCountEntry : usageRecord.propertyCoCounts
				.entrySet()) {
			double otherThisItemRate = (double) coCountEntry.getValue()
					/ usageRecord.itemCount;
			double otherGlobalItemRate = (double) this.propertyRecords
					.get(coCountEntry.getKey()).itemCount
					/ this.countPropertyItems;
			double otherThisItemRateStep = 1 / (1 + Math.exp(6 * (-2
					* otherThisItemRate + 0.5)));
			double otherInvGlobalItemRateStep = 1 / (1 + Math.exp(6 * (-2
					* (1 - otherGlobalItemRate) + 0.5)));

			list.add(new ImmutablePair<>(coCountEntry.getKey(),
					otherThisItemRateStep * otherInvGlobalItemRateStep
							* otherThisItemRate / otherGlobalItemRate));
		}

		list.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));

		out.print(",\"");
		int count = 0;
		for (ImmutablePair<PropertyIdValue, Double> relatedProperty : list) {
			if (relatedProperty.right < 1.5) {
				break;
			}
			if (count > 0) {
				out.print("@");
			}
			// makeshift escaping for Miga:
			out.print(getPropertyLabel(relatedProperty.left).replace("@", "@"));
			count++;
		}
		out.print("\"");
	}

	/**
	 * Returns a string that should be used as a label for the given property.
* * @param propertyIdValue * the property to label * @return the label */ private String getPropertyLabel(PropertyIdValue propertyIdValue) { PropertyRecord propertyRecord = this.propertyRecords .get(propertyIdValue); if (propertyRecord == null || propertyRecord.propertyDocument == null) { return propertyIdValue.getId(); } else { return getLabel(propertyIdValue, propertyRecord.propertyDocument); } } /** * Returns a string that should be used as a label for the given item. The * method also ensures that each label is used for only one class. Other * classes with the same label will have their QID added for disambiguation. * * @param entityIdValue * the item to label * @return the label */ private String getClassLabel(EntityIdValue entityIdValue) { ClassRecord classRecord = this.classRecords.get(entityIdValue); String label; if (classRecord == null || classRecord.itemDocument == null) { label = entityIdValue.getId(); } else { label = getLabel(entityIdValue, classRecord.itemDocument); } EntityIdValue labelOwner = this.labels.get(label); if (labelOwner == null) { this.labels.put(label, entityIdValue); return label; } else if (labelOwner.equals(entityIdValue)) { return label; } else { return label + " (" + entityIdValue.getId() + ")"; } } /** * Returns the CSV-escaped label for the given entity based on the terms in * the given document. The returned string will have its quotes escaped, but * it will not be put in quotes (since this is not appropriate in all * contexts where this method is used). * * @param entityIdValue * the entity to label * @param termedDocument * the document to get labels from * @return the label */ private String getLabel(EntityIdValue entityIdValue, TermedDocument termedDocument) { MonolingualTextValue labelValue = termedDocument.getLabels().get("en"); if (labelValue != null) { return labelValue.getText().replace("\"", "\"\""); } else { return entityIdValue.getId(); } } /** * Escapes a string for use in CSV. In particular, the string is quoted and * quotation marks are escaped. * * @param string * the string to escape * @return the escaped string */ private String csvStringEscape(String string) { return "\"" + string.replace("\"", "\"\"") + "\""; } } DataExtractionProcessor.java000066400000000000000000000130761444772566300350050ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2016 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import java.io.PrintStream; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.StringValue; import org.wikidata.wdtk.datamodel.interfaces.Value; /** * This simple {@link EntityDocumentProcessor} finds all items with a GND * identifier (property P227) who are also humans (P31 with value Q5), and * extracts for each of them the id, GND value, as well as English and German * labels and Wikipedia articles, if any. The results are written to a CSV file * "extracted-data.csv". The extracted property can be modified by changing the * value for {@link DataExtractionProcessor#extractPropertyId}. The current code * only extracts the first value for this property if many are given. The filter * condition (P31::Q5) can also be changed in the code. * * @author Markus Kroetzsch * */ public class DataExtractionProcessor implements EntityDocumentProcessor { static final String extractPropertyId = "P227"; // "GND identifier" static final String filterPropertyId = "P31"; // "instance of" static final Value filterValue = Datamodel.makeWikidataItemIdValue("Q5"); // "human" int itemsWithPropertyCount = 0; int itemCount = 0; PrintStream out; /** * Main method. Processes the whole dump using this processor. To change * which dump file to use and whether to run in offline mode, modify the * settings in {@link ExampleHelpers}. * * @param args * @throws IOException */ public static void main(String[] args) throws IOException { ExampleHelpers.configureLogging(); DataExtractionProcessor.printDocumentation(); DataExtractionProcessor processor = new DataExtractionProcessor(); ExampleHelpers.processEntitiesFromWikidataDump(processor); processor.close(); } public DataExtractionProcessor() throws IOException { // open file for writing results: out = new PrintStream( ExampleHelpers.openExampleFileOuputStream("extracted-data.csv")); // write CSV header: out.println("ID,Label (en),Label (de),Value,Wikipedia (en),Wikipedia (de)"); } @Override public void processItemDocument(ItemDocument itemDocument) { this.itemCount++; // Check if the item matches our filter conditions: if (!itemDocument.hasStatementValue(filterPropertyId, filterValue)) { return; } // Find the first value for this property, if any: StringValue stringValue = itemDocument .findStatementStringValue(extractPropertyId); // If a value was found, write the data: if (stringValue != null) { this.itemsWithPropertyCount++; out.print(itemDocument.getEntityId().getId()); out.print(","); out.print(csvEscape(itemDocument.findLabel("en"))); out.print(","); out.print(csvEscape(itemDocument.findLabel("de"))); out.print(","); out.print(csvEscape(stringValue.getString())); out.print(","); SiteLink enwiki = itemDocument.getSiteLinks().get("enwiki"); if (enwiki != null) { out.print(csvEscape(enwiki.getPageTitle())); } else { out.print("\"\""); } out.print(","); SiteLink dewiki = itemDocument.getSiteLinks().get("dewiki"); if (dewiki != null) { out.print(csvEscape(dewiki.getPageTitle())); } else { out.print("\"\""); } out.println(); } // Print progress every 100,000 items: if (this.itemCount % 100000 == 0) { printStatus(); } } /** * Escapes a string for use in CSV. In particular, the string is quoted and * quotation marks are escaped. 
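* For example, the text {@code say "hi"} becomes {@code "say ""hi"""}.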
* * @param string * the string to escape * @return the escaped string */ private String csvEscape(String string) { if (string == null) { return "\"\""; } else { return "\"" + string.replace("\"", "\"\"") + "\""; } } /** * Prints the current status, time and entity count. */ public void printStatus() { System.out.println("Found " + this.itemsWithPropertyCount + " matching items after scanning " + this.itemCount + " items."); } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: DataExtractionProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will scan the dump to find items with values for property"); System.out.println("*** " + extractPropertyId + " and print some data for these items to a CSV file. "); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } public void close() { printStatus(); this.out.close(); } } EditOnlineDataExample.java000066400000000000000000000307631444772566300343350ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.examples; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Map; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder; import org.wikidata.wdtk.datamodel.helpers.ReferenceBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.util.WebResourceFetcherImpl; import org.wikidata.wdtk.wikibaseapi.ApiConnection; import org.wikidata.wdtk.wikibaseapi.BasicApiConnection; import org.wikidata.wdtk.wikibaseapi.WikibaseDataEditor; import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher; import org.wikidata.wdtk.wikibaseapi.apierrors.EditConflictErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.NoSuchEntityErrorException; /** * This example shows how to create and modify data through the web API of a * Wikibase site. *
<p>
* IMPORTANT: Running this program will perform edits on test.wikidata.org. * These edits are permanent and public. When running this program as is, the * edits will be performed without logging in. This means that your current IP * address will be recorded in the edit history of the page. If you prefer to * use a login, please comment-in the respective line in the source code and * modify it to use your credentials. *
<p>
* Note that all modification operations can throw an * {@link MediaWikiApiErrorException} (if there was an API error) or * {@link IOException} (if there was a network error, etc.). We do not handle * this here. In real applications, you may want to handle some subclasses of * {@link MediaWikiApiErrorException} in special ways, e.g., * {@link EditConflictErrorException} (you tried to edit an entity that has been * modified by someone else since) and {@link NoSuchEntityErrorException} (you * tried to modify an entity that meanwhile was deleted). * * @author Markus Kroetzsch * */ public class EditOnlineDataExample { /** * We use this to identify the site test.wikidata.org. This IRI is not * essential for API interactions (the API knows of only one site and will * use local ids only), but it is important to use a fixed IRI in your code * for each site and not to mix IRIs. */ final static String siteIri = "http://www.test.wikidata.org/entity/"; static PropertyIdValue stringProperty1; static PropertyIdValue stringProperty2; static PropertyIdValue stringProperty3; static PropertyIdValue stringProperty4; static PropertyIdValue stringProperty5; public static void main(String[] args) throws IOException, MediaWikiApiErrorException { ExampleHelpers.configureLogging(); printDocumentation(); // Always set your User-Agent to the name of your application: WebResourceFetcherImpl .setUserAgent("Wikidata Toolkit EditOnlineDataExample"); ApiConnection connection = BasicApiConnection.getTestWikidataApiConnection(); // Optional login -- required for operations on real wikis: // connection.login("my username", "my password"); WikibaseDataEditor wbde = new WikibaseDataEditor(connection, siteIri); // Find some test properties on test.wikidata.org: findSomeStringProperties(connection); System.out.println("*** Creating a new entity ..."); ItemIdValue noid = ItemIdValue.NULL; // used when creating new items Statement statement1 = StatementBuilder .forSubjectAndProperty(noid, stringProperty1) .withValue(Datamodel.makeStringValue("String value 1")).build(); Statement statement2 = StatementBuilder .forSubjectAndProperty(noid, stringProperty1) .withValue( Datamodel .makeStringValue("Item created by Wikidata Toolkit example program; see https://github.com/Wikidata/Wikidata-Toolkit/")) .build(); Statement statement3 = StatementBuilder .forSubjectAndProperty(noid, stringProperty2) .withValue(Datamodel.makeStringValue("String value 3")).build(); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(noid) .withLabel("Wikidata Toolkit test", "en") .withStatement(statement1).withStatement(statement2) .withStatement(statement3).build(); // Note: we do not give a description, since label+description must be // unique, which would cause problems if this example is run many times. 
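// Note: as the class documentation above explains, edit operations may fail
// with subclasses of MediaWikiApiErrorException. A minimal handling sketch
// (illustrative only; this example simply lets such errors propagate):
//
// try {
//     wbde.createItemDocument(itemDocument, "summary", null);
// } catch (EditConflictErrorException e) {
//     // the entity was modified by someone else since it was fetched
// } catch (NoSuchEntityErrorException e) {
//     // the entity was deleted in the meantime
// }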
ItemDocument newItemDocument = wbde.createItemDocument(itemDocument, "Wikidata Toolkit example test item creation", null); ItemIdValue newItemId = newItemDocument.getEntityId(); System.out.println("*** Successfully created a new item " + newItemId.getId() + " (see https://test.wikidata.org/w/index.php?title=" + newItemId.getId() + "&oldid=" + newItemDocument.getRevisionId() + " for this version)"); System.out.println("*** Adding more statements to new entity ..."); // Make a statement with qualifiers for a change: Statement statement4 = StatementBuilder .forSubjectAndProperty(noid, stringProperty2) .withValue(Datamodel.makeStringValue("String value 4")) .withQualifierValue(stringProperty1, Datamodel.makeStringValue("Qualifier value 1")) .withQualifierValue(stringProperty1, Datamodel.makeStringValue("Qualifier value 2")) .withQualifierValue(stringProperty2, Datamodel.makeStringValue("Qualifier value 3")).build(); // Make a statement with the same claim as statement 1, // but with an additional reference; // WDTK will merge this automatically into the existing statement Reference reference1 = ReferenceBuilder .newInstance() .withPropertyValue(stringProperty4, Datamodel.makeStringValue("Reference property value 1")) .withPropertyValue(stringProperty5, Datamodel.makeStringValue("Reference property value 2")) .build(); Statement statement1WithRef = StatementBuilder .forSubjectAndProperty(noid, stringProperty1) .withValue(Datamodel.makeStringValue("String value 1")) .withReference(reference1).build(); // We add three statements: // * statement4: new statement; will be added // * statement1WithRef: extension of statement1; just add reference to // existing statement // * statement2: already present, will not be added again newItemDocument = wbde.updateStatements(newItemId, Arrays.asList(statement4, statement1WithRef, statement2), Collections.emptyList(), "Wikidata Toolkit example test statement addition", null); System.out.println("*** Successfully added statements to " + newItemId.getId() + " (see https://test.wikidata.org/w/index.php?title=" + newItemId.getId() + "&oldid=" + newItemDocument.getRevisionId() + " for this version)"); System.out .println("*** Deleting and modifying existing statements ..."); // We first need to find existing statements with their statement id. // For this we look at the item document that we have last retrieved. Statement statementToModify = findStatementGroup(stringProperty1, newItemDocument).getStatements().get(0); // We replace this statement by a new one, with the same reference and // property but a different value.
The id is essential to make sure that // we update the existing statement rather than adding a new one: Statement newStatement1 = StatementBuilder .forSubjectAndProperty(noid, stringProperty1) .withId(statementToModify.getStatementId()) .withValue(Datamodel.makeStringValue("Updated string value 1")) .withReferences(statementToModify.getReferences()).build(); // We also want to delete a statement: Statement statementToDelete = findStatementGroup(stringProperty2, newItemDocument).getStatements().get(0); newItemDocument = wbde.updateStatements(newItemDocument, Collections.singletonList(newStatement1), Collections.singletonList(statementToDelete), "Wikidata Toolkit example test statement modification", null); System.out.println("*** Successfully updated statements of " + newItemId.getId() + " (see https://test.wikidata.org/w/index.php?title=" + newItemId.getId() + "&oldid=" + newItemDocument.getRevisionId() + " for this version)"); System.out .println("*** The complete history of our edits can be seen at: " + "https://test.wikidata.org/w/index.php?title=" + newItemId.getId() + "&action=history"); System.out.println("*** Done."); } /** * Finds properties of datatype string on test.wikidata.org. Since the test * site changes all the time, we cannot hardcode a specific property here. * Instead, we just look through all properties starting from P1 to find the * first few properties of type string that have an English label. These * properties are used for testing in this code. * * @param connection * @throws MediaWikiApiErrorException * @throws IOException */ public static void findSomeStringProperties(ApiConnection connection) throws MediaWikiApiErrorException, IOException { WikibaseDataFetcher wbdf = new WikibaseDataFetcher(connection, siteIri); wbdf.getFilter().excludeAllProperties(); wbdf.getFilter().setLanguageFilter(Collections.singleton("en")); ArrayList<PropertyIdValue> stringProperties = new ArrayList<>(); System.out .println("*** Trying to find string properties for the example ... "); int propertyNumber = 1; while (stringProperties.size() < 5) { ArrayList<String> fetchProperties = new ArrayList<>(); for (int i = propertyNumber; i < propertyNumber + 10; i++) { fetchProperties.add("P" + i); } propertyNumber += 10; Map<String, EntityDocument> results = wbdf .getEntityDocuments(fetchProperties); for (EntityDocument ed : results.values()) { PropertyDocument pd = (PropertyDocument) ed; if (DatatypeIdValue.JSON_DT_STRING.equals(pd.getDatatype().getJsonString()) && pd.getLabels().containsKey("en")) { stringProperties.add(pd.getEntityId()); System.out.println("* Found string property " + pd.getEntityId().getId() + " (" + pd.getLabels().get("en") + ")"); } } } stringProperty1 = stringProperties.get(0); stringProperty2 = stringProperties.get(1); stringProperty3 = stringProperties.get(2); stringProperty4 = stringProperties.get(3); stringProperty5 = stringProperties.get(4); System.out.println("*** Done."); } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: EditOnlineDataExample"); System.out.println("*** "); System.out .println("*** This program creates and modifies online data at test.wikidata.org."); System.out .println("*** It does not download any dump files.
See source code for details."); System.out .println("********************************************************************"); } /** * Finds the {@link StatementGroup} for the given property in a document. * * @param pid * the property to look for * @param document * the document to search * @return the {@link StatementGroup} with this property, or null if there * is none */ protected static StatementGroup findStatementGroup(PropertyIdValue pid, StatementDocument document) { for (StatementGroup sg : document.getStatementGroups()) { if (pid.equals(sg.getProperty())) { return sg; } } return null; } } EditOnlineMediaInfoExample.java000066400000000000000000000102661444772566300353130ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.interfaces.*; import org.wikidata.wdtk.wikibaseapi.ApiConnection; import org.wikidata.wdtk.wikibaseapi.BasicApiConnection; import org.wikidata.wdtk.wikibaseapi.WikibaseDataEditor; import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import java.io.IOException; import java.util.Collections; /** * This example shows how to create and modify media info data. *
<p>
* IMPORTANT: Running this program will perform edits on commons.wikimedia.beta.wmflabs.org. * These edits are permanent and public. When running this program as is, the * edits will be performed without logging in. This means that your current IP * address will be recorded in the edit history of the page. If you prefer to * use a login, please comment-in the respective line in the source code and * modify it to use your credentials. *
<p>
* * @author Thomas Pellissier Tanon * */ public class EditOnlineMediaInfoExample { /** * We use this to identify the site commons.wikimedia.beta.wmflabs.org. This IRI is not * essential for API interactions (the API knows of only one site and will * use local ids only), but it is important to use a fixed IRI in your code * for each site and not to mix IRIs. * * For wikimedia Commons, use {@link Datamodel#SITE_WIKIMEDIA_COMMONS}. */ private final static String siteIri = "https://commons.wikimedia.beta.wmflabs.org/entity/"; public static void main(String[] args) throws IOException, MediaWikiApiErrorException { ExampleHelpers.configureLogging(); printDocumentation(); ApiConnection connection = new BasicApiConnection("https://commons.wikimedia.beta.wmflabs.org/w/api.php"); // Optional login -- required for operations on real wikis: // connection.login("my username", "my password"); WikibaseDataFetcher wbdf = new WikibaseDataFetcher(connection, siteIri); WikibaseDataEditor wbde = new WikibaseDataEditor(connection, siteIri); System.out.println("*** Fetching the current media info or retrieving a dummy value..."); // If the entity does not exist, it's going to be returned anyway MediaInfoDocument mediaInfoDocument = (MediaInfoDocument) wbdf.getEntityDocumentByTitle("commonswiki", "File:RandomImage 4658098723742867.jpg"); System.out.println("*** Editing a media info document ..."); mediaInfoDocument = mediaInfoDocument .withLabel(Datamodel.makeMonolingualTextValue("random image", "en")) .withStatement(StatementBuilder .forSubjectAndProperty(mediaInfoDocument.getEntityId(), Datamodel.makeWikidataPropertyIdValue("P245962")) .withValue(Datamodel.makeWikidataItemIdValue("Q81566")).build()); wbde.editMediaInfoDocument(mediaInfoDocument, false, "Wikidata Toolkit example media info edit", Collections.emptyList()); } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out.println("********************************************************************"); System.out.println("*** Wikidata Toolkit: MediaInfoDataExample"); System.out.println("*** "); System.out.println("*** This program creates and modifies online data at commons.wikimedia.beta.wmflabs.org."); System.out.println("*** It does not download any dump files. See source code for details."); System.out.println("********************************************************************"); } } EntityStatisticsProcessor.java000066400000000000000000000360611444772566300354210ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import java.io.IOException; import java.io.PrintStream; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.TermedDocument; /** * A simple example class that processes EntityDocuments to compute basic * statistics that are printed to the standard output. Moreover, it stores * further statistics in several files: *
<p> * <ul>
* <li>The number of uses of each property in the data is counted and stored in * CSV files item-property-counts.csv (for statements used on items) and * property-property-counts.csv (for statements used on properties).</li>
* <li>The number of links to each linked site is counted and stored in file * site-link-counts.csv.</li>
* <li>The number of labels, aliases, and descriptions per language is counted * and stored in CSV files item-term-counts.csv (for items) and * property-term-counts.csv (for properties).</li>
* </ul>
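* <p> * For illustration, a data line in site-link-counts.csv consists of a site key * and a count, e.g. {@code enwiki,1234} (the number here is made up); the other * files follow the CSV headers written out in the code below.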
* * @author Markus Kroetzsch * */ class EntityStatisticsProcessor implements EntityDocumentProcessor { /** * Simple record class to keep track of some usage numbers for one type of * entity. * * @author Markus Kroetzsch * */ static class UsageStatistics { long count = 0; long countLabels = 0; long countDescriptions = 0; long countAliases = 0; long countStatements = 0; long countReferencedStatements = 0; // Maps to store property usage data for each property: final HashMap<PropertyIdValue, Integer> propertyCountsMain = new HashMap<>(); final HashMap<PropertyIdValue, Integer> propertyCountsQualifier = new HashMap<>(); final HashMap<PropertyIdValue, Integer> propertyCountsReferences = new HashMap<>(); final HashMap<String, Integer> labelCounts = new HashMap<>(); final HashMap<String, Integer> descriptionCounts = new HashMap<>(); final HashMap<String, Integer> aliasCounts = new HashMap<>(); } UsageStatistics itemStatistics = new UsageStatistics(); UsageStatistics propertyStatistics = new UsageStatistics(); long countSiteLinks = 0; final HashMap<String, Integer> siteLinkStatistics = new HashMap<>(); /** * Main method. Processes the whole dump using this processor and writes the * results to a file. To change which dump file to use and whether to run in * offline mode, modify the settings in {@link ExampleHelpers}. * * @param args */ public static void main(String[] args) { ExampleHelpers.configureLogging(); EntityStatisticsProcessor.printDocumentation(); EntityStatisticsProcessor entityStatisticsProcessor = new EntityStatisticsProcessor(); ExampleHelpers .processEntitiesFromWikidataDump(entityStatisticsProcessor); entityStatisticsProcessor.writeFinalResults(); } @Override public void processItemDocument(ItemDocument itemDocument) { // Count items: this.itemStatistics.count++; countTerms(this.itemStatistics, itemDocument); countStatements(this.itemStatistics, itemDocument); // Count site links: this.countSiteLinks += itemDocument.getSiteLinks().size(); for (SiteLink siteLink : itemDocument.getSiteLinks().values()) { countKey(this.siteLinkStatistics, siteLink.getSiteKey(), 1); } // Print a report every 10000 items: if (this.itemStatistics.count % 10000 == 0) { printStatus(); } } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { // Count properties: this.propertyStatistics.count++; countTerms(this.propertyStatistics, propertyDocument); countStatements(this.propertyStatistics, propertyDocument); } /** * Count the terms (labels, descriptions, aliases) of an item or property * document. * * @param usageStatistics * statistics object to store counters in * @param termedDocument * document to count the terms of */ protected void countTerms(UsageStatistics usageStatistics, TermedDocument termedDocument) { usageStatistics.countLabels += termedDocument.getLabels().size(); for (MonolingualTextValue mtv : termedDocument.getLabels().values()) { countKey(usageStatistics.labelCounts, mtv.getLanguageCode(), 1); } usageStatistics.countDescriptions += termedDocument.getDescriptions() .size(); for (MonolingualTextValue mtv : termedDocument.getDescriptions() .values()) { countKey(usageStatistics.descriptionCounts, mtv.getLanguageCode(), 1); } for (String languageKey : termedDocument.getAliases().keySet()) { int count = termedDocument.getAliases().get(languageKey).size(); usageStatistics.countAliases += count; countKey(usageStatistics.aliasCounts, languageKey, count); } } /** * Count the statements and property uses of an item or property document.
* * @param usageStatistics * statistics object to store counters in * @param statementDocument * document to count the statements of */ protected void countStatements(UsageStatistics usageStatistics, StatementDocument statementDocument) { // Count Statement data: for (StatementGroup sg : statementDocument.getStatementGroups()) { // Count Statements: usageStatistics.countStatements += sg.size(); // Count uses of properties in Statements: countPropertyMain(usageStatistics, sg.getProperty(), sg.size()); for (Statement s : sg) { for (SnakGroup q : s.getQualifiers()) { countPropertyQualifier(usageStatistics, q.getProperty(), q.size()); } for (Reference r : s.getReferences()) { usageStatistics.countReferencedStatements++; for (SnakGroup snakGroup : r.getSnakGroups()) { countPropertyReference(usageStatistics, snakGroup.getProperty(), snakGroup.size()); } } } } } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: EntityStatisticsProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will print progress information and some simple statistics."); System.out .println("*** Results about property usage will be stored in a CSV file."); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } /** * Prints and stores final result of the processing. This should be called * after finishing the processing of a dump. It will print the statistics * gathered during processing and it will write a CSV file with usage counts * for every property. */ private void writeFinalResults() { // Print a final report: printStatus(); // Store property counts in files: writePropertyStatisticsToFile(this.itemStatistics, "item-property-counts.csv"); writePropertyStatisticsToFile(this.propertyStatistics, "property-property-counts.csv"); // Store site link statistics in file: try (PrintStream out = new PrintStream( ExampleHelpers .openExampleFileOuputStream("site-link-counts.csv"))) { out.println("Site key,Site links"); for (Entry<String, Integer> entry : this.siteLinkStatistics .entrySet()) { out.println(entry.getKey() + "," + entry.getValue()); } } catch (IOException e) { e.printStackTrace(); } // Store term statistics in file: writeTermStatisticsToFile(this.itemStatistics, "item-term-counts.csv"); writeTermStatisticsToFile(this.propertyStatistics, "property-term-counts.csv"); } /** * Stores the gathered usage statistics about property uses to a CSV file.
* * @param usageStatistics * the statistics to store * @param fileName * the name of the file to use */ private void writePropertyStatisticsToFile(UsageStatistics usageStatistics, String fileName) { try (PrintStream out = new PrintStream( ExampleHelpers.openExampleFileOuputStream(fileName))) { out.println("Property id,in statements,in qualifiers,in references,total"); for (Entry<PropertyIdValue, Integer> entry : usageStatistics.propertyCountsMain .entrySet()) { int qCount = usageStatistics.propertyCountsQualifier.get(entry .getKey()); int rCount = usageStatistics.propertyCountsReferences.get(entry .getKey()); int total = entry.getValue() + qCount + rCount; out.println(entry.getKey().getId() + "," + entry.getValue() + "," + qCount + "," + rCount + "," + total); } } catch (IOException e) { e.printStackTrace(); } } /** * Stores the gathered usage statistics about term uses by language to a CSV * file. * * @param usageStatistics * the statistics to store * @param fileName * the name of the file to use */ private void writeTermStatisticsToFile(UsageStatistics usageStatistics, String fileName) { // Make sure all keys are present in label count map: for (String key : usageStatistics.aliasCounts.keySet()) { countKey(usageStatistics.labelCounts, key, 0); } for (String key : usageStatistics.descriptionCounts.keySet()) { countKey(usageStatistics.labelCounts, key, 0); } try (PrintStream out = new PrintStream( ExampleHelpers.openExampleFileOuputStream(fileName))) { out.println("Language,Labels,Descriptions,Aliases"); for (Entry<String, Integer> entry : usageStatistics.labelCounts .entrySet()) { countKey(usageStatistics.aliasCounts, entry.getKey(), 0); int aCount = usageStatistics.aliasCounts.get(entry.getKey()); countKey(usageStatistics.descriptionCounts, entry.getKey(), 0); int dCount = usageStatistics.descriptionCounts.get(entry .getKey()); out.println(entry.getKey() + "," + entry.getValue() + "," + dCount + "," + aCount); } } catch (IOException e) { e.printStackTrace(); } } /** * Prints a report about the statistics gathered so far. */ private void printStatus() { System.out.println("---"); printStatistics(this.itemStatistics, "items"); System.out.println(" * Site links: " + this.countSiteLinks); printStatistics(this.propertyStatistics, "properties"); } /** * Prints a report about the statistics stored in the given data object. * * @param usageStatistics * the statistics object to print * @param entityLabel * the label to use to refer to this kind of entities ("items" or * "properties") */ private void printStatistics(UsageStatistics usageStatistics, String entityLabel) { System.out.println("Processed " + usageStatistics.count + " " + entityLabel + ":"); System.out.println(" * Labels: " + usageStatistics.countLabels + ", descriptions: " + usageStatistics.countDescriptions + ", aliases: " + usageStatistics.countAliases); System.out.println(" * Statements: " + usageStatistics.countStatements + ", with references: " + usageStatistics.countReferencedStatements); } /** * Counts additional occurrences of a property as the main property of * statements.
* * @param usageStatistics * statistics object where count is stored * @param property * the property to count * @param count * the number of times to count the property */ private void countPropertyMain(UsageStatistics usageStatistics, PropertyIdValue property, int count) { addPropertyCounters(usageStatistics, property); usageStatistics.propertyCountsMain.put(property, usageStatistics.propertyCountsMain.get(property) + count); } /** * Counts additional occurrences of a property as qualifier property of * statements. * * @param usageStatistics * statistics object where count is stored * @param property * the property to count * @param count * the number of times to count the property */ private void countPropertyQualifier(UsageStatistics usageStatistics, PropertyIdValue property, int count) { addPropertyCounters(usageStatistics, property); usageStatistics.propertyCountsQualifier.put(property, usageStatistics.propertyCountsQualifier.get(property) + count); } /** * Counts additional occurrences of a property as property in references. * * @param usageStatistics * statistics object where count is stored * @param property * the property to count * @param count * the number of times to count the property */ private void countPropertyReference(UsageStatistics usageStatistics, PropertyIdValue property, int count) { addPropertyCounters(usageStatistics, property); usageStatistics.propertyCountsReferences.put(property, usageStatistics.propertyCountsReferences.get(property) + count); } /** * Initializes the counters for a property to zero if not done yet. * * @param usageStatistics * statistics object to initialize * @param property * the property to count */ private void addPropertyCounters(UsageStatistics usageStatistics, PropertyIdValue property) { if (!usageStatistics.propertyCountsMain.containsKey(property)) { usageStatistics.propertyCountsMain.put(property, 0); usageStatistics.propertyCountsQualifier.put(property, 0); usageStatistics.propertyCountsReferences.put(property, 0); } } /** * Helper method that stores in a hash map how often a certain key occurs. * If the key has not been encountered yet, a new entry is created for it in * the map. Otherwise the existing value for the key is incremented. * * @param map * the map where the counts are stored * @param key * the key to be counted * @param count * value by which the count should be incremented; 1 is the usual * case */ private void countKey(Map<String, Integer> map, String key, int count) { if (map.containsKey(key)) { map.put(key, map.get(key) + count); } else { map.put(key, count); } } } Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/ExampleHelpers.java000066400000000000000000000212701444772566300331630ustar00rootroot00000000000000package org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import java.io.FileOutputStream; import java.io.IOException; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.dumpfiles.DumpProcessingController; import org.wikidata.wdtk.dumpfiles.EntityTimerProcessor; import org.wikidata.wdtk.dumpfiles.EntityTimerProcessor.TimeoutException; import org.wikidata.wdtk.dumpfiles.MwDumpFile; /** * Class for sharing code that is used in many examples. It contains several * static final members that can be modified to change the behaviour of example * programs, such as whether to use {@link ExampleHelpers#OFFLINE_MODE} or not. * * @author Markus Kroetzsch * */ public class ExampleHelpers { /** * If set to true, all example programs will run in offline mode. Only data * dumps that have been downloaded in previous runs will be used. */ public static final boolean OFFLINE_MODE = false; /** * Enum to say which dumps should be downloaded and processed. Used as * possible values of {@link ExampleHelpers#DUMP_FILE_MODE}. */ public enum DumpProcessingMode { JSON, CURRENT_REVS, ALL_REVS, CURRENT_REVS_WITH_DAILIES, ALL_REVS_WITH_DAILIES, JUST_ONE_DAILY_FOR_TEST } /** * Defines which dumps will be downloaded and processed in all examples. */ public static final DumpProcessingMode DUMP_FILE_MODE = DumpProcessingMode.JSON; /** * The directory where to place files created by the example applications. */ public static final String EXAMPLE_OUTPUT_DIRECTORY = "results"; /** * Timeout to abort processing after a short while or 0 to disable timeout. * If set, then the processing will cleanly exit after about this many * seconds, as if the dump file would have ended there. This is useful for * testing (and in particular better than just aborting the program) since * it allows for final processing and proper closing to happen without * having to wait for a whole dump file to process. */ public static final int TIMEOUT_SEC = 0; /** * Identifier of the dump file that was processed last. This can be used to * name files generated while processing a dump file. */ private static String lastDumpFileName = ""; /** * Defines how messages should be logged. This method can be modified to * restrict the logging messages that are shown on the console or to change * their formatting. See the documentation of Log4J for details on how to do * this. */ public static void configureLogging() { // Create the appender that will write log messages to the console. ConsoleAppender consoleAppender = new ConsoleAppender(); // Define the pattern of log messages. // Insert the string "%c{1}:%L" to also show class name and line. String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: consoleAppender.setThreshold(Level.INFO); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); } /** * Processes all entities in a Wikidata dump using the given entity * processor. By default, the most recent JSON dump will be used. In offline * mode, only the most recent previously downloaded file is considered. 
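* <p>
* A minimal usage sketch (the counting processor here is illustrative and not
* part of this class):
* <pre>{@code
* EntityDocumentProcessor processor = new EntityDocumentProcessor() {
*     &#64;Override
*     public void processItemDocument(ItemDocument itemDocument) {
*         // inspect or count the item here
*     }
* };
* ExampleHelpers.processEntitiesFromWikidataDump(processor);
* }</pre>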
* * @param entityDocumentProcessor * the object to use for processing entities in this dump */ public static void processEntitiesFromWikidataDump( EntityDocumentProcessor entityDocumentProcessor) { // Controller object for processing dumps: DumpProcessingController dumpProcessingController = new DumpProcessingController( "wikidatawiki"); dumpProcessingController.setOfflineMode(OFFLINE_MODE); // // Optional: Use another download directory: // dumpProcessingController.setDownloadDirectory(System.getProperty("user.dir")); // Should we process historic revisions or only current ones? boolean onlyCurrentRevisions; switch (DUMP_FILE_MODE) { case ALL_REVS: case ALL_REVS_WITH_DAILIES: onlyCurrentRevisions = false; break; case CURRENT_REVS: case CURRENT_REVS_WITH_DAILIES: case JSON: case JUST_ONE_DAILY_FOR_TEST: default: onlyCurrentRevisions = true; } // Subscribe to the most recent entity documents of type wikibase item: dumpProcessingController.registerEntityDocumentProcessor( entityDocumentProcessor, null, onlyCurrentRevisions); // Also add a timer that reports some basic progress information: EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor( TIMEOUT_SEC); dumpProcessingController.registerEntityDocumentProcessor( entityTimerProcessor, null, onlyCurrentRevisions); MwDumpFile dumpFile = null; try { // Start processing (may trigger downloads where needed): switch (DUMP_FILE_MODE) { case ALL_REVS: case CURRENT_REVS: dumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.FULL); break; case ALL_REVS_WITH_DAILIES: case CURRENT_REVS_WITH_DAILIES: MwDumpFile fullDumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.FULL); MwDumpFile incrDumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.DAILY); lastDumpFileName = fullDumpFile.getProjectName() + "-" + incrDumpFile.getDateStamp() + "." + fullDumpFile.getDateStamp(); dumpProcessingController.processAllRecentRevisionDumps(); break; case JSON: dumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.JSON); break; case JUST_ONE_DAILY_FOR_TEST: dumpFile = dumpProcessingController .getMostRecentDump(DumpContentType.DAILY); break; default: throw new RuntimeException("Unsupported dump processing type " + DUMP_FILE_MODE); } if (dumpFile != null) { lastDumpFileName = dumpFile.getProjectName() + "-" + dumpFile.getDateStamp(); dumpProcessingController.processDump(dumpFile); } } catch (TimeoutException e) { // The timer caused a time out. Continue and finish normally. } // Print final timer results: entityTimerProcessor.close(); } /** * Opens a new FileOutputStream for a file of the given name in the example * output directory ({@link ExampleHelpers#EXAMPLE_OUTPUT_DIRECTORY}). Any * file of this name that exists already will be replaced. The caller is * responsible for eventually closing the stream. 
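* <p>
* Typical usage, as in the example processors in this package (the file name
* here is arbitrary):
* <pre>{@code
* try (PrintStream out = new PrintStream(
*         ExampleHelpers.openExampleFileOuputStream("my-results.csv"))) {
*     out.println("some,csv,line");
* }
* }</pre>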
* * @param filename * the name of the file to write to * @return FileOutputStream for the file * @throws IOException * if the file or example output directory could not be created */ public static FileOutputStream openExampleFileOuputStream(String filename) throws IOException { Path directoryPath; if ("".equals(lastDumpFileName)) { directoryPath = Paths.get(EXAMPLE_OUTPUT_DIRECTORY); } else { directoryPath = Paths.get(EXAMPLE_OUTPUT_DIRECTORY); createDirectory(directoryPath); directoryPath = directoryPath.resolve( lastDumpFileName); } createDirectory(directoryPath); Path filePath = directoryPath.resolve(filename); return new FileOutputStream(filePath.toFile()); } /** * Returns the name of the dump file that was last processed. This can be * used to name files generated from this dump. The result might be the * empty string if no file has been processed yet. */ public static String getLastDumpFileName() { return lastDumpFileName; } /** * Create a directory at the given path if it does not exist yet. * * @param path * the path to the directory * @throws IOException * if it was not possible to create a directory at the given * path */ private static void createDirectory(Path path) throws IOException { try { Files.createDirectory(path); } catch (FileAlreadyExistsException e) { if (!Files.isDirectory(path)) { throw e; } } } } FetchOnlineDataExample.java000066400000000000000000000106561444772566300345000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import java.util.Collections; import java.util.Map; import java.util.Map.Entry; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.wikibaseapi.BasicApiConnection; import org.wikidata.wdtk.wikibaseapi.WbSearchEntitiesResult; import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; public class FetchOnlineDataExample { public static void main(String[] args) throws MediaWikiApiErrorException, IOException { ExampleHelpers.configureLogging(); printDocumentation(); WikibaseDataFetcher wbdf = new WikibaseDataFetcher( BasicApiConnection.getWikidataApiConnection(), Datamodel.SITE_WIKIDATA); System.out.println("*** Fetching data for one entity:"); EntityDocument q42 = wbdf.getEntityDocument("Q42"); System.out.println(q42); if (q42 instanceof ItemDocument) { System.out.println("The English name for entity Q42 is " + ((ItemDocument) q42).getLabels().get("en").getText()); } System.out.println("*** Fetching data for several entities:"); Map<String, EntityDocument> results = wbdf.getEntityDocuments("Q80", "P31"); // Keys of this map are Qids, but we only use the values here: for (EntityDocument ed : results.values()) { System.out.println("Successfully retrieved data for " + ed.getEntityId().getId()); } System.out .println("*** Fetching data using filters to reduce data volume:"); // Only site links from English Wikipedia: wbdf.getFilter().setSiteLinkFilter(Collections.singleton("enwiki")); // Only labels in French: wbdf.getFilter().setLanguageFilter(Collections.singleton("fr")); // No statements at all: wbdf.getFilter().setPropertyFilter(Collections.emptySet()); EntityDocument q8 = wbdf.getEntityDocument("Q8"); if (q8 instanceof ItemDocument) { System.out.println("The French label for entity Q8 is " + ((ItemDocument) q8).getLabels().get("fr").getText() + "\nand its English Wikipedia page has the title " + ((ItemDocument) q8).getSiteLinks().get("enwiki") .getPageTitle() + "."); } System.out.println("*** Fetching data based on page title:"); EntityDocument edPratchett = wbdf.getEntityDocumentByTitle("enwiki", "Terry Pratchett"); System.out.println("The Qid of Terry Pratchett is " + edPratchett.getEntityId().getId()); System.out.println("*** Fetching data based on several page titles:"); results = wbdf.getEntityDocumentsByTitle("enwiki", "Wikidata", "Wikipedia"); // In this case, keys are titles rather than Qids for (Entry<String, EntityDocument> entry : results.entrySet()) { System.out .println("Successfully retrieved data for page entitled \"" + entry.getKey() + "\": " + entry.getValue().getEntityId().getId()); } System.out.println("** Doing search on Wikidata:"); for (WbSearchEntitiesResult result : wbdf.searchEntities("Douglas Adams", "fr")) { System.out.println("Found " + result.getEntityId() + " with label " + result.getLabel()); } System.out.println("*** Done."); } /** * Prints some basic documentation about this program.
*/ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: FetchOnlineDataExample"); System.out.println("*** "); System.out .println("*** This program fetches individual data using the wikidata.org API."); System.out.println("*** It does not download any dump files."); System.out .println("********************************************************************"); } } GenderRatioProcessor.java000066400000000000000000000336521444772566300343000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.Value; /** * This document processor calculates the gender ratios of people featured on * Wikimedia projects. It is inspired by the investigations of Max Klein. *
<p>
* For each Wikidata item we consider all the Wikimedia projects (Wikipedia * etc.) that have an article on this subject. We find out if the Wikidata item * is about a human and which sex/gender values it has (if any). We then count * the pages, humans, humans with gender, and humans with each particular gender * for each site. The script generates intermediate status reports for the * biggest sites, and eventually writes a CSV file with all the data for all the * sites. *
<p>
* There are certainly more than two genders, but in fact we cannot even assume * a previously known list of genders. So we collect the data in a way that * allows arbitrary items as values for gender. We make an effort to find an * English label for all of them, but we don't go as far as looking through the * dump twice (if we encounter a gender value after the item for that gender was * already processed, we cannot go back to fetch the value). It is possible to * preconfigure some labels so as to have them set from the very start. *
<p>
* The program could also be used to compare the number of other articles by * language. For this, the value of {@link GenderRatioProcessor#filterClass} can * be changed. * * @author Markus Kroetzsch * */ public class GenderRatioProcessor implements EntityDocumentProcessor { int itemCount = 0; int genderItemCount = 0; boolean printedStatus = true; /** * Class to store basic information for each site in a simple format. * * @author Markus Kroetzsch * */ public static class SiteRecord { public int pageCount = 0; public int humanGenderPageCount = 0; public int humanPageCount = 0; public final HashMap<EntityIdValue, Integer> genderCounts = new HashMap<>(); public final String siteKey; public SiteRecord(String siteKey) { this.siteKey = siteKey; } } /** * Class to order site records by human page count. * * @author Markus Kroetzsch * */ public static class SiteRecordComparator implements Comparator<SiteRecord> { @Override public int compare(SiteRecord o1, SiteRecord o2) { return o2.humanPageCount - o1.humanPageCount; } } final HashMap<String, SiteRecord> siteRecords = new HashMap<>(); final HashMap<EntityIdValue, String> genderNames = new HashMap<>(); final List<EntityIdValue> genderNamesList = new ArrayList<>(); /** * Class to use for filtering items. This can be changed to analyse a more * specific set of items. Gender information will always be collected, but * it would not be a problem if there was none. For example, you could use * the same code to compare the number of articles about lighthouses * (Q39715) by site; the gender counts would (hopefully) be zero in this * case. */ static final ItemIdValue filterClass = Datamodel .makeWikidataItemIdValue("Q5"); /** * Main method. Processes the whole dump using this processor and writes the * results to a file. To change which dump file to use and whether to run in * offline mode, modify the settings in {@link ExampleHelpers}. */ public static void main(String[] args) { ExampleHelpers.configureLogging(); GenderRatioProcessor.printDocumentation(); GenderRatioProcessor processor = new GenderRatioProcessor(); ExampleHelpers.processEntitiesFromWikidataDump(processor); processor.writeFinalResults(); } /** * Constructor.
*/ public GenderRatioProcessor() { // Pre-configure some common genders to get more readable status outputs // (we also extract labels from the dump, but usually only quite late) addNewGenderName( Datamodel.makeItemIdValue("Q6581072", Datamodel.SITE_WIKIDATA), "female"); addNewGenderName( Datamodel.makeItemIdValue("Q6581097", Datamodel.SITE_WIKIDATA), "male"); addNewGenderName( Datamodel.makeItemIdValue("Q48270", Datamodel.SITE_WIKIDATA), "genderqueer"); addNewGenderName( Datamodel.makeItemIdValue("Q1052281", Datamodel.SITE_WIKIDATA), "transgender female"); addNewGenderName( Datamodel.makeItemIdValue("Q2449503", Datamodel.SITE_WIKIDATA), "transgender male"); addNewGenderName( Datamodel.makeItemIdValue("Q1097630", Datamodel.SITE_WIKIDATA), "intersex"); addNewGenderName( Datamodel.makeItemIdValue("Q746411", Datamodel.SITE_WIKIDATA), "kathoey"); addNewGenderName( Datamodel.makeItemIdValue("Q1399232", Datamodel.SITE_WIKIDATA), "fa'afafine"); // Should not be needed since we restrict to humans, but it still happens: addNewGenderName( Datamodel.makeItemIdValue("Q43445", Datamodel.SITE_WIKIDATA), "female animal"); addNewGenderName( Datamodel.makeItemIdValue("Q44148", Datamodel.SITE_WIKIDATA), "male animal"); } @Override public void processItemDocument(ItemDocument itemDocument) { this.itemCount++; List<EntityIdValue> genderValues = Collections.emptyList(); boolean isHumanWithGender = false; boolean isHuman = false; for (StatementGroup statementGroup : itemDocument.getStatementGroups()) { switch (statementGroup.getProperty().getId()) { case "P21": // P21 is "sex or gender" genderValues = getItemIdValueList(statementGroup); break; case "P31": // P31 is "instance of" isHuman = containsValue(statementGroup, filterClass); break; } } if (isHuman && genderValues.size() > 0) { isHumanWithGender = true; this.genderItemCount++; this.printedStatus = false; for (EntityIdValue gender : genderValues) { if (!this.genderNames.containsKey(gender)) { addNewGenderName(gender, gender.getId()); } } } // Record site data for (SiteLink siteLink : itemDocument.getSiteLinks().values()) { SiteRecord siteRecord = getSiteRecord(siteLink.getSiteKey()); siteRecord.pageCount++; if (isHumanWithGender) { siteRecord.humanGenderPageCount++; } if (isHuman) { siteRecord.humanPageCount++; } for (EntityIdValue gender : genderValues) { countGender(gender, siteRecord); } } // Also collect labels of items used as genders. // Only works if the gender is used before the item is processed, but // better than nothing. if (this.genderNames.containsKey(itemDocument.getEntityId())) { MonolingualTextValue label = itemDocument.getLabels().get("en"); if (label != null) { this.genderNames.put(itemDocument.getEntityId(), label.getText()); } } // Print status once in a while if (!this.printedStatus && this.genderItemCount % 100000 == 0) { printStatus(); this.printedStatus = true; } } /** * Writes the results of the processing to a CSV file.
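* <p>
* The header line has the form {@code Site key,pages total,pages on humans,pages
* on humans with gender}, followed by one column per gender item encountered,
* e.g. {@code female (Q6581072)} and {@code male (Q6581097)}.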
*/ public void writeFinalResults() { printStatus(); try (PrintStream out = new PrintStream( ExampleHelpers.openExampleFileOuputStream("gender-ratios.csv"))) { out.print("Site key,pages total,pages on humans,pages on humans with gender"); for (EntityIdValue gender : this.genderNamesList) { out.print("," + this.genderNames.get(gender) + " (" + gender.getId() + ")"); } out.println(); List<SiteRecord> siteRecords = new ArrayList<>( this.siteRecords.values()); siteRecords.sort(new SiteRecordComparator()); for (SiteRecord siteRecord : siteRecords) { out.print(siteRecord.siteKey + "," + siteRecord.pageCount + "," + siteRecord.humanPageCount + "," + siteRecord.humanGenderPageCount); for (EntityIdValue gender : this.genderNamesList) { if (siteRecord.genderCounts.containsKey(gender)) { out.print("," + siteRecord.genderCounts.get(gender)); } else { out.print(",0"); } } out.println(); } } catch (IOException e) { e.printStackTrace(); } } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: GenderRatioProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will compute the numbers of articles about humans across"); System.out .println("*** Wikimedia projects, and in particular it will count the articles"); System.out .println("*** for each sex/gender. Results will be stored in a CSV file."); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } /** * Prints the current status to the system output. */ private void printStatus() { System.out.println("*** Found " + genderItemCount + " items with gender within " + itemCount + " items."); System.out .println("*** Showing top ten sites with most items with gender data: "); int siteCount = 0; List<SiteRecord> siteRecords = new ArrayList<>( this.siteRecords.values()); siteRecords.sort(new SiteRecordComparator()); for (SiteRecord siteRecord : siteRecords) { if (siteCount >= 10) { break; } siteCount++; System.out.print(String.format("%1$8s", siteRecord.siteKey) + ": "); int genderCount = 0; for (EntityIdValue gender : this.genderNamesList) { System.out.print(this.genderNames.get(gender) + " "); int count; float ratio; if (siteRecord.genderCounts.containsKey(gender)) { count = siteRecord.genderCounts.get(gender); ratio = (float) count / siteRecord.humanGenderPageCount * 100; } else { count = 0; ratio = 0; } if (genderCount < 2) { System.out.printf("%7d (%5.3f%%) ", count, ratio); } else { System.out.printf("%3d (%5.4f%%) ", count, ratio); } genderCount++; } System.out .println(" -- gender pages: " + siteRecord.humanGenderPageCount + ", human pages: " + siteRecord.humanPageCount + ", total pages: " + siteRecord.pageCount + ", ghp/hp: " + ((float) siteRecord.humanGenderPageCount / siteRecord.humanPageCount * 100) + "%, hp/p: " + ((float) siteRecord.humanPageCount / siteRecord.pageCount * 100) + "%"); } } /** * Helper method that extracts the list of all {@link EntityIdValue} objects * that are used as values in the given statement group.
* * @param statementGroup * the {@link StatementGroup} to extract the data from * @return the list of values */ private List<EntityIdValue> getItemIdValueList(StatementGroup statementGroup) { List<EntityIdValue> result = new ArrayList<>(statementGroup.size()); for (Statement s : statementGroup) { Value v = s.getValue(); if (v instanceof EntityIdValue) { result.add((EntityIdValue) v); } } return result; } /** * Checks if the given group of statements contains the given value as the * value of a main snak of some statement. * * @param statementGroup * the statement group to scan * @param value * the value to scan for * @return true if value was found */ private boolean containsValue(StatementGroup statementGroup, Value value) { for (Statement s : statementGroup) { if (value.equals(s.getValue())) { return true; } } return false; } /** * Adds a new gender item and an initial name. * * @param entityIdValue * the item representing the gender * @param name * the label to use for representing the gender */ private void addNewGenderName(EntityIdValue entityIdValue, String name) { this.genderNames.put(entityIdValue, name); this.genderNamesList.add(entityIdValue); } /** * Returns a site record for the site of the given name, creating a new one * if it does not exist yet. * * @param siteKey * the key of the site * @return the suitable site record */ private SiteRecord getSiteRecord(String siteKey) { SiteRecord siteRecord = this.siteRecords.get(siteKey); if (siteRecord == null) { siteRecord = new SiteRecord(siteKey); this.siteRecords.put(siteKey, siteRecord); } return siteRecord; } /** * Counts a single page of the specified gender. If this is the first page * of that gender on this site, a suitable key is added to the list of the * site's genders. * * @param gender * the gender to count * @param siteRecord * the site record to count it for */ private void countGender(EntityIdValue gender, SiteRecord siteRecord) { siteRecord.genderCounts.merge(gender, 1, Integer::sum); } } GreatestNumberProcessor.java000066400000000000000000000111421444772566300350120ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.math.BigDecimal; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; /** * This simple {@link EntityDocumentProcessor} finds the greatest number * assigned to a certain property. The property can be modified by changing the * value for {@link GreatestNumberProcessor#numberPropertyId}.
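 * <p>
 * For example, to search for the largest population number instead, the field could be changed as follows (assuming that P1082, "population", is the property of interest):
 * <pre>
 * {@code
 * static final String numberPropertyId = "P1082"; // population
 * }
 * </pre>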
* * @author Markus Kroetzsch * */ public class GreatestNumberProcessor implements EntityDocumentProcessor { static final String numberPropertyId = "P1113"; // "series length" // P1090 is "redshift" // P1351 is "number of points/goals scored" // P1350 is "number of matches played" // P1128 is "employees", P1101 is "floors above ground" // P1174 is "visitors per year", P1183 is "seat capacity" ItemIdValue largestNumberItem; String largestNumberItemLabel; BigDecimal largestNumberValue; int itemsWithPropertyCount = 0; int itemCount = 0; /** * Main method. Processes the whole dump using this processor. To change * which dump file to use and whether to run in offline mode, modify the * settings in {@link ExampleHelpers}. * * @param args * @throws IOException */ public static void main(String[] args) throws IOException { ExampleHelpers.configureLogging(); GreatestNumberProcessor.printDocumentation(); GreatestNumberProcessor processor = new GreatestNumberProcessor(); ExampleHelpers.processEntitiesFromWikidataDump(processor); processor.printStatus(); } @Override public void processItemDocument(ItemDocument itemDocument) { this.itemCount++; // Find the first quantity value for this property, if any: QuantityValue quantityValue = itemDocument .findStatementQuantityValue(numberPropertyId); // If a value was found, compare it to the current maximum: if (quantityValue != null) { this.itemsWithPropertyCount++; BigDecimal numericValue = quantityValue.getNumericValue(); if (this.largestNumberValue == null || numericValue.compareTo(this.largestNumberValue) > 0) { this.largestNumberValue = numericValue; this.largestNumberItem = itemDocument.getEntityId(); MonolingualTextValue label = itemDocument.getLabels().get("en"); if (label != null) { this.largestNumberItemLabel = label.getText(); } else { this.largestNumberItemLabel = this.largestNumberItem .getId(); } } } // Print progress every 100,000 items: if (this.itemCount % 100000 == 0) { printStatus(); } } /** * Prints the current status, time and entity count. */ public void printStatus() { System.out.println("Found " + this.itemsWithPropertyCount + " matching items after scanning " + this.itemCount + " items."); if (this.largestNumberValue != null) { System.out.println("The item with the greatest number is: " + this.largestNumberItemLabel + " (" + this.largestNumberItem.getId() + ") with number " + this.largestNumberValue); } else { System.out.println("No number with a specified value found yet."); } } /** * Prints some basic documentation about this program. 
*/ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: GreatestNumberProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will scan the dump to find the item with the greatest value"); System.out.println("*** for property " + numberPropertyId + "."); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } } JsonSerializationProcessor.java000066400000000000000000000137141444772566300355410ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.Collections; import java.util.HashSet; import java.util.Set; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelFilter; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.helpers.JsonSerializer; import org.wikidata.wdtk.datamodel.interfaces.*; /** * This example illustrates how to create a JSON serialization of some of the * data found in a dump. It uses a {@link DatamodelFilter} to eliminate some of the data. *

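 * The {@link DocumentDataFilter} configured in the constructor determines which parts of each document are kept. A minimal sketch of such a configuration (the concrete filter values here are just for illustration) looks like this:
 * <pre>
 * {@code
 * DocumentDataFilter filter = new DocumentDataFilter();
 * filter.setLanguageFilter(Collections.singleton("en")); // keep English terms only
 * filter.setSiteLinkFilter(Collections.emptySet());      // drop all sitelinks
 * }
 * </pre>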
* As an example, the program only serializes data for people who were born in * Dresden, Germany. This can be changed by modifying the code in * {@link #includeDocument(ItemDocument)}. * * @author Markus Kroetzsch * */ public class JsonSerializationProcessor implements EntityDocumentProcessor { static final String OUTPUT_FILE_NAME = "json-serialization-example.json.gz"; final JsonSerializer jsonSerializer; /** * Object used to make simplified copies of Wikidata documents for * re-serialization in JSON. */ final DatamodelFilter datamodelFilter; /** * Runs the example program. * * @param args * @throws IOException * if there was a problem in writing the output file */ public static void main(String[] args) throws IOException { ExampleHelpers.configureLogging(); JsonSerializationProcessor.printDocumentation(); JsonSerializationProcessor jsonSerializationProcessor = new JsonSerializationProcessor(); ExampleHelpers.processEntitiesFromWikidataDump(jsonSerializationProcessor); jsonSerializationProcessor.close(); } /** * Constructor. Initializes various helper objects we use for the JSON * serialization, and opens the file that we want to write to. * * @throws IOException * if there is a problem opening the output file */ public JsonSerializationProcessor() throws IOException { // Configuration of the filter DocumentDataFilter documentDataFilter = new DocumentDataFilter(); // Only copy English labels, descriptions, and aliases: documentDataFilter.setLanguageFilter(Collections.singleton("en")); // Only copy statements of some properties: Set<PropertyIdValue> propertyFilter = new HashSet<>(); propertyFilter.add(Datamodel.makeWikidataPropertyIdValue("P18")); // image propertyFilter.add(Datamodel.makeWikidataPropertyIdValue("P106")); // occupation propertyFilter.add(Datamodel.makeWikidataPropertyIdValue("P569")); // birthdate documentDataFilter.setPropertyFilter(propertyFilter); // Do not copy any sitelinks: documentDataFilter.setSiteLinkFilter(Collections.emptySet()); // The filter is used to remove some parts from the documents we // serialize. this.datamodelFilter = new DatamodelFilter(new DataObjectFactoryImpl(), documentDataFilter); // The (compressed) file we write to. OutputStream outputStream = new GzipCompressorOutputStream( new BufferedOutputStream( ExampleHelpers .openExampleFileOuputStream(OUTPUT_FILE_NAME))); this.jsonSerializer = new JsonSerializer(outputStream); this.jsonSerializer.open(); } @Override public void processItemDocument(ItemDocument itemDocument) { if (includeDocument(itemDocument)) { this.jsonSerializer.processItemDocument(this.datamodelFilter.filter(itemDocument)); } } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { // we do not serialize any properties } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: JsonSerializationProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will filter the data and store the results in a new JSON file."); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } /** * Closes the output. Should be called after the JSON serialization was * finished.
*/ public void close() { System.out.println("Serialized " + this.jsonSerializer.getEntityDocumentCount() + " item documents to JSON file " + OUTPUT_FILE_NAME + "."); this.jsonSerializer.close(); } /** * Returns true if the given document should be included in the * serialization. * * @param itemDocument * the document to check * @return true if the document should be serialized */ private boolean includeDocument(ItemDocument itemDocument) { for (StatementGroup sg : itemDocument.getStatementGroups()) { // "P19" is "place of birth" on Wikidata if (!"P19".equals(sg.getProperty().getId())) { continue; } for (Statement s : sg) { if (s.getMainSnak() instanceof ValueSnak) { Value v = s.getValue(); // "Q1731" is "Dresden" on Wikidata if (v instanceof ItemIdValue && "Q1731".equals(((ItemIdValue) v).getId())) { return true; } } } } return false; } } LifeExpectancyProcessor.java000066400000000000000000000124111444772566300347660ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import java.io.IOException; import java.io.PrintStream; /** * This document processor calculates the average life expectancy of people, * based on property ids used on Wikidata. The results can be written to the * file life-expectancies.csv in the example results directory. *

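 * The core of the computation is a simple bucketing by year of birth, essentially:
 * <pre>
 * {@code
 * lifeSpans[birthYear] += deathYear - birthYear; // sum of life spans per year
 * peopleCount[birthYear]++;                      // number of people per year
 * // average for year i: (double) lifeSpans[i] / peopleCount[i]
 * }
 * </pre>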
* Note that the computation of life expectancies based on the life spans of * people who have died already has some systematic bias, since none of the * sampled persons is expected to die in the future. * * @author Markus Kroetzsch * */ public class LifeExpectancyProcessor implements EntityDocumentProcessor { long totalPeopleCount = 0; long totalLifeSpan = 0; boolean printedStatus = true; // Simply store data indexed by year of birth, in a range from 0 to 2100: final long[] lifeSpans = new long[2100]; final long[] peopleCount = new long[2100]; /** * Main method. Processes the whole dump using this processor and writes the * results to a file. To change which dump file to use and whether to run in * offline mode, modify the settings in {@link ExampleHelpers}. * */ public static void main(String[] args) { ExampleHelpers.configureLogging(); LifeExpectancyProcessor.printDocumentation(); LifeExpectancyProcessor processor = new LifeExpectancyProcessor(); ExampleHelpers.processEntitiesFromWikidataDump(processor); processor.writeFinalResults(); } @Override public void processItemDocument(ItemDocument itemDocument) { int birthYear = getYearIfAny(itemDocument, "P569"); int deathYear = getYearIfAny(itemDocument, "P570"); if (birthYear != Integer.MIN_VALUE && deathYear != Integer.MIN_VALUE && birthYear >= 1200) { // Do some more sanity checks to filter strange values: if (deathYear > birthYear && deathYear - birthYear < 130) { lifeSpans[birthYear] += (deathYear - birthYear); peopleCount[birthYear]++; totalLifeSpan += (deathYear - birthYear); totalPeopleCount++; printedStatus = false; } } // Print the status once in a while: if (!printedStatus && totalPeopleCount % 10000 == 0) { printStatus(); printedStatus = true; } } /** * Writes the results of the processing to a file. */ public void writeFinalResults() { printStatus(); try (PrintStream out = new PrintStream( ExampleHelpers .openExampleFileOuputStream("life-expectancies.csv"))) { for (int i = 0; i < lifeSpans.length; i++) { if (peopleCount[i] != 0) { out.println(i + "," + (double) lifeSpans[i] / peopleCount[i] + "," + peopleCount[i]); } } } catch (IOException e) { e.printStackTrace(); } } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: LifeExpectancyProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will compute the average life expectancy of persons found"); System.out .println("*** in the data. Results will be stored in a CSV file."); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } /** * Prints the current status to the system output. */ private void printStatus() { if (this.totalPeopleCount != 0) { System.out.println("Found " + totalPeopleCount + " people with an average life span of " + (float) totalLifeSpan / totalPeopleCount + " years."); } else { System.out.println("Found no people yet."); } } /** * Helper method that finds the first value of a time-valued property (if * any), and extracts an integer year. It checks if the value has sufficient * precision to extract an exact year.
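 * For example, {@code getYearIfAny(itemDocument, "P569")} returns the year of birth of the given item, if a sufficiently precise date of birth is set.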
* * @param document * the document to extract the data from * @param propertyId * the string id of the property to look for * @return the year, or Integer.MIN_VALUE if none was found */ private int getYearIfAny(StatementDocument document, String propertyId) { TimeValue date = document.findStatementTimeValue(propertyId); if (date != null && date.getPrecision() >= TimeValue.PREC_YEAR) { return (int) date.getYear(); } else { return Integer.MIN_VALUE; } } } LocalDumpFileExample.java000066400000000000000000000062201444772566300341600ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.examples; import org.wikidata.wdtk.dumpfiles.DumpContentType; import org.wikidata.wdtk.dumpfiles.DumpProcessingController; import org.wikidata.wdtk.dumpfiles.EntityTimerProcessor; import org.wikidata.wdtk.dumpfiles.MwLocalDumpFile; /** * This class illustrates how to process local dumpfiles. It uses * {@link EntityTimerProcessor} to process a dump. * * @author Markus Damm * */ public class LocalDumpFileExample { /** * Path to the dump that should be processed */ private final static String DUMP_FILE = "./src/resources/sample-dump-20150815.json.gz"; public static void main(String[] args) { ExampleHelpers.configureLogging(); LocalDumpFileExample.printDocumentation(); DumpProcessingController dumpProcessingController = new DumpProcessingController( "wikidatawiki"); // Note that the project name "wikidatawiki" is only for online access; // not relevant here. EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor(0); dumpProcessingController.registerEntityDocumentProcessor( entityTimerProcessor, null, true); // Select local file (meta-data will be guessed): System.out.println(); System.out .println("Processing a local dump file giving only its location"); System.out .println("(meta-data like the date is guessed from the file name):"); MwLocalDumpFile mwDumpFile = new MwLocalDumpFile(DUMP_FILE); dumpProcessingController.processDump(mwDumpFile); // Select local file and set meta-data: System.out.println(); System.out .println("Processing a local dump file with all meta-data set:"); mwDumpFile = new MwLocalDumpFile(DUMP_FILE, DumpContentType.JSON, "20150815", "wikidatawiki"); dumpProcessingController.processDump(mwDumpFile); entityTimerProcessor.close(); } /** * Prints some basic documentation about this program.
*/ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: LocalDumpFileExample"); System.out.println("*** "); System.out .println("*** This program illustrates how to process local dumps."); System.out .println("*** It uses an EntityTimerProcessor, which counts processed items"); System.out.println("*** and elapsed time."); System.out.println("*** "); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } } OnlineMediaInfoExample.java000066400000000000000000000053201444772566300345000ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.wikidata.wdtk.datamodel.interfaces.*; import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import java.io.IOException; /** * This example shows how to retrieve MediaInfo data from the API. * * @author Thomas Pellissier Tanon * */ public class OnlineMediaInfoExample { public static void main(String[] args) throws IOException, MediaWikiApiErrorException { ExampleHelpers.configureLogging(); printDocumentation(); WikibaseDataFetcher commonsDataFetcher = WikibaseDataFetcher.getWikimediaCommonsDataFetcher(); WikibaseDataFetcher wikidataDataFetcher = WikibaseDataFetcher.getWikidataDataFetcher(); System.out.println("*** Retrieving a media info document ..."); MediaInfoDocument mediaInfoDocument = (MediaInfoDocument) commonsDataFetcher.getEntityDocumentByTitle("commonswiki", "File:Black hole - Messier 87 crop max res.jpg"); // Print the English caption System.out.println("Caption: " + mediaInfoDocument.getLabels().get("en").getText()); // Print the depicted entities (P180) with labels from Wikidata: for(Statement statement : mediaInfoDocument.findStatementGroup("P180").getStatements()) { Value value = statement.getValue(); if(value instanceof ItemIdValue) { ItemDocument depict = (ItemDocument) wikidataDataFetcher.getEntityDocument(((ItemIdValue) value).getId()); System.out.println("Depict: " + depict.getLabels().get("en").getText() + " (" + depict.getEntityId().getIri() + ")"); } } System.out.println("*** Done."); } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out.println("********************************************************************"); System.out.println("*** Wikidata Toolkit: OnlineMediaInfoExample"); System.out.println("*** "); System.out.println("*** It does not download any dump files.
See source code for details."); System.out.println("********************************************************************"); } } RdfSerializationExample.java000066400000000000000000000136611444772566300347600ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.*; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.apache.commons.compress.compressors.gzip.GzipParameters; import org.eclipse.rdf4j.rio.RDFFormat; import org.wikidata.wdtk.datamodel.interfaces.Sites; import org.wikidata.wdtk.dumpfiles.DumpProcessingController; import org.wikidata.wdtk.rdf.PropertyRegister; import org.wikidata.wdtk.rdf.RdfSerializer; /** * This class shows how to convert data from wikidata.org to RDF in N-Triples format. The * compressed output will be written into an output file. * * @author Michael Günther * @author Markus Kroetzsch */ public class RdfSerializationExample { public static void main(String[] args) throws IOException { // Define where log messages go ExampleHelpers.configureLogging(); // Print information about this program printDocumentation(); // Initialize sites; only needed to link to Wikipedia pages in RDF DumpProcessingController dumpProcessingController = new DumpProcessingController( "wikidatawiki"); dumpProcessingController.setOfflineMode(ExampleHelpers.OFFLINE_MODE); Sites sites = dumpProcessingController.getSitesInformation(); // Prepare a compressed output stream to write the data to // (admittedly, this is slightly over-optimized for an example) try(OutputStream bufferedFileOutputStream = new BufferedOutputStream( ExampleHelpers.openExampleFileOuputStream("wikidata-simple-statements.nt.gz"), 1024 * 1024 * 5 )) { GzipParameters gzipParameters = new GzipParameters(); gzipParameters.setCompressionLevel(7); OutputStream compressorOutputStream = new GzipCompressorOutputStream( bufferedFileOutputStream, gzipParameters); OutputStream exportOutputStream = asynchronousOutputStream(compressorOutputStream); // Create a serializer processor RdfSerializer serializer = new RdfSerializer(RDFFormat.NTRIPLES, exportOutputStream, sites, PropertyRegister.getWikidataPropertyRegister()); // Serialize simple statements (and nothing else) for all items serializer.setTasks(RdfSerializer.TASK_ITEMS | RdfSerializer.TASK_SIMPLE_STATEMENTS); // Run serialization serializer.open(); ExampleHelpers.processEntitiesFromWikidataDump(serializer); serializer.close(); } } /** * Print some basic documentation about this program.
*/ private static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: RDF Serialization Example"); System.out.println("*** "); System.out .println("*** This program will download dumps from Wikidata and serialize the data in RDF format."); System.out .println("*** Downloading may take some time initially. After that, files"); System.out .println("*** are stored on disk and are used until newer dumps are available."); System.out .println("*** You can delete files manually when no longer needed (see "); System.out .println("*** message below for the directory where dump files are found)."); System.out .println("********************************************************************"); } /** * Creates a separate thread for writing into the given output stream and * returns a pipe output stream that can be used to pass data to this * thread. *

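 * A typical use, mirroring the main method above, is to wrap a compressing stream so that compression runs in its own thread:
 * <pre>
 * {@code
 * OutputStream out = asynchronousOutputStream(gzipOutputStream);
 * // ... write the export to out ...
 * out.close(); // also joins the writer thread
 * }
 * </pre>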
* This code is inspired by * http://stackoverflow.com/questions/12532073/gzipoutputstream * -that-does-its-compression-in-a-separate-thread * * @param outputStream * the stream to write to in the thread * @return a new stream that data should be written to * @throws IOException * if the pipes could not be created for some reason */ public static OutputStream asynchronousOutputStream( final OutputStream outputStream) throws IOException { final int SIZE = 1024 * 1024 * 10; final PipedOutputStream pos = new PipedOutputStream(); final PipedInputStream pis = new PipedInputStream(pos, SIZE); final Thread worker = new Thread(() -> { try { byte[] bytes = new byte[SIZE]; for (int len; (len = pis.read(bytes)) > 0;) { outputStream.write(bytes, 0, len); } } catch (IOException ioException) { ioException.printStackTrace(); } finally { close(pis); close(outputStream); } }, "async-output-stream"); return new SyncCloseOutputStream(pos, worker); } /** * Helper class that joins a thread on a call to close, to ensure that the output stream has really been closed. */ private static final class SyncCloseOutputStream extends FilterOutputStream { private final Thread worker; public SyncCloseOutputStream(OutputStream out, Thread worker) { super(out); this.worker = worker; } @Override public void close() throws IOException { super.close(); try { worker.join(); } catch (InterruptedException e) { e.printStackTrace(); } } } /** * Closes a Closeable and swallows any exceptions that might occur in the * process. * * @param closeable */ static void close(Closeable closeable) { if (closeable != null) { try { closeable.close(); } catch (IOException ignored) { } } } } Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/SitelinksExample.java000066400000000000000000000102371444772566300335270ustar00rootroot00000000000000package org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Sites; import org.wikidata.wdtk.dumpfiles.DumpProcessingController; /** * This class demonstrates how to get access to information about sitelinks in * Wikidata.org. The data generally uses keys like "enwiki" to identify sites. * To find out what these keys mean, Wikidata Toolkit can be used to download * and process the dump of the MediaWiki sites table. The resulting * {@link Sites} object can be used to resolve links to other sites, and also * can be applied to {@link SiteLink} objects as found in the Wikidata data. * Other information obtained from the sites table includes the site language, * whether it is a MediaWiki site, and which group it has been assigned to. The * groups are used to define which sites can be used for entering site links in * Wikibase, but the sites table does not tell us which groups are currently * enabled for site links. 
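 * <p>
 * For example, resolving a site link to a URL takes just two calls (mirroring the code below):
 * <pre>
 * {@code
 * Sites sites = dumpProcessingController.getSitesInformation();
 * String url = sites.getPageUrl("dewiki", "Dresden");
 * }
 * </pre>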
* * @author Markus Kroetzsch * */ public class SitelinksExample { public static void main(String[] args) throws IOException { // Define where log messages go ExampleHelpers.configureLogging(); // Print information about this program printDocumentation(); // Controller object for processing dumps: DumpProcessingController dumpProcessingController = new DumpProcessingController( "wikidatawiki"); dumpProcessingController.setOfflineMode(ExampleHelpers.OFFLINE_MODE); // Download the sites table dump and extract information Sites sites = dumpProcessingController.getSitesInformation(); // Access the data to find some information System.out .println("********************************************************************"); System.out.println("*** Completed processing of sites table."); System.out.println("*** Examples:"); System.out .println("*** URL of the page \"Dresden\" on German Wikipedia: " + sites.getPageUrl("dewiki", "Dresden")); System.out .println("*** URL of the page \"ڈگلس ایڈم\" on Urdu Wikipedia: " + sites.getPageUrl("urwiki", "ڈگلس ایڈم")); System.out .println("*** URL of the page \"Special:EntityData/Q1.json\" on Wikidata: " + sites.getPageUrl("wikidatawiki", "Special:EntityData/Q1.json")); System.out .println("*** Main language of the site identified by \"frwikiquote\": " + sites.getLanguageCode("frwikiquote")); System.out .println("*** Group of the site identified by \"zhwikivoyage\": " + sites.getGroup("zhwikivoyage")); System.out .println("*** URL of the file \"api.php\" on English Wikipedia: " + sites.getFileUrl("enwiki", "api.php")); } /** * Print some basic documentation about this program. */ private static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: Sitelink Processing Example"); System.out.println("*** "); System.out .println("*** This program will download and process site link information from"); System.out .println("*** Wikidata. Downloaded files are stored on disk and are used until"); System.out .println("*** newer dumps are available. You can delete files manually when no"); System.out .println("*** longer needed (see message below for the directory where files are found)."); System.out .println("********************************************************************"); } } TutorialDocumentProcessor.java000066400000000000000000000161561444772566300353770ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; /** * This is a simple template for an {@link EntityDocumentProcessor} that can be * modified to try your own code. *

* Exercise 1: Just run the code as it is and have a look at the output. It will * print a lot of data about item documents to the console. You can see roughly * what the data looks like. Find the data for one item and look up the item on * wikidata.org. Find the data that you can see on the Web page in the print out * (note that some details might have changed since your local data is based on a * dump). *

* Exercise 2: The code below already counts how many items and properties it * processes. Add additional counters to count: (1) the number of labels, (2) * the number of aliases, (3) the number of statements, (4) the number of site * links. Print this data at the end or write it to a file if you like. *

* Exercise 3: Extend your code from Exercise 2 to count how many items have a * link to English Wikipedia (or another Wikipedia of your choice). The site * identifier used in the data for English Wikipedia is "enwiki". *

* Exercise 4: Building on the code of Exercise 3, count the number of site * links for all sites that are linked. Use, for example, a hashmap to store * integer counters for each site id you encounter. Print the results to a CSV * file and load the file into a spreadsheet application (this can also be an * online application such as Google Drive). You can order the data by count and * create a diagram. The number of site links should be close to the number of * articles in the project. *
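 * A sketch of the counting idiom for Exercise 4 (the map name is only a suggestion):
 * <pre>
 * {@code
 * Map<String, Integer> siteLinkCounts = new HashMap<>();
 * for (String siteKey : itemDocument.getSiteLinks().keySet()) {
 *     siteLinkCounts.merge(siteKey, 1, Integer::sum);
 * }
 * }
 * </pre>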

 * Exercise 5: Compute the average life expectancy of people on Wikidata. To do
 * this, consider items with a birth date (P569) and death date (P570). Whenever
 * both dates are found, compute the difference of years between the dates.
 * Store the sum of these lifespans (in years) and the number of people for
 * which you recorded a lifespan to compute the average. Some hints (a small
 * sketch follows after this list):
 * <ul>
 * <li>There can be more than one statement for any property, even for date of
 * birth/death (if there are different opinions). For simplicity, just use the
 * first.</li>
 * <li>Dates can be uncertain. This is expressed by their precision,
 * {@link TimeValue#getPrecision()}. You should only consider values with
 * precision greater or equal to {@link TimeValue#PREC_DAY}.</li>
 * </ul>
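 * A possible sketch for Exercise 5 (field names are only suggestions):
 * <pre>
 * {@code
 * TimeValue birth = itemDocument.findStatementTimeValue("P569");
 * TimeValue death = itemDocument.findStatementTimeValue("P570");
 * if (birth != null && death != null
 *         && birth.getPrecision() >= TimeValue.PREC_DAY
 *         && death.getPrecision() >= TimeValue.PREC_DAY) {
 *     lifeSpanSum += death.getYear() - birth.getYear();
 *     peopleCount++;
 * }
 * }
 * </pre>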

 * Exercise 6: Compute the average life span as in Exercise 5, but now grouped
 * by year of birth. This will show you how life expectancy changed over time
 * (at least for people with Wikipedia articles). For this, create arrays or
 * maps to store the sum of the lifespan and number of people for each year of
 * birth. Finally, compute all the averages and store them to a CSV file that
 * gives the average life expectancy for each year of birth. Load this file into
 * a spreadsheet too to create a diagram. What do you notice? Some hints (see
 * the sketch after this list):
 * <ul>
 * <li>An easy way to store the numbers you need for each year of birth is to
 * use an array where the year is the index. This is possible here since you
 * know that years should be in a certain range. You could also use a HashMap,
 * of course, but sorting by key is more work in this case.</li>
 * <li>The data can contain errors. If you see strange effects in the results,
 * maybe you need to filter some unlikely cases.</li>
 * <li>To get a smooth trend for life expectancy, you need to have at least a
 * few people for every year of birth. It might be a good idea to consider only
 * people born after the year 1200 to make sure that you have enough precise
 * data.</li>
 * </ul>
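 * A possible sketch for Exercise 6, using arrays indexed by year of birth as
 * suggested (array sizes and names are only suggestions):
 * <pre>
 * {@code
 * long[] lifeSpanSums = new long[2100];
 * long[] birthYearCounts = new long[2100];
 * // for each person with known years:
 * lifeSpanSums[birthYear] += deathYear - birthYear;
 * birthYearCounts[birthYear]++;
 * // average for year i: (double) lifeSpanSums[i] / birthYearCounts[i]
 * }
 * </pre>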
* * @author Markus Kroetzsch * */ public class TutorialDocumentProcessor implements EntityDocumentProcessor { private long countItems = 0; private long countProperties = 0; /** * Processes one item document. This is often the main workhorse that * gathers the data you are interested in. You can modify this code as you * wish. */ @Override public void processItemDocument(ItemDocument itemDocument) { this.countItems++; // Do some printing for demonstration/debugging. // Only print at most ten items (or it would get too slow). if (this.countItems < 10) { System.out.println(itemDocument); } else if (this.countItems == 10) { System.out.println("*** I won't print any further items.\n" + "*** We will never finish if we print all the items.\n" + "*** Maybe remove this debug output altogether."); } } /** * Processes one property document. Property documents mainly tell you the * name and datatype of properties. It can be useful to process all * properties first to store data about them that is useful when processing * items. There are not very many properties (about 1100 as of August 2014), * so it is safe to store all their data for later use. */ @Override public void processPropertyDocument(PropertyDocument propertyDocument) { this.countProperties++; // For testing; disable when no longer needed: if (this.countProperties < 10) { System.out.println(propertyDocument); } else if (this.countProperties == 10) { System.out .println("*** I won't print any further properties.\n" + "*** Otherwise you would see only properties and no items.\n" + "*** Maybe remove this debug output altogether."); } } /** * Stores the processing results in a file. CSV (comma separated values) is * a simple format that makes sense for such tasks. It can be imported * easily into spreadsheet tools to generate diagrams from the data. */ public void storeResults() { System.out.println("Processed " + countItems + " items and " + countProperties + " properties in total."); System.out.println("Storing data ..."); try (PrintStream out = new PrintStream(new FileOutputStream( "tutorial-results.csv"))) { // Two simple entries for demonstration purposes. // Use your own code when you have more interesting data. out.println("count of items," + countItems); out.println("count of properties," + countProperties); } catch (IOException e) { System.out.println("Oops, I could not write the file: " + e.toString()); } System.out.println("... data stored."); } } Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/TutorialExample.java000066400000000000000000000051051444772566300333630ustar00rootroot00000000000000package org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.dumpfiles.DumpProcessingController; import org.wikidata.wdtk.dumpfiles.MwRevision; import org.wikidata.wdtk.dumpfiles.StatisticsMwRevisionProcessor; /** * This example application applies an {@link EntityDocumentProcessor} to all * documents in a Wikidata dump file. By default, the EntityDocumentProcessor is * {@link TutorialDocumentProcessor}. *

* This application is based on the regular data exports provided by Wikidata. By * default, it will run in offline mode. This will only work if you already have * some dump downloaded before. The easiest way of doing this is to disable * offline mode in the source code; the program will then do the downloading for * you. * * @author Markus Kroetzsch * */ public class TutorialExample { public static void main(String[] args) { ExampleHelpers.configureLogging(); // Controller object for processing dumps: DumpProcessingController dumpProcessingController = new DumpProcessingController( "wikidatawiki"); // Work offline. Only works if you already have a dump downloaded dumpProcessingController.setOfflineMode(true); // Example processor for item and property documents: TutorialDocumentProcessor documentProcessor = new TutorialDocumentProcessor(); dumpProcessingController.registerEntityDocumentProcessor( documentProcessor, MwRevision.MODEL_WIKIBASE_ITEM, true); dumpProcessingController.registerEntityDocumentProcessor( documentProcessor, MwRevision.MODEL_WIKIBASE_PROPERTY, true); // Another processor for statistics & time keeping: dumpProcessingController.registerMwRevisionProcessor( new StatisticsMwRevisionProcessor("statistics", 10000), null, true); // Run the processing: dumpProcessingController.processMostRecentMainDump(); // Store the results: documentProcessor.storeResults(); } } WorldMapProcessor.java000066400000000000000000000333641444772566300336150ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examplespackage org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.awt.image.BufferedImage; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import javax.imageio.ImageIO; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.Value; /** * This example class processes EntityDocuments to create a map image that shows * the distribution of items with geographic coordinates on Earth. Several maps * are generated, for Wikidata as a whole and for several big Wikipedias. *

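 * Each coordinate is mapped to a pixel of the image by a simple linear projection, essentially:
 * <pre>
 * {@code
 * int x = (int) (((longitude + 180.0) / 360.0) * width) % width;
 * int y = (int) (((latitude + 90.0) / 180.0) * height) % height;
 * }
 * </pre>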
* The size of the images, the projects that are included, and the brightness of * the maps can be modified in the main method. * * @author Markus Kroetzsch * */ public class WorldMapProcessor implements EntityDocumentProcessor { /** * The property id that encodes coordinates. */ static final String COORD_PROPERTY = "P625"; /** * Colors to use on the color scale, each specified as {r,g,b}. */ static int[][] colors = { { 0, 0, 150 }, { 24, 99, 9 }, { 227, 70, 0 }, { 255, 214, 30 }, { 255, 255, 255 } }; /** * The width in pixels of the map image that is created. */ final int width; /** * The height in pixels of the map image that is created. */ final int height; /** * The total number of coordinates encountered so far. */ int count = 0; /** * Value at which the brightest color will be reached. */ final int topValue; /** * All maps for which data is recorded. */ Set<ValueMap> valueMaps = new HashSet<>(); /** * Number of articles with coordinates per site. */ final Map<String, Integer> siteCounts = new HashMap<>(); /** * Identifier of the globe for which coordinates are gathered. */ String globe = GlobeCoordinatesValue.GLOBE_EARTH; /** * Main method. Processes the whole dump using this processor and writes the * results to a file. To change which dump file to use and whether to run in * offline mode, modify the settings in {@link ExampleHelpers}. * * @param args */ public static void main(String[] args) { ExampleHelpers.configureLogging(); WorldMapProcessor.printDocumentation(); int imageWidth = 8 * 360; double brightness = 1.0; WorldMapProcessor worldMapProcessor = new WorldMapProcessor(imageWidth, brightness); // worldMapProcessor.setGlobe(GlobeCoordinatesValue.GLOBE_MOON); // using the Moon or anything else but Earth might need some brightness // adjustment above, and possibly more frequent reporting below worldMapProcessor.addSite(null); // all data, no filter // Some other sites, ranked by the number of geolocated items they had // as of June 2015: worldMapProcessor.addSite("enwiki"); worldMapProcessor.addSite("dewiki"); worldMapProcessor.addSite("frwiki"); worldMapProcessor.addSite("plwiki"); worldMapProcessor.addSite("nlwiki"); worldMapProcessor.addSite("ruwiki"); worldMapProcessor.addSite("eswiki"); worldMapProcessor.addSite("itwiki"); worldMapProcessor.addSite("zhwiki"); worldMapProcessor.addSite("ptwiki"); // worldMapProcessor.addSite("ukwiki"); // worldMapProcessor.addSite("svwiki"); // worldMapProcessor.addSite("viwiki"); // worldMapProcessor.addSite("srwiki"); // worldMapProcessor.addSite("cawiki"); // worldMapProcessor.addSite("shwiki"); // worldMapProcessor.addSite("mswiki"); // worldMapProcessor.addSite("rowiki"); // worldMapProcessor.addSite("fawiki"); // worldMapProcessor.addSite("jawiki"); // worldMapProcessor.addSite("vowiki"); // worldMapProcessor.addSite("warwiki"); // worldMapProcessor.addSite("commonswiki"); // worldMapProcessor.addSite("arwiki"); ExampleHelpers.processEntitiesFromWikidataDump(worldMapProcessor); worldMapProcessor.writeFinalData(); } /** * Creates a new processor for building world maps.
* * @param width * horizontal size of the world map; the map's height is half of * this (plus some pixels for printing the scale) * @param brightness * parameter for scaling up the brightness of colors; the default * is 1.0; higher values make smaller numbers appear more * brightly; smaller values darken smaller numbers and thus help * to highlight the biggest concentrations of items */ public WorldMapProcessor(int width, double brightness) { this.width = width; this.height = width / 2; this.topValue = (int) ((1600 * 360 * 180) / (brightness * this.width * this.height)); } /** * Registers a new site for specific data collection. If null is used as a * site key, then all data is collected. * * @param siteKey * the site to collect geo data for */ public void addSite(String siteKey) { ValueMap gv = new ValueMap(siteKey); this.valueMaps.add(gv); } /** * Sets the globe on which coordinates should be gathered. This should be an * entity URI, e.g., {@link GlobeCoordinatesValue#GLOBE_EARTH}. * * @param globe */ public void setGlobe(String globe) { this.globe = globe; } @Override public void processItemDocument(ItemDocument itemDocument) { for (StatementGroup sg : itemDocument.getStatementGroups()) { if (COORD_PROPERTY.equals(sg.getProperty().getId())) { for (Statement s : sg) { countCoordinateStatement(s, itemDocument); } } } } /** * Counts the coordinates stored in a single statement for the relevant * property, if they are actually given and valid. * * @param statement * @param itemDocument */ private void countCoordinateStatement(Statement statement, ItemDocument itemDocument) { Value value = statement.getValue(); if (!(value instanceof GlobeCoordinatesValue)) { return; } GlobeCoordinatesValue coordsValue = (GlobeCoordinatesValue) value; if (!this.globe.equals(coordsValue.getGlobe())) { return; } int xCoord = (int) (((coordsValue.getLongitude() + 180.0) / 360.0) * this.width) % this.width; int yCoord = (int) (((coordsValue.getLatitude() + 90.0) / 180.0) * this.height) % this.height; if (xCoord < 0 || yCoord < 0 || xCoord >= this.width || yCoord >= this.height) { System.out.println("Dropping out-of-range coordinate: " + coordsValue); return; } countCoordinates(xCoord, yCoord, itemDocument); this.count += 1; if (this.count % 100000 == 0) { reportProgress(); writeImages(); } } /** * Counts a single pair of coordinates in all datasets. * * @param xCoord * @param yCoord * @param itemDocument */ private void countCoordinates(int xCoord, int yCoord, ItemDocument itemDocument) { for (String siteKey : itemDocument.getSiteLinks().keySet()) { this.siteCounts.merge(siteKey, 1, Integer::sum); } for (ValueMap vm : this.valueMaps) { vm.countCoordinates(xCoord, yCoord, itemDocument); } } /** * Writes all collected data to files after processing is finished. */ public void writeFinalData() { reportProgress(); writeImages(); } /** * Writes image files for all data that was collected and the statistics * file for all sites. */ private void writeImages() { for (ValueMap gv : this.valueMaps) { gv.writeImage(); } try (PrintStream out = new PrintStream( ExampleHelpers.openExampleFileOuputStream("map-site-count.csv"))) { out.println("Site key,Number of geo items"); out.println("wikidata total," + this.count); for (Entry<String, Integer> entry : this.siteCounts.entrySet()) { out.println(entry.getKey() + "," + entry.getValue()); } } catch (IOException e) { e.printStackTrace(); } } /** * Prints some basic documentation about this program.
*/ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: WorldMapProcessor"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will collect geographic coordinates from the data to "); System.out.println("*** create a map that is stored in an image file."); System.out.println("*** See source code for further details."); System.out .println("********************************************************************"); } /** * Prints the progress for all data collections. */ private void reportProgress() { for (ValueMap gv : this.valueMaps) { gv.reportProgress(); } } /** * Returns a color for a given absolute number that is to be shown on the * map. * * @param value * the number of items at the given map position * @return the RGB color to use for this value */ private int getColor(int value) { if (value == 0) { return 0; } double scale = Math.log10(value) / Math.log10(this.topValue); double lengthScale = Math.min(1.0, scale) * (colors.length - 1); int index = 1 + (int) lengthScale; if (index == colors.length) { index--; } double partScale = lengthScale - (index - 1); int r = (int) (colors[index - 1][0] + partScale * (colors[index][0] - colors[index - 1][0])); int g = (int) (colors[index - 1][1] + partScale * (colors[index][1] - colors[index - 1][1])); int b = (int) (colors[index - 1][2] + partScale * (colors[index][2] - colors[index - 1][2])); r = Math.min(255, r); b = Math.min(255, b); g = Math.min(255, g); return (r << 16) | (g << 8) | b; } /** * Class to collect the data for one particular map, e.g., for coordinates * of items with German Wikipedia articles. Objects of the class aggregate * all relevant data and finally create the output file for the current * settings. * * @author Markus Kroetzsch * */ class ValueMap { final int[][] values; final String siteFilter; int count = 0; int maxValue = 1; // avoid drawing scale with 0 elements /** * Constructor. * * @param siteFilter * the site key to filter for, or null to count all items */ public ValueMap(String siteFilter) { this.values = new int[WorldMapProcessor.this.width][WorldMapProcessor.this.height]; this.siteFilter = siteFilter; } /** * Counts the given coordinates, unless the item document is filtered. * It is assumed that the coordinates are in the admissible range. * * @param xCoord * @param yCoord * @param itemDocument */ public void countCoordinates(int xCoord, int yCoord, ItemDocument itemDocument) { if (this.siteFilter != null) { if (!itemDocument.getSiteLinks().containsKey(this.siteFilter)) { return; } } this.count++; this.values[xCoord][yCoord] += 1; if (this.maxValue < this.values[xCoord][yCoord]) { this.maxValue = this.values[xCoord][yCoord]; } } /** * Writes the image file for the collected data.
*/ public void writeImage() { int width = WorldMapProcessor.this.width; int height = WorldMapProcessor.this.height; BufferedImage image = new BufferedImage(width, height + 13, BufferedImage.TYPE_INT_RGB); for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { image.setRGB(x, height - 1 - y, getColor(this.values[x][y])); } } int previousValue = 0; int scaleMarkStep = 1; for (int x = 0; x < width; x++) { int value = (int) Math.exp(Math.log(10) * Math.log10(Math.max(10, this.maxValue)) * x / width); int color = getColor(value); if (value / scaleMarkStep > previousValue / scaleMarkStep) { if (value / (10 * scaleMarkStep) > previousValue / (10 * scaleMarkStep)) { scaleMarkStep = 10 * scaleMarkStep; } previousValue = value; continue; } for (int y = height + 12; y > height + 3; y--) { image.setRGB(x, y, color); } previousValue = value; } String fileName = "map-items"; if (this.siteFilter != null) { fileName += "-" + this.siteFilter; } if (!GlobeCoordinatesValue.GLOBE_EARTH .equals(WorldMapProcessor.this.globe)) { fileName += "-" + WorldMapProcessor.this.globe .substring(WorldMapProcessor.this.globe .lastIndexOf('Q')); } fileName += "-" + width + "x" + height + ".png"; try (FileOutputStream out = ExampleHelpers .openExampleFileOuputStream(fileName)) { ImageIO.write(image, "PNG", out); } catch (IOException e) { e.printStackTrace(); } } /** * Prints the progress of the data collection. */ public void reportProgress() { System.out.print("* Processed " + this.count + " coordinates"); if (this.siteFilter != null) { System.out.print(" for site " + this.siteFilter); } else { System.out.print(" in total"); } System.out.print(" (max. value: " + this.maxValue + ")"); System.out.println(); } @Override public int hashCode() { return ((siteFilter == null) ? 0 : siteFilter.hashCode()); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof ValueMap)) { return false; } return this.siteFilter.equals(((ValueMap) obj).siteFilter); } } } Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/bots/000077500000000000000000000000001444772566300303475ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/bots/BotSettings.java000066400000000000000000000023421444772566300334600ustar00rootroot00000000000000package org.wikidata.wdtk.examples.bots; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Simple class to store bot configurations to use for all bots. *
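* <p>
* To let the bots log in, set the constants below in your local working
* copy (placeholder values shown):
* <pre>
* static final String USERNAME = "ExampleBot";          // placeholder
* static final String PASSWORD = "not-a-real-password"; // placeholder
* </pre>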

* NEVER COMMIT YOUR PERSONAL CREDENTIALS TO GIT. * * @author Markus Kroetzsch * */ public class BotSettings { /** * User name to log in, or null to edit anonymously. */ static final String USERNAME = null; /** * Password for the given user. */ static final String PASSWORD = null; /** * True if the user should set a bot flag (if logged in and endowed with the * required rights). */ static final boolean EDIT_AS_BOT = true; } FixIntegerQuantityPrecisionsBot.java000066400000000000000000000332401444772566300374440ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/botspackage org.wikidata.wdtk.examples.bots; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.io.PrintStream; import java.math.BigDecimal; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.examples.ExampleHelpers; import org.wikidata.wdtk.util.WebResourceFetcherImpl; import org.wikidata.wdtk.wikibaseapi.BasicApiConnection; import org.wikidata.wdtk.wikibaseapi.LoginFailedException; import org.wikidata.wdtk.wikibaseapi.WikibaseDataEditor; import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; /** * This bot adds changes quantity values of properties that are required to use * integers (such as population numbers) to be exact if they are now set to * +/-1. The latter is the default when editing through the API but not useful * there. *
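* <p>
* A worked example with hypothetical numbers: a population of 1234 entered
* through the API is typically stored with bounds 1233 and 1235, i.e.
* "1234 +/- 1". This bot replaces such a value by the exact quantity whose
* bounds both equal the value itself, as in:
* <pre>
* QuantityValue exact = Datamodel.makeQuantityValue(
*         new BigDecimal("1234"), new BigDecimal("1234"), new BigDecimal("1234"));
* </pre>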

* The activity of the bot is logged in the file * bot-log-fixintprec-TIMESTAMP.txt. Note that this log contains all edits that * would have been made, even if editing was disabled. Errors are logged to the * console only. *

* By default, this program has the actual editing disabled (see source code), * so as to avoid accidental modifications. The output will still mention * changes that would be done. If you want to do real edits, please respect the * bot etiquette and community guidelines. *

* The function of the bot is very basic, but it illustrates some important * techniques: *

* <ul>
* <li>Scan a recent dump for items worth changing</li>
* <li>Check the online version of each item before really changing it, and
* use the online revision id to prevent edit conflicts</li>
* <li>Update statements while preserving most of their content</li>
* <li>Use basic bot configuration features (login, disable editing for test,
* limited numbers of test edits)</li>
* </ul>
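* <p>
* The conflict-safe update pattern is sketched here with a hypothetical item
* id and variables (the actual code is in fixIntegerPrecisions()):
* <pre>
* ItemDocument current = (ItemDocument) dataFetcher.getEntityDocument("Q64");
* Statement fixed = StatementBuilder
*         .forSubjectAndProperty(current.getEntityId(), property)
*         .withValue(exactValue)
*         .withId(oldStatement.getStatementId()) // same id: update, not add
*         .build();
* dataEditor.updateStatements(current, Collections.singletonList(fixed),
*         Collections.emptyList(), "edit summary", null);
* </pre>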
* The bot is tried and tested, and has been used on Wikidata to perform its * task on several 1,000 items (see User:Makrobot). * * @author Markus Kroetzsch * */ public class FixIntegerQuantityPrecisionsBot implements EntityDocumentProcessor { /** * List of all integer properties considered by this bot. */ final static String[] integerProperties = { "P1082", // population "P1083", // capacity (seats etc.) "P1092", // total produced (product) "P1098", // number of speakers "P1099", // number of masts "P1100", // number of cylinders "P1101", // floors above ground "P1103", // number of platforms "P1104", // number of pages "P1110", // attendance (people attending an event) "P1111", // votes received "P1113", // series length "P1114", // quantity (how many?) "P1120", // number of deaths "P1128", // employees "P1129", // national team caps "P1132", // number of participants "P1139", // floors below ground "P1141", // number of processor cores "P1164", // cardinality of the group (in mathematics): "P1174", // visitors per year "P1301", // number of elevators "P1314", // number of spans (bridge) "P1339", // number of injured "P1342", // number of seats "P1345", // number of victims "P1350", // number of matches played "P1355", // wins (of sports matches) "P1356", // losses (of sports matches) "P1357", // matches/games drawn/tied "P1359", // number of points/goals conceded "P1373", // daily ridership "P1410", // number of seats of the organization in legislature "P1418", // number of orbits completed "P1436", // collection or exhibition size "P1446", // number of missing "P1538", // number of households "P1539", // female population "P1540", // male population "P1548", // maximum Strahler number (of rivers etc.) "P1561", // number of survivors "P1569", // number of edges "P1570", // number of vertices "P1590", // number of casualties "P1603", // number of cases (in medical outbreaks) "P1641", // port (-number; in computing) "P1658", // number of faces (of a mathematical solid) "P1831", // electorate (number of registered voters) "P1833", // number of registered users/contributors "P1867", // eligible voters "P1868", // ballots cast "P1872", // minimum number of players "P1873", // maximum number of players "P1971", // number of children "P2021", // Erdős number "P2103", // size of team at start "P2105", // size of team at finish "P2124", // membership "P2196", // students count }; final BasicApiConnection connection; final WikibaseDataEditor dataEditor; final WikibaseDataFetcher dataFetcher; /** * Number of entities modified so far. */ int modifiedEntities = 0; /** * Number of statements modified so far. */ int modifiedStatements = 0; /** * Number of statements modified so far, per property. */ Map modifiedStatementsByProperty = new HashMap<>(); /** * The place to write logging information to. */ final PrintStream logfile; /** * Main method to run the bot. * * @param args * @throws LoginFailedException * @throws IOException */ public static void main(String[] args) throws LoginFailedException, IOException { ExampleHelpers.configureLogging(); printDocumentation(); FixIntegerQuantityPrecisionsBot bot = new FixIntegerQuantityPrecisionsBot(); ExampleHelpers.processEntitiesFromWikidataDump(bot); bot.finish(); System.out.println("*** Done."); } /** * Prints some basic documentation about this program. 
*/ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: FixIntegerQuantitiesBot"); System.out.println("*** "); System.out .println("*** This bot downloads recent Wikidata dumps to locate items about"); System.out .println("*** that use quantity values for integer-valued properties, such as"); System.out .println("*** popluation, and checks if they have a precision of +/-1. In this"); System.out .println("*** case, it fixes their precision to be exact (+/-0)."); System.out .println("********************************************************************"); } /** * Constructor. * * @throws LoginFailedException * @throws IOException */ public FixIntegerQuantityPrecisionsBot() throws LoginFailedException, IOException { WebResourceFetcherImpl .setUserAgent("makrobot 0.4.0; Wikidata Toolkit; Java"); connection = BasicApiConnection.getWikidataApiConnection(); if (BotSettings.USERNAME != null) { connection.login(BotSettings.USERNAME, BotSettings.PASSWORD); } dataEditor = new WikibaseDataEditor(connection, Datamodel.SITE_WIKIDATA); dataEditor.setEditAsBot(BotSettings.EDIT_AS_BOT); dataEditor.disableEditing(); // do no actual edits // dataEditor.setRemainingEdits(5); // do at most 5 (test) edits dataFetcher = new WikibaseDataFetcher(connection, Datamodel.SITE_WIKIDATA); // Do not retrieve data that we don't care about here: dataFetcher.getFilter().excludeAllLanguages(); dataFetcher.getFilter().excludeAllSiteLinks(); // Initialise array to count for (String propertyId : integerProperties) { this.modifiedStatementsByProperty.put(propertyId, 0); } String timeStamp = new SimpleDateFormat("yyyyMMdd'T'HHmmss") .format(new Date()); this.logfile = new PrintStream( ExampleHelpers.openExampleFileOuputStream("bot-log-fixintprec-" + timeStamp + ".txt")); } @Override public void processItemDocument(ItemDocument itemDocument) { for (String propertyId : integerProperties) { if (hasPlusMinusOneValues(itemDocument .findStatementGroup(propertyId))) { fixIntegerPrecisions(itemDocument.getEntityId(), propertyId); } // else: ignore items that have no value or only correct values // for the property we consider } } /** * Finishes processing and makes sure that the log file is written. */ public void finish() { this.logfile.close(); System.out.println("### " + modifiedStatements + " statements modified: " + modifiedStatementsByProperty.toString()); } /** * Fetches the current online data for the given item, and fixes the * precision of integer quantities if necessary. * * @param itemIdValue * the id of the document to inspect * @param propertyId * id of the property to consider */ protected void fixIntegerPrecisions(ItemIdValue itemIdValue, String propertyId) { String qid = itemIdValue.getId(); try { // Fetch the online version of the item to make sure we edit the // current version: ItemDocument currentItemDocument = (ItemDocument) dataFetcher .getEntityDocument(qid); if (currentItemDocument == null) { System.out.println("*** " + qid + " could not be fetched. 
Maybe it has been deleted."); return; } // Get the current statements for the property we want to fix: StatementGroup editPropertyStatements = currentItemDocument .findStatementGroup(propertyId); if (editPropertyStatements == null) { System.out.println("*** " + qid + " no longer has any statements for " + propertyId); return; } PropertyIdValue property = Datamodel .makeWikidataPropertyIdValue(propertyId); List updateStatements = new ArrayList<>(); for (Statement s : editPropertyStatements) { QuantityValue qv = (QuantityValue) s.getValue(); if (qv != null && isPlusMinusOneValue(qv)) { QuantityValue exactValue = Datamodel.makeQuantityValue( qv.getNumericValue(), qv.getNumericValue(), qv.getNumericValue()); Statement exactStatement = StatementBuilder .forSubjectAndProperty(itemIdValue, property) .withValue(exactValue).withId(s.getStatementId()) .withQualifiers(s.getQualifiers()) .withReferences(s.getReferences()) .withRank(s.getRank()).build(); updateStatements.add(exactStatement); } } if (updateStatements.size() == 0) { System.out.println("*** " + qid + " quantity values for " + propertyId + " already fixed"); return; } logEntityModification(currentItemDocument.getEntityId(), updateStatements, propertyId); dataEditor.updateStatements(currentItemDocument, updateStatements, Collections.emptyList(), "Set exact values for [[Property:" + propertyId + "|" + propertyId + "]] integer quantities (Task MB2)", null); } catch (MediaWikiApiErrorException | IOException e) { e.printStackTrace(); } } /** * Logs information about entities changed so far. * * @param entityId * the id of the modified item * @param updateStatements * @param propertyId */ protected void logEntityModification(EntityIdValue entityId, List updateStatements, String propertyId) { modifiedEntities++; modifiedStatements += updateStatements.size(); modifiedStatementsByProperty.put( propertyId, modifiedStatementsByProperty.get(propertyId) + updateStatements.size()); System.out.println(entityId.getId() + ": fixing " + updateStatements.size() + " statement(s) for " + propertyId + " (" + modifiedEntities + " entities modified so far)"); this.logfile.println("\n==" + entityId.getId() + "==\n" + updateStatements.toString()); if (modifiedEntities % 10 == 0) { this.logfile.flush(); System.out.println("### " + modifiedStatements + " statements modified so far: " + modifiedStatementsByProperty.toString()); } } /** * Checks if the given value is a number with precision +/-1. * * @param quantityValue * @return */ protected boolean isPlusMinusOneValue(QuantityValue quantityValue) { BigDecimal valueSucc = quantityValue.getNumericValue().add( BigDecimal.ONE); BigDecimal valuePrec = quantityValue.getNumericValue().subtract( BigDecimal.ONE); return (quantityValue.getLowerBound().equals(valuePrec) && quantityValue.getUpperBound().equals(valueSucc) && "1" .equals(quantityValue.getUnit())); } /** * Checks if the given statement group contains at least one value of * precision +/-1. 
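* <p>
* Sketch of the per-value test, with hypothetical numbers:
* <pre>
* // value 1234, bounds [1233, 1235], unit "1"  ->  isPlusMinusOneValue: true
* // value 1234, bounds [1234, 1234]            ->  false (already exact)
* </pre>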
* * @param statementGroup * @return */ protected boolean hasPlusMinusOneValues(StatementGroup statementGroup) { if (statementGroup == null) { return false; } for (Statement s : statementGroup) { QuantityValue qv = (QuantityValue) s.getValue(); if (qv != null && isPlusMinusOneValue(qv)) { return true; } } return false; } } SetLabelsForNumbersBot.java000066400000000000000000000257141444772566300354720ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/botspackage org.wikidata.wdtk.examples.bots; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.io.PrintStream; import java.math.BigInteger; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.Set; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.examples.ExampleHelpers; import org.wikidata.wdtk.util.WebResourceFetcherImpl; import org.wikidata.wdtk.wikibaseapi.BasicApiConnection; import org.wikidata.wdtk.wikibaseapi.LoginFailedException; import org.wikidata.wdtk.wikibaseapi.WikibaseDataEditor; import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; /** * This bot adds a default label to Wikidata items that are about numbers, by * simply creating a string for any integer number. Decimal numbers that are not * integers are not touched since they usually should have more meaningful * labels than the numerical representation. Moreover, the bot checks the class * (P31 value) of each item to make sure that only items about numbers are * re-labelled. *
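* <p>
* The filter is sketched here (using the names from the code below):
* <pre>
* boolean candidate = itemDocument.hasStatement("P1181")           // has a numeric value
*         && itemDocument.hasStatementValue("P31", numberClasses); // is a known number class
* </pre>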

* The activity of the bot is logged in the file * bot-log-setnumlabels-TIMESTAMP.txt. Note that this log contains all edits * that would have been made, even if editing was disabled. Errors are logged to * the console only. *

* By default, this program has the actual editing disabled (see source code), * so as to avoid accidental modifications. The output will still mention * changes that would be done. If you want to do real edits, please respect the * bot etiquette and community guidelines. *

* The function of the bot is very basic, but it illustrates some important * techniques: *

* <ul>
* <li>Scan a recent dump for items worth changing</li>
* <li>Check the online version of each item before really changing it, and
* use the online revision id to prevent edit conflicts</li>
* <li>Create data objects for writing</li>
* <li>Use basic bot configuration features (login, disable editing for test,
* limited numbers of test edits)</li>
* </ul>
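* <p>
* The label-writing pattern is sketched here with a hypothetical item id
* (the actual code is in addLabelForNumbers()):
* <pre>
* ItemDocument current = (ItemDocument) dataFetcher.getEntityDocument("Q42");
* ItemDocument update = ItemDocumentBuilder.forItemId(current.getEntityId())
*         .withRevisionId(current.getRevisionId()) // prevents edit conflicts
*         .withLabel("42", "en")
*         .build();
* dataEditor.editItemDocument(update, false, "edit summary", null);
* </pre>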
* The bot is tried and tested, and has been used on Wikidata to perform its * task on over 10,000 items (see User:Makrobot). * * @author Markus Kroetzsch * */ public class SetLabelsForNumbersBot implements EntityDocumentProcessor { final BasicApiConnection connection; final WikibaseDataEditor dataEditor; final WikibaseDataFetcher dataFetcher; /** * Number of entities modified so far. */ int modifiedEntities = 0; /** * The place to write logging information to. */ final PrintStream logfile; /** * List of language codes for those languages where it is meaningful to * label a number with an Arabic numeral string. There are more than those, * of course. */ final static String[] arabicNumeralLanguages = { "en", "de", "fr", "pt", "it", "es", "nl", "da", "ru" }; /** * Set of Wikidata items that are commonly used to classify numbers that we * would like to edit with this bot. This is relevant since some things that * have numerical values are not intentionally numbers, and hence should not * have a label that is a numerical representation. An example of such a * case is https://www.wikidata.org/wiki/Q2415057 which would not be * correctly labelled as "20". */ final static Set numberClasses = new HashSet<>(); static { numberClasses.add(Datamodel.makeWikidataItemIdValue("Q12503")); // integer numberClasses.add(Datamodel.makeWikidataItemIdValue("Q13366104")); // even // number numberClasses.add(Datamodel.makeWikidataItemIdValue("Q13366129")); // odd // number numberClasses.add(Datamodel.makeWikidataItemIdValue("Q21199")); // natural // number } /** * Main method to run the bot. */ public static void main(String[] args) throws LoginFailedException, IOException { ExampleHelpers.configureLogging(); printDocumentation(); SetLabelsForNumbersBot bot = new SetLabelsForNumbersBot(); ExampleHelpers.processEntitiesFromWikidataDump(bot); bot.finish(); System.out.println("*** Done."); } /** * Prints some basic documentation about this program. */ public static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: SetLabelsForNumbersBot"); System.out.println("*** "); System.out .println("*** This bot downloads recent Wikidata dumps to locate items about"); System.out .println("*** integer numbers, and it adds default labels for these items in "); System.out .println("*** several languages, if there is no label for a language yet."); System.out .println("********************************************************************"); } /** * Constructor. 
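* <p>
* Editing is disabled in the body below via dataEditor.disableEditing(); a
* cautious live test would instead limit the number of edits:
* <pre>
* dataEditor.setRemainingEdits(5); // do at most 5 (test) edits
* </pre>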
* * @throws LoginFailedException * @throws IOException */ public SetLabelsForNumbersBot() throws LoginFailedException, IOException { WebResourceFetcherImpl .setUserAgent("makrobot 0.3.0; Wikidata Toolkit; Java"); connection = BasicApiConnection.getWikidataApiConnection(); if (BotSettings.USERNAME != null) { connection.login(BotSettings.USERNAME, BotSettings.PASSWORD); } dataEditor = new WikibaseDataEditor(connection, Datamodel.SITE_WIKIDATA); dataEditor.setEditAsBot(BotSettings.EDIT_AS_BOT); dataEditor.disableEditing(); // do no actual edits // dataEditor.setRemainingEdits(5); // do at most 5 (test) edits dataFetcher = new WikibaseDataFetcher(connection, Datamodel.SITE_WIKIDATA); String timeStamp = new SimpleDateFormat("yyyyMMdd'T'HHmmss") .format(new Date()); this.logfile = new PrintStream( ExampleHelpers .openExampleFileOuputStream("bot-log-setnumlabels-" + timeStamp + ".txt")); } @Override public void processItemDocument(ItemDocument itemDocument) { if (itemDocument.hasStatement("P1181")) { if (lacksSomeLanguage(itemDocument)) { addLabelForNumbers(itemDocument.getEntityId()); } else { System.out.println("*** Labels already complete for " + itemDocument.getEntityId().getId()); } } // else: ignore items that have no numeric value } /** * Finishes processing and makes sure that the log file is written. */ public void finish() { this.logfile.close(); } /** * Fetches the current online data for the given item, and adds numerical * labels if necessary. * * @param itemIdValue * the id of the document to inspect */ protected void addLabelForNumbers(ItemIdValue itemIdValue) { String qid = itemIdValue.getId(); try { // Fetch the online version of the item to make sure we edit the // current version: ItemDocument currentItemDocument = (ItemDocument) dataFetcher .getEntityDocument(qid); if (currentItemDocument == null) { System.out.println("*** " + qid + " could not be fetched. Maybe it has been deleted."); return; } // Check if we still have exactly one numeric value: QuantityValue number = currentItemDocument .findStatementQuantityValue("P1181"); if (number == null) { System.out.println("*** No unique numeric value for " + qid); return; } // Check if the item is in a known numeric class: if (!currentItemDocument.hasStatementValue("P31", numberClasses)) { System.out .println("*** " + qid + " is not in a known class of integer numbers. 
Skipping."); return; } // Check if the value is integer and build label string: String numberString; try { BigInteger intValue = number.getNumericValue() .toBigIntegerExact(); numberString = intValue.toString(); } catch (ArithmeticException e) { System.out.println("*** Numeric value for " + qid + " is not an integer: " + number.getNumericValue()); return; } // Construct data to write: ItemDocumentBuilder itemDocumentBuilder = ItemDocumentBuilder .forItemId(itemIdValue).withRevisionId( currentItemDocument.getRevisionId()); ArrayList languages = new ArrayList<>( arabicNumeralLanguages.length); for (String arabicNumeralLanguage : arabicNumeralLanguages) { if (!currentItemDocument.getLabels().containsKey( arabicNumeralLanguage)) { itemDocumentBuilder.withLabel(numberString, arabicNumeralLanguage); languages.add(arabicNumeralLanguage); } } if (languages.size() == 0) { System.out.println("*** Labels already complete for " + qid); return; } logEntityModification(currentItemDocument.getEntityId(), numberString, languages); dataEditor.editItemDocument(itemDocumentBuilder.build(), false, "Set labels to numeric value (Task MB1)", null); } catch (MediaWikiApiErrorException | IOException e) { e.printStackTrace(); } } /** * Returns true if the given item document lacks a label for at least one of * the languages covered. * * @param itemDocument * @return true if some label is missing */ protected boolean lacksSomeLanguage(ItemDocument itemDocument) { for (String arabicNumeralLanguage : arabicNumeralLanguages) { if (!itemDocument.getLabels() .containsKey(arabicNumeralLanguage)) { return true; } } return false; } /** * Logs information about entities changed so far. * * @param entityId * the id of the modified item * @param numberLabel * the label written * @param languages * the list of languages for which the label was set */ protected void logEntityModification(EntityIdValue entityId, String numberLabel, ArrayList languages) { modifiedEntities++; System.out.println(entityId.getId() + ": adding label " + numberLabel + " for languages " + languages.toString() + " (" + modifiedEntities + " entities modified so far)"); this.logfile.println(entityId.getId() + "," + numberLabel + ",\"" + languages.toString() + "\""); if (modifiedEntities % 10 == 0) { this.logfile.flush(); } } } package-info.java000066400000000000000000000014551444772566300334640ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/bots/** * Package for examples of bots that edit Wikidata. * * @author Markus Kroetzsch * */ package org.wikidata.wdtk.examples.bots; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ Wikidata-Toolkit-0.14.6/wdtk-examples/src/main/java/org/wikidata/wdtk/examples/package-info.java000066400000000000000000000015061444772566300325710ustar00rootroot00000000000000/** * Package for example programs that demonstrate how to use Wikidata Toolkit in practice. 
* * @author Markus Kroetzsch */ package org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ Wikidata-Toolkit-0.14.6/wdtk-examples/src/resources/000077500000000000000000000000001444772566300223725ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-examples/src/resources/sample-dump-20150815.json.gz000066400000000000000000000170571444772566300270430ustar00rootroot00000000000000
Wikidata-Toolkit-0.14.6/wdtk-rdf/000077500000000000000000000000001444772566300165265ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/LICENSE.txt000066400000000000000000000261351444772566300203600ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
Wikidata-Toolkit-0.14.6/wdtk-rdf/pom.xml000066400000000000000000000032121444772566300200410ustar00rootroot00000000000000 4.0.0 org.wikidata.wdtk wdtk-parent 0.14.6 wdtk-rdf jar Wikidata Toolkit RDF serialize RDF from the wdtk data model org.eclipse.rdf4j rdf4j-rio-turtle ${rdf4jVersion} org.eclipse.rdf4j rdf4j-rio-ntriples ${rdf4jVersion} ${project.groupId} wdtk-datamodel ${project.version} ${project.groupId} wdtk-util ${project.version} ${project.groupId} wdtk-wikibaseapi ${project.version} ${project.groupId} wdtk-testing ${project.version} test src/test/resources Wikidata-Toolkit-0.14.6/wdtk-rdf/src/000077500000000000000000000000001444772566300173155ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/000077500000000000000000000000001444772566300202415ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/000077500000000000000000000000001444772566300211625ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/000077500000000000000000000000001444772566300217515ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/000077500000000000000000000000001444772566300235465ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300245175ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/000077500000000000000000000000001444772566300252725ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/AbstractRdfConverter.java000066400000000000000000000374611444772566300322370ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Collection; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.implementation.DatatypeIdImpl; import org.wikidata.wdtk.datamodel.interfaces.*; import org.wikidata.wdtk.rdf.values.AnyValueConverter; /** * This class provides functions to convert objects of wdtk-datamodel in a rdf * graph. 
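* <p>
* Typical use is sketched here (a concrete subclass supplies the RdfWriter,
* Sites and PropertyRegister via the constructor):
* <pre>
* converter.writeNamespaceDeclarations();
* converter.writeBasicDeclarations();
* converter.writeItemDocument(itemDocument); // ends by calling finishDocument()
* </pre>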
* * @author Michael Günther * */ abstract public class AbstractRdfConverter { static final Logger logger = LoggerFactory.getLogger(AbstractRdfConverter.class); final RdfWriter rdfWriter; final AnyValueConverter valueRdfConverter; final SnakRdfConverter snakRdfConverter; final OwlDeclarationBuffer owlDeclarationBuffer = new OwlDeclarationBuffer(); final ReferenceRdfConverter referenceRdfConverter; final PropertyRegister propertyRegister; final Sites sites; public enum TermKind { LABEL, DESCRIPTION, ALIAS } public AbstractRdfConverter(RdfWriter rdfWriter, Sites sites, PropertyRegister propertyRegister) { this.sites = sites; this.rdfWriter = rdfWriter; this.propertyRegister = propertyRegister; this.valueRdfConverter = new AnyValueConverter(rdfWriter, this.owlDeclarationBuffer, this.propertyRegister); this.snakRdfConverter = new SnakRdfConverter(rdfWriter, this.owlDeclarationBuffer, this.propertyRegister, this.valueRdfConverter); this.referenceRdfConverter = new ReferenceRdfConverter(rdfWriter, this.snakRdfConverter, this.propertyRegister.siteUri); } /** * Writes OWL declarations for all basic vocabulary elements used in the * dump. * * Example of the triples written by this method: * {@code wikibase:propertyType rdf:type owl:ObjectProperty} */ public void writeBasicDeclarations() throws RDFHandlerException { for (Map.Entry uriType : Vocabulary .getKnownVocabularyTypes().entrySet()) { this.rdfWriter.writeTripleUriObject(uriType.getKey(), RdfWriter.RDF_TYPE, uriType.getValue()); } } /** * Writes all namespace declarations used in the dump, for example {@code wikibase:} or {@code schema:}. */ public void writeNamespaceDeclarations() throws RDFHandlerException { this.rdfWriter.writeNamespaceDeclaration("wd", this.propertyRegister.getUriPrefix()); this.rdfWriter .writeNamespaceDeclaration("wikibase", Vocabulary.PREFIX_WBONTO); this.rdfWriter.writeNamespaceDeclaration("rdf", Vocabulary.PREFIX_RDF); this.rdfWriter .writeNamespaceDeclaration("rdfs", Vocabulary.PREFIX_RDFS); this.rdfWriter.writeNamespaceDeclaration("owl", Vocabulary.PREFIX_OWL); this.rdfWriter.writeNamespaceDeclaration("xsd", Vocabulary.PREFIX_XSD); this.rdfWriter.writeNamespaceDeclaration("schema", Vocabulary.PREFIX_SCHEMA); this.rdfWriter .writeNamespaceDeclaration("skos", Vocabulary.PREFIX_SKOS); this.rdfWriter .writeNamespaceDeclaration("prov", Vocabulary.PREFIX_PROV); } /** * Writes all buffered triples and finishes writing a document. * * This will take care of writing auxiliary triples that got buffered during serialization, * such as OWL declarations, references and auxiliary triples for complex values. */ public void finishDocument() throws RDFHandlerException { this.snakRdfConverter.writeAuxiliaryTriples(); this.writeOWLDeclarations(); this.referenceRdfConverter.writeReferences(); } public void writeOWLDeclarations() { this.owlDeclarationBuffer.writePropertyDeclarations(this.rdfWriter, true, true); } /* * Suppress spurious warning about IRI.toString() caused by deprecation of base URI interface. 
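* For instance, writeItemDocument() below calls this method with
* RdfWriter.WB_ITEM, producing a triple of the form (hypothetical item,
* assuming the prefixes declared above): wd:Q42 rdf:type wikibase:Item.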
*/ @SuppressWarnings("deprecation") public void writeDocumentType(Resource subject, IRI type) { this.rdfWriter.writeTripleUriObject(subject, RdfWriter.RDF_TYPE, type.toString()); } public void writeItemDocument(ItemDocument document) throws RDFHandlerException { final String subjectUri = document.getEntityId().getIri(); final Resource subject = this.rdfWriter.getUri(subjectUri); writeDocumentType(subject, RdfWriter.WB_ITEM); writeDocumentTerms(document); writeStatements(document); writeSiteLinks(subject, document.getSiteLinks()); finishDocument(); } public void writePropertyDatatype(PropertyDocument document) { this.rdfWriter.writeTripleValueObject( this.rdfWriter.getUri(document.getEntityId().getIri()), RdfWriter.WB_PROPERTY_TYPE, this.rdfWriter.getUri(getDatatypeIri(document.getDatatype()))); } public void writePropertyDocument(PropertyDocument document) throws RDFHandlerException { propertyRegister.setPropertyType(document.getEntityId(), getDatatypeIri(document .getDatatype())); final String subjectUri = document.getEntityId().getIri(); final Resource subject = this.rdfWriter.getUri(subjectUri); writeDocumentType(subject, RdfWriter.WB_PROPERTY); writePropertyDatatype(document); writeDocumentTerms(document); writeStatements(document); writeInterPropertyLinks(document); finishDocument(); } /** * Writes triples which connect properties with their corresponding rdf * properties for statements, simple statements, qualifiers, reference * attributes and values. */ public void writeInterPropertyLinks(PropertyDocument document) throws RDFHandlerException { Resource subject = this.rdfWriter.getUri(document.getEntityId() .getIri()); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_DIRECT_CLAIM_PROP), Vocabulary .getPropertyUri(document.getEntityId(), PropertyContext.DIRECT)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_CLAIM_PROP), Vocabulary.getPropertyUri( document.getEntityId(), PropertyContext.STATEMENT)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_STATEMENT_PROP), Vocabulary .getPropertyUri(document.getEntityId(), PropertyContext.VALUE_SIMPLE)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_STATEMENT_VALUE_PROP), Vocabulary.getPropertyUri(document.getEntityId(), PropertyContext.VALUE)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_QUALIFIER_PROP), Vocabulary .getPropertyUri(document.getEntityId(), PropertyContext.QUALIFIER_SIMPLE)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_QUALIFIER_VALUE_PROP), Vocabulary .getPropertyUri(document.getEntityId(), PropertyContext.QUALIFIER)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_REFERENCE_PROP), Vocabulary .getPropertyUri(document.getEntityId(), PropertyContext.REFERENCE_SIMPLE)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_REFERENCE_VALUE_PROP), Vocabulary .getPropertyUri(document.getEntityId(), PropertyContext.REFERENCE)); this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter .getUri(Vocabulary.WB_NO_VALUE_PROP), Vocabulary .getPropertyUri(document.getEntityId(), PropertyContext.NO_VALUE)); // TODO something more with NO_VALUE } public void writeDocumentTerms(TermedDocument document) throws RDFHandlerException { final Resource subject = this.rdfWriter.getUri(document.getEntityId().getIri()); writeTermTriples(subject, TermKind.LABEL, 
document.getLabels().values()); writeTermTriples(subject, TermKind.DESCRIPTION, document.getDescriptions().values()); for (List aliases : document.getAliases().values()) { writeTermTriples(subject, TermKind.ALIAS, aliases); } } public void writeTermTriples(Resource subject, TermKind kind, Collection terms) throws RDFHandlerException { final IRI predicate; switch (kind) { case LABEL: predicate = RdfWriter.RDFS_LABEL; break; case DESCRIPTION: predicate = RdfWriter.SCHEMA_DESCRIPTION; break; case ALIAS: predicate = RdfWriter.SKOS_ALT_LABEL; break; default: throw new IllegalArgumentException(); } for (MonolingualTextValue mtv : terms) { this.rdfWriter.writeTripleValueObject(subject, predicate, AbstractRdfConverter.getMonolingualTextValueLiteral(mtv, this.rdfWriter)); } } public void writeStatements(StatementDocument statementDocument) throws RDFHandlerException { for (StatementGroup statementGroup : statementDocument.getStatementGroups()) { // determine the rank of the best statement final StatementGroup bestStatements = statementGroup.getBestStatements(); final StatementRank bestRank; if (statementGroup.getBestStatements() != null) { bestRank = bestStatements.iterator().next().getRank(); } else { bestRank = null; } for (Statement statement : statementGroup) { writeStatement(statement, statement.getRank() == bestRank); } } } public void writeStatement(Statement statement, boolean best) throws RDFHandlerException { if (best) { writeSimpleStatement(statement); } writeFullStatement(statement, best); } public void writeFullStatement(Statement statement, boolean best) throws RDFHandlerException { final Resource subject = this.rdfWriter.getUri(statement.getSubject().getIri()); String statementUri = Vocabulary.getStatementUri(statement); Resource statementResource = this.rdfWriter.getUri(statementUri); final IRI propertyIri = this.rdfWriter.getUri( Vocabulary.getPropertyUri(statement.getMainSnak().getPropertyId(), PropertyContext.STATEMENT)); this.rdfWriter.writeTripleUriObject(subject, propertyIri, statementUri); this.rdfWriter.writeTripleValueObject(statementResource, RdfWriter.RDF_TYPE, RdfWriter.WB_STATEMENT); writeClaim(statementResource, statement.getClaim()); writeReferences(statementResource, statement.getReferences()); writeStatementRankTriple(statementResource, statement.getRank(), best); } public void writeSimpleStatement(Statement statement) { final Resource subject = this.rdfWriter.getUri(statement.getSubject().getIri()); this.snakRdfConverter.setSnakContext(subject, PropertyContext.DIRECT); statement.getMainSnak().accept(this.snakRdfConverter); } /** * Writes a triple for the {@link StatementRank} of a {@link Statement} to * the dump. If this is a best-rank statement, also writes a best rank triple. 
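* <p>
* Sketch of the effect for a normal-rank statement that is also best-ranked
* (the exact rank URI comes from Vocabulary.getStatementRankUri()):
* <pre>
* writeStatementRankTriple(statementResource, StatementRank.NORMAL, true);
* // writes:   statementResource wikibase:rank (rank URI)
* // and also: statementResource rdf:type Vocabulary.WB_BEST_RANK
* </pre>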
* * @param subject The IRI of the statement * @param rank The rank of the statement * @param best True if this statement is a best-rank statement */ public void writeStatementRankTriple(Resource subject, StatementRank rank, boolean best) { try { this.rdfWriter.writeTripleUriObject(subject, RdfWriter.WB_RANK, Vocabulary.getStatementRankUri(rank)); if (best) { this.rdfWriter.writeTripleUriObject(subject, RdfWriter.RDF_TYPE, Vocabulary.WB_BEST_RANK); } } catch (RDFHandlerException e) { throw new RuntimeException(e.getMessage(), e); } } public void writeReferences(Resource statementResource, List references) throws RDFHandlerException { for (Reference reference : references) { Resource resource = this.referenceRdfConverter .addReference(reference); this.rdfWriter.writeTripleValueObject(statementResource, RdfWriter.PROV_WAS_DERIVED_FROM, resource); } } public void writeClaim(Resource claimResource, Claim claim) { // write main snak this.snakRdfConverter.setSnakContext(claimResource, PropertyContext.VALUE); claim.getMainSnak().accept(this.snakRdfConverter); this.snakRdfConverter.setSnakContext(claimResource, PropertyContext.VALUE_SIMPLE); claim.getMainSnak().accept(this.snakRdfConverter); // write qualifier this.snakRdfConverter.setSnakContext(claimResource, PropertyContext.QUALIFIER); for (SnakGroup snakGroup : claim.getQualifiers()) { for (Snak snak : snakGroup) { snak.accept(this.snakRdfConverter); } } this.snakRdfConverter.setSnakContext(claimResource, PropertyContext.QUALIFIER_SIMPLE); for (SnakGroup snakGroup : claim.getQualifiers()) { for (Snak snak : snakGroup) { snak.accept(this.snakRdfConverter); } } } public void writeSiteLinks(Resource subject, Map siteLinks) throws RDFHandlerException { for (String key : siteLinks.keySet()) { SiteLink siteLink = siteLinks.get(key); String siteLinkUrl = this.sites.getSiteLinkUrl(siteLink); if (siteLinkUrl != null) { IRI siteLinkUri = this.rdfWriter.getUri(siteLinkUrl); this.rdfWriter.writeTripleValueObject(siteLinkUri, RdfWriter.RDF_TYPE, RdfWriter.SCHEMA_ARTICLE); this.rdfWriter.writeTripleValueObject(siteLinkUri, RdfWriter.SCHEMA_ABOUT, subject); String siteLanguageCode = this.sites.getLanguageCode(siteLink.getSiteKey()); this.rdfWriter.writeTripleStringObject(siteLinkUri, RdfWriter.SCHEMA_IN_LANGUAGE, convertSiteLanguageCode(siteLanguageCode)); for(ItemIdValue badge : siteLink.getBadges()) { this.rdfWriter.writeTripleUriObject(siteLinkUri, RdfWriter.WB_BADGE, badge.getIri()); } } else { logger.warn("Failed to find URL for page \"" + siteLink.getPageTitle() + "\" on site \"" + siteLink.getSiteKey() + "\""); } } } private String convertSiteLanguageCode(String languageCode) { try { return WikimediaLanguageCodes.getLanguageCode(languageCode); } catch (IllegalArgumentException e) { logger.warn("Unknown Wikimedia language code \"" + languageCode + "\". Using this code in RDF now, but this might be wrong."); return languageCode; } } public static Value getMonolingualTextValueLiteral( MonolingualTextValue value, RdfWriter rdfWriter) { String languageCode; try { languageCode = WikimediaLanguageCodes.getLanguageCode(value .getLanguageCode()); } catch (IllegalArgumentException e) { languageCode = value.getLanguageCode(); logger.warn("Unknown Wikimedia language code \"" + languageCode + "\". 
		}
		return rdfWriter.getLiteral(value.getText(), languageCode);
	}

	public static String getDatatypeIri(DatatypeIdValue datatype) {
		String jsonDatatype = datatype.getJsonString();
		switch (jsonDatatype) {
		case DatatypeIdValue.JSON_DT_ITEM:
			return Vocabulary.DT_ITEM;
		case DatatypeIdValue.JSON_DT_PROPERTY:
			return Vocabulary.DT_PROPERTY;
		case DatatypeIdValue.JSON_DT_GLOBE_COORDINATES:
			return Vocabulary.DT_GLOBE_COORDINATES;
		case DatatypeIdValue.JSON_DT_URL:
			return Vocabulary.DT_URL;
		case DatatypeIdValue.JSON_DT_COMMONS_MEDIA:
			return Vocabulary.DT_COMMONS_MEDIA;
		case DatatypeIdValue.JSON_DT_TIME:
			return Vocabulary.DT_TIME;
		case DatatypeIdValue.JSON_DT_QUANTITY:
			return Vocabulary.DT_QUANTITY;
		case DatatypeIdValue.JSON_DT_STRING:
			return Vocabulary.DT_STRING;
		case DatatypeIdValue.JSON_DT_MONOLINGUAL_TEXT:
			return Vocabulary.DT_MONOLINGUAL_TEXT;
		case DatatypeIdValue.JSON_DT_EDTF:
			return Vocabulary.DT_EDTF;
		default:
			String[] parts = jsonDatatype.split("-");
			for (int i = 0; i < parts.length; i++) {
				parts[i] = StringUtils.capitalize(parts[i]);
			}
			return "http://wikiba.se/ontology#" + StringUtils.join(parts);
		}
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/OwlDeclarationBuffer.java000066400000000000000000000235421444772566300322040ustar00rootroot00000000000000package org.wikidata.wdtk.rdf;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;

/**
 * This class stores information about data that should be serialized in RDF
 * later on. This is done for two reasons: (1) to produce output where triples
 * are ordered by subject, (2) to avoid some duplicate triples for things that
 * are needed in many places. Due to memory constraints, this class does not
 * provide perfect duplicate elimination.
 *
 * @author Markus Kroetzsch
 *
 */
public class OwlDeclarationBuffer {

	final List<PropertyIdValue> objectPropertyQueue = new ArrayList<>();
	final List<PropertyIdValue> datatypePropertyQueue = new ArrayList<>();
	final List<IRI> objectPropertyUriQueue = new ArrayList<>();
	final List<IRI> datatypePropertyUriQueue = new ArrayList<>();
	final Set<PropertyIdValue> declaredProperties = new HashSet<>();
	final Set<IRI> declaredPropertyUris = new HashSet<>();
	final List<EntityIdValue> classEntityQueue = new ArrayList<>();
	final Set<EntityIdValue> declaredClassEntities = new HashSet<>();

	/**
	 * Adds the given property id value to the list of properties that should be
	 * declared as OWL object properties.
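	 *
	 * <p>
	 * Usage sketch ({@code rdfWriter} and {@code propertyIdValue} are assumed
	 * to be set up elsewhere): declarations are only queued here and written
	 * later in one batch.
	 * </p>
	 *
	 * <pre>{@code
	 * OwlDeclarationBuffer buffer = new OwlDeclarationBuffer();
	 * buffer.addObjectProperty(propertyIdValue); // queued, not yet written
	 * // ... add further properties while converting documents ...
	 * buffer.writePropertyDeclarations(rdfWriter, true, true);
	 * }</pre>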
* * @param propertyIdValue * the property to declare */ public void addObjectProperty(PropertyIdValue propertyIdValue) { if (!this.declaredProperties.contains(propertyIdValue)) { this.objectPropertyQueue.add(propertyIdValue); } } /** * Adds the given property URI string to the list of property URIs that * should be declared as OWL object properties. * * @param propertyUri * the property to declare */ public void addObjectProperty(IRI propertyUri) { if (!this.declaredPropertyUris.contains(propertyUri)) { this.objectPropertyUriQueue.add(propertyUri); } } /** * Adds the given property id value to the list of properties that should be * declared as OWL datatype properties. * * @param propertyIdValue * the property to declare */ public void addDatatypeProperty(PropertyIdValue propertyIdValue) { if (!this.declaredProperties.contains(propertyIdValue)) { this.datatypePropertyQueue.add(propertyIdValue); } } /** * Adds the given property URI string to the list of property URIs that * should be declared as OWL datatype properties. * * @param propertyUri * the property to declare */ public void addDatatypeProperty(IRI propertyUri) { if (!this.declaredPropertyUris.contains(propertyUri)) { this.datatypePropertyUriQueue.add(propertyUri); } } /** * Adds the given entity id value to the list of entities that should be * declared as OWL classes. * * @param entityIdValue * the property to declare */ public void addClass(EntityIdValue entityIdValue) { if (!this.declaredClassEntities.contains(entityIdValue)) { this.classEntityQueue.add(entityIdValue); } } /** * Writes OWL declarations for properties that have been added recently. * Declared properties are stored so that duplicate declarations are * avoided. * * @param rdfWriter * the writer to write the declarations to * @param fullStatements * if true, then properties need to export full statements (with * qualifiers and references) will be declared * @param simpleClaims * if true, then properties to export simple claims (flat * triples) will be declared * @throws RDFHandlerException * if there was a problem writing the declarations */ public void writePropertyDeclarations(RdfWriter rdfWriter, boolean fullStatements, boolean simpleClaims) throws RDFHandlerException { boolean anyStatements = fullStatements || simpleClaims; for (PropertyIdValue propertyIdValue : this.objectPropertyQueue) { if (!this.declaredProperties.add(propertyIdValue)) { continue; } if (anyStatements) { writeNoValueRestriction(rdfWriter, propertyIdValue.getIri(), Vocabulary.OWL_THING, Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.NO_VALUE)); } if (fullStatements) { rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.STATEMENT), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.VALUE_SIMPLE), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.VALUE), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.QUALIFIER), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.REFERENCE), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.QUALIFIER_SIMPLE), RdfWriter.RDF_TYPE, 
RdfWriter.OWL_OBJECT_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.REFERENCE_SIMPLE), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); } if (simpleClaims) { rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.DIRECT), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); } } this.objectPropertyQueue.clear(); for (PropertyIdValue propertyIdValue : this.datatypePropertyQueue) { if (!this.declaredProperties.add(propertyIdValue)) { continue; } if (anyStatements) { writeNoValueRestriction(rdfWriter, propertyIdValue.getIri(), Vocabulary.XSD_STRING, Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.NO_VALUE)); } if (fullStatements) { rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.STATEMENT), RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.VALUE), RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.QUALIFIER), RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.REFERENCE), RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.VALUE_SIMPLE), RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.QUALIFIER_SIMPLE), RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.REFERENCE_SIMPLE), RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); } if (simpleClaims) { rdfWriter.writeTripleValueObject(Vocabulary.getPropertyUri( propertyIdValue, PropertyContext.DIRECT), RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); } } this.datatypePropertyQueue.clear(); for (IRI propertyUri : this.objectPropertyUriQueue) { if (!this.declaredPropertyUris.add(propertyUri)) { continue; } rdfWriter.writeTripleValueObject(propertyUri, RdfWriter.RDF_TYPE, RdfWriter.OWL_OBJECT_PROPERTY); } this.objectPropertyUriQueue.clear(); for (IRI propertyUri : this.datatypePropertyUriQueue) { if (!this.declaredPropertyUris.add(propertyUri)) { continue; } rdfWriter.writeTripleValueObject(propertyUri, RdfWriter.RDF_TYPE, RdfWriter.OWL_DATATYPE_PROPERTY); } this.datatypePropertyUriQueue.clear(); } /** * Writes no-value restriction. 
	 *
	 * @param rdfWriter
	 *            the writer to write the restrictions to
	 * @param propertyUri
	 *            URI of the property to which the restriction applies
	 * @param rangeUri
	 *            URI of the class or datatype to which the restriction applies
	 * @param subject
	 *            node representing the restriction
	 * @throws RDFHandlerException
	 *             if there was a problem writing the RDF triples
	 */
	void writeNoValueRestriction(RdfWriter rdfWriter, String propertyUri,
			String rangeUri, String subject) throws RDFHandlerException {
		Resource bnodeSome = rdfWriter.getFreshBNode();
		rdfWriter.writeTripleValueObject(subject, RdfWriter.RDF_TYPE,
				RdfWriter.OWL_CLASS);
		rdfWriter.writeTripleValueObject(subject, RdfWriter.OWL_COMPLEMENT_OF,
				bnodeSome);
		rdfWriter.writeTripleValueObject(bnodeSome, RdfWriter.RDF_TYPE,
				RdfWriter.OWL_RESTRICTION);
		rdfWriter.writeTripleUriObject(bnodeSome, RdfWriter.OWL_ON_PROPERTY,
				propertyUri);
		rdfWriter.writeTripleUriObject(bnodeSome,
				RdfWriter.OWL_SOME_VALUES_FROM, rangeUri);
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/PropertyContext.java000066400000000000000000000020041444772566300313220ustar00rootroot00000000000000package org.wikidata.wdtk.rdf;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * Enum to define the context in which a property is used in the RDF export. We
 * use different URIs depending on this context.
 *
 * @author Markus Kroetzsch
 *
 */
public enum PropertyContext {
	STATEMENT, VALUE, QUALIFIER, REFERENCE, REFERENCE_SIMPLE, DIRECT,
	VALUE_SIMPLE, QUALIFIER_SIMPLE, NO_VALUE, NO_QUALIFIER_VALUE
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/PropertyRegister.java000066400000000000000000000364021444772566300314730ustar00rootroot00000000000000package org.wikidata.wdtk.rdf;

import java.io.IOException;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.*;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Map.Entry;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.implementation.EntityIdValueImpl;
import org.wikidata.wdtk.datamodel.implementation.PropertyIdValueImpl;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.QuantityValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;
import org.wikidata.wdtk.datamodel.interfaces.StringValue;
import org.wikidata.wdtk.datamodel.interfaces.TimeValue;
import org.wikidata.wdtk.datamodel.interfaces.ValueSnak;
import org.wikidata.wdtk.wikibaseapi.ApiConnection;
import org.wikidata.wdtk.wikibaseapi.BasicApiConnection;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;
import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException;

/**
 * This class helps to manage information about properties that has to be
 * obtained from a web service.
 *
 * @author Michael Guenther
 *
 */
public class PropertyRegister {

	static final Logger logger = LoggerFactory
			.getLogger(PropertyRegister.class);

	/**
	 * Object used to fetch data. Kept package private to allow being replaced
	 * by mock object in tests.
	 */
	WikibaseDataFetcher dataFetcher;

	/**
	 * Map that stores the datatype of properties. Properties are identified by
	 * their Pid; datatypes are identified by their datatype IRI.
	 */
	final protected Map<String, String> datatypes = new HashMap<>();

	/**
	 * Map that stores the URI patterns of properties. Properties are identified
	 * by their Pid; patterns are given as strings using $1 as placeholder for
	 * the escaped value.
	 */
	final protected Map<String, String> uriPatterns = new HashMap<>();

	/**
	 * Pid of the property used to store URI patterns, if used, or null if no
	 * such property should be considered.
	 */
	final String uriPatternPropertyId;

	/**
	 * URI prefix to be used on this site.
	 */
	final String siteUri;

	/**
	 * Maximum number of property documents that can be retrieved in one API
	 * call.
	 */
	final int API_MAX_ENTITY_DOCUMENT_NUMBER = 50;

	/**
	 * Smallest property number for which no information has been fetched from
	 * the Web yet in a systematic fashion. Whenever any property data is
	 * fetched, additional properties are also fetched and this number is
	 * incremented accordingly.
	 */
	int smallestUnfetchedPropertyIdNumber = 1;

	/**
	 * Properties that are known to be missing. This is used to avoid making
	 * a request for such a property again.
	 */
	final Set<String> knownMissing;

	static final PropertyRegister WIKIDATA_PROPERTY_REGISTER = new PropertyRegister(
			"P1921", BasicApiConnection.getWikidataApiConnection(),
			Datamodel.SITE_WIKIDATA);

	/**
	 * Constructs a new property register.
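	 *
	 * <p>
	 * For example, a register equivalent to the built-in Wikidata singleton
	 * (see {@link #getWikidataPropertyRegister()}) could be constructed like
	 * this sketch:
	 * </p>
	 *
	 * <pre>{@code
	 * PropertyRegister register = new PropertyRegister("P1921",
	 *         BasicApiConnection.getWikidataApiConnection(),
	 *         Datamodel.SITE_WIKIDATA);
	 * }</pre>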
	 *
	 * @param uriPatternPropertyId
	 *            property id used for a URI Pattern property, e.g., P1921 on
	 *            Wikidata; can be null if no such property should be used
	 * @param apiConnection
	 *            API connection object that defines how to connect to the
	 *            online API
	 * @param siteUri
	 *            the URI identifying the site that is accessed (usually the
	 *            prefix of entity URIs), e.g.,
	 *            "http://www.wikidata.org/entity/"
	 */
	public PropertyRegister(String uriPatternPropertyId,
			ApiConnection apiConnection, String siteUri) {
		this.uriPatternPropertyId = uriPatternPropertyId;
		this.siteUri = siteUri;
		this.knownMissing = new HashSet<>();

		dataFetcher = new WikibaseDataFetcher(apiConnection, siteUri);
	}

	/**
	 * Returns a singleton object that serves as a property register for
	 * Wikidata.
	 *
	 * @return property register for Wikidata
	 */
	public static PropertyRegister getWikidataPropertyRegister() {
		return WIKIDATA_PROPERTY_REGISTER;
	}

	/**
	 * Returns the URI prefix that is used on the site considered by this
	 * object. This string also identifies the site globally.
	 *
	 * @return The URI prefix, e.g., "http://www.wikidata.org/entity/"
	 */
	public String getUriPrefix() {
		return this.siteUri;
	}

	/**
	 * Returns the IRI of the primitive type of a {@link PropertyIdValue}.
	 *
	 * @param propertyIdValue
	 *            property whose datatype should be fetched
	 * @return URI of the datatype of this property, or null if the type could
	 *         not be determined
	 */
	public String getPropertyType(PropertyIdValue propertyIdValue) {
		if (!datatypes.containsKey(propertyIdValue.getId())) {
			fetchPropertyInformation(propertyIdValue);
		}
		return datatypes.get(propertyIdValue.getId());
	}

	/**
	 * Sets the datatype IRI, i.e., the IRI of the primitive type, for the
	 * given {@link PropertyIdValue}.
	 *
	 * @param propertyIdValue
	 * @param datatypeIri
	 */
	public void setPropertyType(PropertyIdValue propertyIdValue,
			String datatypeIri) {
		datatypes.put(propertyIdValue.getId(), datatypeIri);
	}

	/**
	 * Returns the URI pattern of a {@link PropertyIdValue} that should be used
	 * to create URIs of external resources from statement values for the
	 * property.
	 *
	 * @param propertyIdValue
	 *            property to fetch URI pattern for
	 * @return string pattern using "$1" as a placeholder, or null if no
	 *         pattern was found for the given property
	 */
	public String getPropertyUriPattern(PropertyIdValue propertyIdValue) {
		if (!this.datatypes.containsKey(propertyIdValue.getId())) {
			fetchPropertyInformation(propertyIdValue);
		}
		return this.uriPatterns.get(propertyIdValue.getId());
	}

	/**
	 * Returns the IRI of the primitive type of a property for
	 * {@link EntityIdValue} objects.
	 *
	 * TODO: this really ought to be exposed by the wdtk-datamodel
	 * module and reused here. The same heuristic is implemented in
	 * {@link EntityIdValueImpl}.
	 *
	 * @param propertyIdValue
	 * @param value
	 */
	public String setPropertyTypeFromEntityIdValue(
			PropertyIdValue propertyIdValue, EntityIdValue value) {
		try {
			return EntityIdValueImpl.guessEntityTypeFromId(value.getId());
		} catch (IllegalArgumentException e) {
			logger.warn("Could not determine datatype of "
					+ propertyIdValue.getId() + ".");
			logger.warn("Example value " + value.getId()
					+ " is not recognized as a valid entity id.");
			logger.warn("Perhaps this is a newly introduced datatype not supported by this version of wdtk.");
			logger.warn("Consider upgrading the library to a newer version.");
			return null;
		}
	}

	/**
	 * Returns the IRI of the primitive type of a property for
	 * {@link GlobeCoordinatesValue} objects.
	 *
	 * @param propertyIdValue
	 * @param value
	 */
	public String setPropertyTypeFromGlobeCoordinatesValue(
			PropertyIdValue propertyIdValue, GlobeCoordinatesValue value) {
		return DatatypeIdValue.DT_GLOBE_COORDINATES;
	}

	/**
	 * Returns the IRI of the primitive type of a property for
	 * {@link QuantityValue} objects.
	 *
	 * @param propertyIdValue
	 * @param value
	 */
	public String setPropertyTypeFromQuantityValue(
			PropertyIdValue propertyIdValue, QuantityValue value) {
		return DatatypeIdValue.DT_QUANTITY;
	}

	/**
	 * Returns the IRI of the primitive type of a property for
	 * {@link StringValue} objects.
	 *
	 * @param propertyIdValue
	 * @param value
	 */
	public String setPropertyTypeFromStringValue(
			PropertyIdValue propertyIdValue, StringValue value) {
		String datatype = getPropertyType(propertyIdValue);
		if (datatype == null) {
			logger.warn("Could not fetch datatype of "
					+ propertyIdValue.getIri() + ". Assuming type "
					+ DatatypeIdValue.DT_STRING);
			return DatatypeIdValue.DT_STRING; // default type for StringValue
		} else {
			return datatype;
		}
	}

	/**
	 * Returns the IRI of the primitive type of a property for
	 * {@link TimeValue} objects.
	 *
	 * @param propertyIdValue
	 * @param value
	 */
	public String setPropertyTypeFromTimeValue(PropertyIdValue propertyIdValue,
			TimeValue value) {
		return DatatypeIdValue.DT_TIME;
	}

	/**
	 * Returns the IRI of the primitive type of a property for
	 * {@link MonolingualTextValue} objects.
	 *
	 * @param propertyIdValue
	 * @param value
	 */
	public String setPropertyTypeFromMonolingualTextValue(
			PropertyIdValue propertyIdValue, MonolingualTextValue value) {
		return DatatypeIdValue.DT_MONOLINGUAL_TEXT;
	}

	/**
	 * Fetches the information of the given property from the Web API. Further
	 * properties are fetched in the same request and results cached so as to
	 * limit the total number of Web requests made until all properties are
	 * fetched.
	 *
	 * @param property
	 */
	protected void fetchPropertyInformation(PropertyIdValue property) {
		int propertyIdNumber = Integer.parseInt(property.getId().substring(1));
		// Don't do anything if all properties up to this index have already
		// been fetched. In particular, don't try indefinitely to find a
		// certain property type (maybe the property was deleted).
		//
		// If we previously tried to fetch this property and didn't
		// find it, there is no point in trying again either.
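		//
		// (Illustration: a first lookup of, say, P1000 also prefetches the 49
		// lowest-numbered properties that were not fetched yet in the same API
		// request, so later lookups of those ids are answered from the local
		// "datatypes" and "uriPatterns" maps without further web requests.)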
		if (this.smallestUnfetchedPropertyIdNumber > propertyIdNumber
				|| knownMissing.contains(property.getId())) {
			return;
		}

		List<String> propertyIds = new ArrayList<>(
				API_MAX_ENTITY_DOCUMENT_NUMBER);
		propertyIds.add(property.getId());
		for (int i = 1; i < API_MAX_ENTITY_DOCUMENT_NUMBER; i++) {
			propertyIds.add("P" + this.smallestUnfetchedPropertyIdNumber);
			this.smallestUnfetchedPropertyIdNumber++;
		}

		dataFetcher.getFilter().setLanguageFilter(Collections.emptySet());
		dataFetcher.getFilter().setSiteLinkFilter(Collections.emptySet());

		Map<String, EntityDocument> properties;
		try {
			properties = dataFetcher.getEntityDocuments(propertyIds);
		} catch (MediaWikiApiErrorException | IOException e) {
			logger.error("Error when trying to fetch property data: "
					+ e.toString());
			properties = Collections.emptyMap();
		}

		for (Entry<String, EntityDocument> entry : properties.entrySet()) {
			EntityDocument propertyDocument = entry.getValue();
			if (!(propertyDocument instanceof PropertyDocument)) {
				continue;
			}

			String datatype = AbstractRdfConverter.getDatatypeIri(
					((PropertyDocument) propertyDocument).getDatatype());
			this.datatypes.put(entry.getKey(), datatype);
			logger.info("Fetched type information for property "
					+ entry.getKey() + " online: " + datatype);

			if (!DatatypeIdValue.DT_STRING.equals(datatype)
					&& !DatatypeIdValue.DT_EXTERNAL_ID.equals(datatype)) {
				continue;
			}

			for (StatementGroup sg : ((PropertyDocument) propertyDocument)
					.getStatementGroups()) {
				if (!sg.getProperty().getId().equals(this.uriPatternPropertyId)) {
					continue;
				}
				for (Statement statement : sg) {
					if (statement.getMainSnak() instanceof ValueSnak
							&& statement.getValue() instanceof StringValue) {
						String uriPattern = ((StringValue) statement.getValue()).getString();
						if (this.uriPatterns.containsKey(entry.getKey())) {
							logger.info("Found multiple URI patterns for property "
									+ entry.getKey()
									+ " but only one is supported in current code.");
						}
						this.uriPatterns.put(entry.getKey(), uriPattern);
					}
				}
			}
		}

		if (!this.datatypes.containsKey(property.getId())) {
			logger.error("Failed to fetch type information for property "
					+ property.getId() + " online.");
			knownMissing.add(property.getId());
		}
	}

	/**
	 * Fetches type information for all known properties from the given SPARQL
	 * endpoint, and adds it to the register. The SPARQL endpoint must support
	 * the wikibase:propertyType predicate.
	 *
	 * @param endpoint URI of the SPARQL service to use, for example
	 *                 "https://query.wikidata.org/sparql"
	 */
	/*
	 * Suppress spurious warning about IRI.toString() caused by deprecation of
	 * base URI interface.
	 */
	@SuppressWarnings("deprecation")
	public void fetchUsingSPARQL(URI endpoint) {
		try {
			// this query is written without assuming any PREFIXES like wd: or
			// wdt: to ensure it is as portable as possible (the
			// PropertyRegister might be used with private Wikibase instances
			// and SPARQL endpoints that don't have the same PREFIXES defined
			// as the Wikidata Query Service)
			final String query = "SELECT ?prop ?type ?uri WHERE { "
					+ "<" + this.siteUri + this.uriPatternPropertyId
					+ "> <http://wikiba.se/ontology#directClaim> ?uriDirect . "
					+ "?prop <http://wikiba.se/ontology#propertyType> ?type . "
" + "OPTIONAL { ?prop ?uriDirect ?uri } " + "}"; final String queryString = "query=" + query + "&format=json"; final URL queryUrl = new URI( endpoint.getScheme(), endpoint.getUserInfo(), endpoint.getHost(), endpoint.getPort(), endpoint.getPath(), queryString, null ).toURL(); final HttpURLConnection connection = (HttpURLConnection) queryUrl.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("User-Agent", "Wikidata-Toolkit PropertyRegister"); final ObjectMapper mapper = new ObjectMapper(); JsonNode root = mapper.readTree(connection.getInputStream()); JsonNode bindings = root.path("results").path("bindings"); final ValueFactory valueFactory = SimpleValueFactory.getInstance(); int count = 0; int countPatterns = 0; for (JsonNode binding : bindings) { final IRI property = valueFactory.createIRI(binding.path("prop").path("value").asText()); final IRI propType = valueFactory.createIRI(binding.path("type").path("value").asText()); final PropertyIdValue propId = new PropertyIdValueImpl(property.getLocalName(), this.siteUri); setPropertyType(propId, propType.toString()); count += 1; if (binding.has("uri")) { countPatterns += 1; this.uriPatterns.put(propId.getId(), binding.path("uri").path("value").asText()); } } logger.info("Fetched type information for " + count + " properties (" + countPatterns + " with URI patterns) using SPARQL."); } catch(IOException|URISyntaxException e) { logger.error("Error when trying to fetch property data using SPARQL: " + e.toString()); } } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/RankBuffer.java000066400000000000000000000044101444772566300301610ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.HashSet; import java.util.Set; import org.eclipse.rdf4j.model.Resource; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; /** * Holds information about the highest rank in an {@link EntityDocument} and the * corresponding statement to generate BestRank triples. * Ignores deprecated statements * * @author Michael Guenther * */ public class RankBuffer { /** * highest Rank of an statment in the current {@link EntityDocument}. */ private StatementRank bestRank = StatementRank.NORMAL; /** * Rdf Resources that refer to statements with the highest rank. */ private final Set subjects = new HashSet<>(); /** * Clears the buffer. This function should be called after each export of an * entity document. */ public void clear() { bestRank = StatementRank.NORMAL; subjects.clear(); } /** * Adds a Statement. 
* * @param rank * rank of the statement * @param subject * rdf resource that refers to the statement */ public void add(StatementRank rank, Resource subject) { if (this.bestRank == rank) { subjects.add(subject); } else if(bestRank == StatementRank.NORMAL && rank == StatementRank.PREFERRED) { //We found a preferred statement subjects.clear(); bestRank = StatementRank.PREFERRED; subjects.add(subject); } } /** * Returns the statements with the highest rank. * * @return statement resource with the highest rank. */ public Set getBestRankedStatements() { return this.subjects; } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/RdfConverter.java000066400000000000000000000107471444772566300305510ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /*- * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 - 2019 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.wikidata.wdtk.datamodel.interfaces.*; import java.util.Collection; import java.util.Map; public class RdfConverter extends AbstractRdfConverter { int tasks = RdfSerializer.TASK_ALL_ENTITIES | RdfSerializer.TASK_ALL_EXACT_DATA; public RdfConverter(RdfWriter rdfWriter, Sites sites, PropertyRegister propertyRegister) { super(rdfWriter, sites, propertyRegister); } /** * Sets the tasks that should be performed during export. The value should * be a combination of flags such as {@link RdfSerializer#TASK_STATEMENTS}. * * @param tasks * the tasks to be performed */ public void setTasks(int tasks) { this.tasks = tasks; } /** * Returns the tasks that should be performed during export. The value * should be a combination of flags such as * {@link RdfSerializer#TASK_STATEMENTS}. * * @return tasks to be performed */ public int getTasks() { return this.tasks; } /** * Checks if the given task (or set of tasks) is to be performed. 
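	 *
	 * <p>
	 * Since tasks are bit flags, a sketch like the following (with
	 * {@code converter} as a placeholder) shows the intended use:
	 * </p>
	 *
	 * <pre>{@code
	 * converter.setTasks(RdfSerializer.TASK_ITEMS | RdfSerializer.TASK_TERMS);
	 * converter.hasTask(RdfSerializer.TASK_LABELS);     // true (part of TASK_TERMS)
	 * converter.hasTask(RdfSerializer.TASK_STATEMENTS); // false
	 * }</pre>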
	 *
	 * @param task
	 *            the task (or set of tasks) to be checked
	 * @return true if the tasks include the given task
	 */
	boolean hasTask(int task) {
		return ((this.tasks & task) == task);
	}

	@Override
	public void writeTermTriples(Resource subject, TermKind kind,
			Collection<MonolingualTextValue> terms) throws RDFHandlerException {
		switch (kind) {
		case LABEL:
			if (!hasTask(RdfSerializer.TASK_LABELS)) return;
			break;
		case DESCRIPTION:
			if (!hasTask(RdfSerializer.TASK_DESCRIPTIONS)) return;
			break;
		case ALIAS:
			if (!hasTask(RdfSerializer.TASK_ALIASES)) return;
			break;
		}
		super.writeTermTriples(subject, kind, terms);
	}

	@Override
	public void writeSiteLinks(Resource subject, Map<String, SiteLink> siteLinks)
			throws RDFHandlerException {
		if (!hasTask(RdfSerializer.TASK_SITELINKS)) return;
		super.writeSiteLinks(subject, siteLinks);
	}

	@Override
	public void writePropertyDatatype(PropertyDocument document) {
		if (!hasTask(RdfSerializer.TASK_DATATYPES)) return;
		super.writePropertyDatatype(document);
	}

	@Override
	public void writeInterPropertyLinks(PropertyDocument document)
			throws RDFHandlerException {
		if (!hasTask(RdfSerializer.TASK_PROPERTY_LINKS)) return;
		super.writeInterPropertyLinks(document);
	}

	@Override
	public void writeSimpleStatement(Statement statement) {
		if (!hasTask(RdfSerializer.TASK_SIMPLE_STATEMENTS)) return;
		if (statement.getQualifiers().size() == 0) {
			super.writeSimpleStatement(statement);
		}
	}

	@Override
	public void writeFullStatement(Statement statement, boolean best)
			throws RDFHandlerException {
		if (!hasTask(RdfSerializer.TASK_STATEMENTS)) return;
		super.writeFullStatement(statement, best);
	}

	@Override
	public void writeItemDocument(ItemDocument document)
			throws RDFHandlerException {
		if (!hasTask(RdfSerializer.TASK_ITEMS)) return;
		super.writeItemDocument(document);
	}

	@Override
	public void writePropertyDocument(PropertyDocument document)
			throws RDFHandlerException {
		if (!hasTask(RdfSerializer.TASK_PROPERTIES)) return;
		super.writePropertyDocument(document);
	}

	@Override
	public void writeOWLDeclarations() {
		this.owlDeclarationBuffer.writePropertyDeclarations(this.rdfWriter,
				this.hasTask(RdfSerializer.TASK_STATEMENTS),
				this.hasTask(RdfSerializer.TASK_SIMPLE_STATEMENTS));
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/RdfSerializer.java000066400000000000000000000115071444772566300307060ustar00rootroot00000000000000package org.wikidata.wdtk.rdf;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
* #L% */ import java.io.IOException; import java.io.OutputStream; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentDumpProcessor; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.Sites; /** * This class implements {@link EntityDocumentDumpProcessor} to provide a RDF * serializer to render RDF graphs of {@link EntityDocument} objects. * * @author Michael Günther * */ public class RdfSerializer implements EntityDocumentDumpProcessor { static final Logger logger = LoggerFactory.getLogger(RdfSerializer.class); public static final int TASK_STATEMENTS = 0x00000001; public static final int TASK_SITELINKS = 0x00000002; public static final int TASK_DATATYPES = 0x00000004; public static final int TASK_PROPERTY_LINKS = 0x00000080; public static final int TASK_LABELS = 0x00000010; public static final int TASK_DESCRIPTIONS = 0x00000020; public static final int TASK_ALIASES = 0x00000040; public static final int TASK_TERMS = TASK_LABELS | TASK_DESCRIPTIONS | TASK_ALIASES; public static final int TASK_ALL_EXACT_DATA = TASK_TERMS | TASK_STATEMENTS | TASK_SITELINKS | TASK_DATATYPES | TASK_PROPERTY_LINKS; public static final int TASK_SIMPLE_STATEMENTS = 0x00040000; public static final int TASK_ITEMS = 0x00000100; public static final int TASK_PROPERTIES = 0x00000200; public static final int TASK_ALL_ENTITIES = TASK_ITEMS | TASK_PROPERTIES; final OutputStream output; final RdfConverter rdfConverter; final RdfWriter rdfWriter; /** * Creates a new RDF serializer for the specified format and output stream. * * @param format * RDF format, such as RDFFormat.TURTLE * @param output * the output stream to write to * @param sites * information about site links */ public RdfSerializer(RDFFormat format, OutputStream output, Sites sites, PropertyRegister propertyRegister) { this.output = output; this.rdfWriter = new RdfWriter(format, output); this.rdfConverter = new RdfConverter(this.rdfWriter, sites, propertyRegister); } /** * Sets the tasks that should be performed during export. The value should * be a combination of flags such as {@link RdfSerializer#TASK_STATEMENTS}. * * @param tasks * the tasks to be performed */ public void setTasks(int tasks) { this.rdfConverter.setTasks(tasks); } /** * Returns the tasks that should be performed during export. The value * should be a combination of flags such as * {@link RdfSerializer#TASK_STATEMENTS}. * * @return tasks to be performed */ public int getTasks() { return this.rdfConverter.getTasks(); } /** * Returns the number of triples that have been written so far. 
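	 *
	 * <p>
	 * For instance, in a sketch like the following (the {@code sites} object
	 * and the output stream are assumed to come from elsewhere), the count can
	 * be read after closing the serializer:
	 * </p>
	 *
	 * <pre>{@code
	 * RdfSerializer serializer = new RdfSerializer(RDFFormat.NTRIPLES, output,
	 *         sites, PropertyRegister.getWikidataPropertyRegister());
	 * serializer.setTasks(RdfSerializer.TASK_ITEMS | RdfSerializer.TASK_STATEMENTS);
	 * serializer.open();
	 * // ... feed entity documents via processItemDocument() ...
	 * serializer.close();
	 * long triples = serializer.getTripleCount();
	 * }</pre>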
* * @return number of triples */ public long getTripleCount() { return this.rdfWriter.getTripleCount(); } @Override public void open() { try { this.rdfWriter.start(); this.rdfConverter.writeNamespaceDeclarations(); this.rdfConverter.writeBasicDeclarations(); } catch (RDFHandlerException e) { // we cannot recover here throw new RuntimeException(e.toString(), e); } } @Override public void processItemDocument(ItemDocument itemDocument) { try { this.rdfConverter.writeItemDocument(itemDocument); } catch (RDFHandlerException e) { // we cannot recover here throw new RuntimeException(e.toString(), e); } } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { try { this.rdfConverter.writePropertyDocument(propertyDocument); } catch (RDFHandlerException e) { // we cannot recover here throw new RuntimeException(e.toString(), e); } } @Override public void close() { try { this.rdfWriter.finish(); } catch (RDFHandlerException e) { // we cannot recover here throw new RuntimeException(e.toString(), e); } try { this.output.close(); } catch (IOException e) { throw new RuntimeException(e.toString(), e); } } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/RdfWriter.java000066400000000000000000000224061444772566300300510ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.OutputStream; import org.eclipse.rdf4j.model.BNode; import org.eclipse.rdf4j.model.Literal; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFWriter; import org.eclipse.rdf4j.rio.Rio; import org.eclipse.rdf4j.rio.UnsupportedRDFormatException; /** * This class provides methods for writing RDF data to an output stream. It * encapsulates many details of the RDF library we use. It also provides a * unique point at which statistics about the number of triples etc. can be * gathered. 
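 *
 * <p>
 * A minimal usage sketch (format and stream chosen arbitrarily here):
 * </p>
 *
 * <pre>{@code
 * RdfWriter writer = new RdfWriter(RDFFormat.TURTLE, System.out);
 * writer.start();
 * writer.writeTripleUriObject("http://www.wikidata.org/entity/Q42",
 *         RdfWriter.RDF_TYPE, Vocabulary.WB_ITEM);
 * writer.finish();
 * }</pre>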
* * @author Markus Kroetzsch * */ public class RdfWriter { static final ValueFactory factory = SimpleValueFactory.getInstance(); public static final IRI RDF_TYPE = factory.createIRI(Vocabulary.RDF_TYPE); public static final IRI RDFS_LABEL = factory .createIRI(Vocabulary.RDFS_LABEL); public static final IRI RDFS_SEE_ALSO = factory .createIRI(Vocabulary.RDFS_SEE_ALSO); public static final IRI RDFS_LITERAL = factory .createIRI(Vocabulary.RDFS_LITERAL); public static final IRI RDFS_SUBCLASS_OF = factory .createIRI(Vocabulary.RDFS_SUBCLASS_OF); public static final IRI RDFS_SUBPROPERTY_OF = factory .createIRI(Vocabulary.RDFS_SUBPROPERTY_OF); public static final IRI OWL_THING = factory.createIRI(Vocabulary.OWL_THING); public static final IRI OWL_CLASS = factory.createIRI(Vocabulary.OWL_CLASS); public static final IRI OWL_OBJECT_PROPERTY = factory .createIRI(Vocabulary.OWL_OBJECT_PROPERTY); public static final IRI OWL_DATATYPE_PROPERTY = factory .createIRI(Vocabulary.OWL_DATATYPE_PROPERTY); public static final IRI OWL_RESTRICTION = factory .createIRI(Vocabulary.OWL_RESTRICTION); public static final IRI OWL_SOME_VALUES_FROM = factory .createIRI(Vocabulary.OWL_SOME_VALUES_FROM); public static final IRI OWL_ON_PROPERTY = factory .createIRI(Vocabulary.OWL_ON_PROPERTY); public static final IRI OWL_COMPLEMENT_OF = factory .createIRI(Vocabulary.OWL_COMPLEMENT_OF); public static final IRI XSD_DOUBLE = factory .createIRI(Vocabulary.XSD_DOUBLE); public static final IRI XSD_DECIMAL = factory .createIRI(Vocabulary.XSD_DECIMAL); public static final IRI XSD_INT = factory.createIRI(Vocabulary.XSD_INT); public static final IRI XSD_DATE = factory.createIRI(Vocabulary.XSD_DATE); public static final IRI XSD_G_YEAR = factory .createIRI(Vocabulary.XSD_G_YEAR); public static final IRI XSD_G_YEAR_MONTH = factory .createIRI(Vocabulary.XSD_G_YEAR_MONTH); public static final IRI XSD_DATETIME = factory .createIRI(Vocabulary.XSD_DATETIME); public static final IRI XSD_STRING = factory .createIRI(Vocabulary.XSD_STRING); public static final IRI SKOS_ALT_LABEL = factory .createIRI(Vocabulary.SKOS_ALT_LABEL); public static final IRI SCHEMA_ABOUT = factory .createIRI(Vocabulary.SCHEMA_ABOUT); public static final IRI SCHEMA_ARTICLE = factory .createIRI(Vocabulary.SCHEMA_ARTICLE); public static final IRI SCHEMA_DESCRIPTION = factory .createIRI(Vocabulary.SCHEMA_DESCRIPTION); public static final IRI SCHEMA_IN_LANGUAGE = factory .createIRI(Vocabulary.SCHEMA_IN_LANGUAGE); public static final IRI PROV_WAS_DERIVED_FROM = factory .createIRI(Vocabulary.PROV_WAS_DERIVED_FROM); public static final IRI WB_ITEM = factory.createIRI(Vocabulary.WB_ITEM); public static final IRI WB_REFERENCE = factory .createIRI(Vocabulary.WB_REFERENCE); public static final IRI WB_PROPERTY = factory .createIRI(Vocabulary.WB_PROPERTY); public static final IRI WB_STATEMENT = factory .createIRI(Vocabulary.WB_STATEMENT); public static final IRI WB_QUANTITY_VALUE = factory .createIRI(Vocabulary.WB_QUANTITY_VALUE); public static final IRI WB_TIME_VALUE = factory .createIRI(Vocabulary.WB_TIME_VALUE); public static final IRI WB_GLOBE_COORDINATES_VALUE = factory .createIRI(Vocabulary.WB_GLOBE_COORDINATES_VALUE); public static final IRI WB_PROPERTY_TYPE = factory .createIRI(Vocabulary.WB_PROPERTY_TYPE); public static final IRI WB_GEO_GLOBE = factory.createIRI(Vocabulary.WB_GEO_GLOBE); public static final IRI WB_GEO_LATITUDE = factory .createIRI(Vocabulary.WB_GEO_LATITUDE); public static final IRI WB_GEO_LONGITUDE = factory .createIRI(Vocabulary.WB_GEO_LONGITUDE); public 
static final IRI WB_GEO_PRECISION = factory .createIRI(Vocabulary.WB_GEO_PRECISION); public static final IRI WB_TIME = factory.createIRI(Vocabulary.WB_TIME); public static final IRI WB_TIME_PRECISION = factory .createIRI(Vocabulary.WB_TIME_PRECISION); public static final IRI WB_TIME_TIMEZONE = factory .createIRI(Vocabulary.WB_TIME_TIMEZONE); public static final IRI WB_TIME_CALENDAR_MODEL = factory .createIRI(Vocabulary.WB_TIME_CALENDAR_MODEL); public static final IRI WB_QUANTITY_AMOUNT = factory .createIRI(Vocabulary.WB_QUANTITY_AMOUNT); public static final IRI WB_QUANTITY_LOWER_BOUND = factory .createIRI(Vocabulary.WB_QUANTITY_LOWER_BOUND); public static final IRI WB_QUANTITY_UPPER_BOUND = factory .createIRI(Vocabulary.WB_QUANTITY_UPPER_BOUND); public static final IRI WB_QUANTITY_UNIT = factory .createIRI(Vocabulary.WB_QUANTITY_UNIT); public static final IRI OGC_LOCATION = factory .createIRI(Vocabulary.OGC_LOCATION); public static final IRI WB_RANK = factory.createIRI(Vocabulary.WB_RANK); public static final IRI WB_BEST_RANK = factory .createIRI(Vocabulary.WB_BEST_RANK); public static final IRI WB_BADGE = factory.createIRI(Vocabulary.WB_BADGE); RDFWriter writer; long tripleCount = 0; public RdfWriter(RDFFormat format, OutputStream output) throws UnsupportedRDFormatException { this(Rio.createWriter(format, output)); } public RdfWriter(RDFWriter writer) { this.writer = writer; } public long getTripleCount() { return this.tripleCount; } public void start() throws RDFHandlerException { this.tripleCount = 0; this.writer.startRDF(); } public void finish() throws RDFHandlerException { this.writer.endRDF(); } public BNode getFreshBNode() { return factory.createBNode(); } /** * Creates a IRI object for the given IRI string. Callers who use this with * user-provided data should check for exceptions. * * @param uri * the IRI string * @return the IRI object * @throws IllegalArgumentException * if the string is not a valid absolute URI. 
*/ public IRI getUri(String uri) { return factory.createIRI(uri); } public Literal getLiteral(String value) { return factory.createLiteral(value); } public Literal getLiteral(String value, String languageCode) { return factory.createLiteral(value, languageCode); } public Literal getLiteral(String value, IRI datatypeUri) { return factory.createLiteral(value, datatypeUri); } public void writeNamespaceDeclaration(String prefix, String uri) throws RDFHandlerException { this.writer.handleNamespace(prefix, uri); } public void writeTripleStringObject(Resource subject, IRI predicate, String objectLiteral) throws RDFHandlerException { writeTripleValueObject(subject, predicate, factory.createLiteral(objectLiteral)); } public void writeTripleIntegerObject(Resource subject, IRI predicate, int objectLiteral) throws RDFHandlerException { writeTripleValueObject(subject, predicate, factory.createLiteral(objectLiteral)); } public void writeTripleUriObject(String subjectUri, IRI predicate, String objectUri) throws RDFHandlerException { writeTripleValueObject(subjectUri, predicate, factory.createIRI(objectUri)); } public void writeTripleUriObject(Resource subject, IRI predicate, String objectUri) throws RDFHandlerException { writeTripleValueObject(subject, predicate, factory.createIRI(objectUri)); } public void writeTripleValueObject(String subjectUri, IRI predicate, Value object) throws RDFHandlerException { IRI subject = factory.createIRI(subjectUri); this.tripleCount++; this.writer.handleStatement(factory.createStatement(subject, predicate, object)); } public void writeTripleValueObject(Resource subject, IRI predicate, Value object) throws RDFHandlerException { this.tripleCount++; this.writer.handleStatement(factory.createStatement(subject, predicate, object)); } public void writeTripleLiteralObject(Resource subject, IRI predicate, String objectLexicalValue, IRI datatype) throws RDFHandlerException { Literal object = factory.createLiteral(objectLexicalValue, datatype); this.tripleCount++; this.writer.handleStatement(factory.createStatement(subject, predicate, object)); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/ReferenceRdfConverter.java000066400000000000000000000077001444772566300323630ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.*; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; /** * This class supports the conversion of references to RDF. It buffers * references to avoid duplicates and to allow reference triples to be * serialized separately (for more efficient encodings in syntaxes like Turtle * or RDF/XML). 
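 *
 * <p>
 * Usage sketch (the converter is assumed to be wired up as in
 * {@code AbstractRdfConverter}): a reference is registered first, which only
 * yields its subject resource; the actual reference triples are flushed later
 * in one batch.
 * </p>
 *
 * <pre>{@code
 * Resource refNode = referenceRdfConverter.addReference(reference);
 * // ... write <statement> prov:wasDerivedFrom refNode ...
 * referenceRdfConverter.writeReferences(); // each reference written only once
 * }</pre>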
 *
 * @author Markus Kroetzsch
 *
 */
public class ReferenceRdfConverter {

	final RdfWriter rdfWriter;
	final SnakRdfConverter snakRdfConverter;

	final List<Reference> referenceQueue = new ArrayList<>();
	final List<Resource> referenceSubjectQueue = new ArrayList<>();
	final Set<Resource> declaredReferences = new HashSet<>();

	final String siteUri;

	/**
	 * Constructor.
	 *
	 * @param rdfWriter
	 *            object to use for constructing URI objects
	 * @param snakRdfConverter
	 *            object to use for writing snaks
	 * @param siteUri
	 *            URI prefix that is used by the processed site
	 */
	public ReferenceRdfConverter(RdfWriter rdfWriter,
			SnakRdfConverter snakRdfConverter, String siteUri) {
		this.rdfWriter = rdfWriter;
		this.snakRdfConverter = snakRdfConverter;
		this.siteUri = siteUri;
	}

	/**
	 * Adds the given reference to the list of references that should still be
	 * serialized, and returns the RDF resource that will be used as a subject.
	 *
	 * @param reference
	 *            the reference to be serialized
	 * @return RDF resource that represents this reference
	 */
	public Resource addReference(Reference reference) {
		Resource resource = this.rdfWriter.getUri(Vocabulary.getReferenceUri(reference));
		this.referenceQueue.add(reference);
		this.referenceSubjectQueue.add(resource);
		return resource;
	}

	/**
	 * Writes references that have been added recently. Auxiliary triples that
	 * are generated for serializing snaks in references will be written right
	 * afterwards. This will also trigger any other auxiliary triples to be
	 * written that the snak converter object may have buffered.
	 *
	 * @throws RDFHandlerException
	 *             if there was a problem writing the restrictions
	 */
	public void writeReferences() throws RDFHandlerException {
		Iterator<Reference> referenceIterator = this.referenceQueue.iterator();
		for (Resource resource : this.referenceSubjectQueue) {
			final Reference reference = referenceIterator.next();
			if (this.declaredReferences.add(resource)) {
				writeReference(reference, resource);
			}
		}
		this.referenceSubjectQueue.clear();
		this.referenceQueue.clear();

		this.snakRdfConverter.writeAuxiliaryTriples();
	}

	void writeReference(Reference reference, Resource resource)
			throws RDFHandlerException {
		this.rdfWriter.writeTripleValueObject(resource, RdfWriter.RDF_TYPE,
				RdfWriter.WB_REFERENCE);
		for (SnakGroup snakGroup : reference.getSnakGroups()) {
			this.snakRdfConverter.setSnakContext(resource,
					PropertyContext.REFERENCE);
			for (Snak snak : snakGroup) {
				snak.accept(this.snakRdfConverter);
			}
			this.snakRdfConverter.setSnakContext(resource,
					PropertyContext.REFERENCE_SIMPLE);
			for (Snak snak : snakGroup) {
				snak.accept(this.snakRdfConverter);
			}
		}
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/SnakRdfConverter.java000066400000000000000000000247451444772566300313670ustar00rootroot00000000000000package org.wikidata.wdtk.rdf;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.ArrayList;
import java.util.List;

import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.NoValueSnak;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakVisitor;
import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak;
import org.wikidata.wdtk.datamodel.interfaces.ValueSnak;
import org.wikidata.wdtk.rdf.values.AnyValueConverter;

/**
 * Class to convert Wikibase snaks to RDF. The main entry point for this class
 * is {@link #writeSnak(Snak, Resource, PropertyContext)}. Alternatively, one
 * can use {@link #setSnakContext(Resource, PropertyContext)} and use the class
 * as a visitor. {@link SomeValueSnak} and {@link NoValueSnak} require further
 * RDF triples to be written; the same is true for some complex data values
 * that might be used with {@link ValueSnak}. In such cases, the class stores
 * the values to a buffer. Methods for writing additional triples for these
 * buffered values can be called later.
 *
 * @author Markus Kroetzsch
 *
 */
public class SnakRdfConverter implements SnakVisitor<Void> {

	/**
	 * Local value class for storing information about property restrictions.
	 *
	 * @author Markus Kroetzsch
	 *
	 */
	private static class PropertyRestriction {

		final Resource subject;
		final String propertyUri;
		final String rangeUri;

		PropertyRestriction(Resource subject, String propertyUri,
				String rangeUri) {
			this.subject = subject;
			this.propertyUri = propertyUri;
			this.rangeUri = rangeUri;
		}
	}

	static final Logger logger = LoggerFactory
			.getLogger(SnakRdfConverter.class);

	final AnyValueConverter valueRdfConverter;

	final RdfWriter rdfWriter;
	final PropertyRegister propertyRegister;
	final OwlDeclarationBuffer rdfConversionBuffer;

	final List<PropertyRestriction> someValuesQueue;

	Resource currentSubject;
	PropertyContext currentPropertyContext;
	boolean simple;

	public SnakRdfConverter(RdfWriter rdfWriter,
			OwlDeclarationBuffer owlDeclarationBuffer,
			PropertyRegister propertyRegister,
			AnyValueConverter valueRdfConverter) {
		this.rdfWriter = rdfWriter;
		this.rdfConversionBuffer = owlDeclarationBuffer;
		this.propertyRegister = propertyRegister;
		this.valueRdfConverter = valueRdfConverter;

		this.someValuesQueue = new ArrayList<>();
	}

	/**
	 * Writes the given snak for the given subject. The context defines if the
	 * snak is used as a main snak, qualifier, or in a reference. Some data
	 * might be buffered instead of being written immediately. The method
	 * {@link #writeAuxiliaryTriples()} needs to be called to serialize this
	 * additional data later on.
	 *
	 * @param snak
	 *            the snak to write
	 * @param subject
	 *            the resource that should be used as a subject of the
	 *            serialized triples
	 * @param propertyContext
	 *            the context in which the snak is used
	 */
	public void writeSnak(Snak snak, Resource subject,
			PropertyContext propertyContext) {
		setSnakContext(subject, propertyContext);
		snak.accept(this);
	}

	/**
	 * Sets the context in which snaks should be used. This is useful when
	 * converting many snaks that have the same context. In this case, one can
	 * set the context manually and use the converter as a {@link SnakVisitor}.
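	 *
	 * <p>
	 * Visitor-style usage sketch ({@code snakGroup} and {@code subject} are
	 * assumed to be given by the caller):
	 * </p>
	 *
	 * <pre>{@code
	 * snakRdfConverter.setSnakContext(subject, PropertyContext.QUALIFIER);
	 * for (Snak snak : snakGroup) {
	 *     snak.accept(snakRdfConverter); // one triple (or buffer entry) per snak
	 * }
	 * snakRdfConverter.writeAuxiliaryTriples(); // flush buffered triples
	 * }</pre>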
	 *
	 * @param subject
	 *            the resource that should be used as a subject of the
	 *            serialized triples
	 * @param propertyContext
	 *            the context in which the snaks that are to be converted are
	 *            used
	 */
	public void setSnakContext(Resource subject,
			PropertyContext propertyContext) {
		this.currentSubject = subject;
		this.currentPropertyContext = propertyContext;
		this.simple = (this.currentPropertyContext == PropertyContext.DIRECT)
				|| (this.currentPropertyContext == PropertyContext.VALUE_SIMPLE)
				|| (this.currentPropertyContext == PropertyContext.QUALIFIER_SIMPLE)
				|| (this.currentPropertyContext == PropertyContext.REFERENCE_SIMPLE);
	}

	@Override
	public Void visit(ValueSnak snak) {
		String propertyUri = Vocabulary.getPropertyUri(snak.getPropertyId(),
				this.currentPropertyContext);
		IRI property = this.rdfWriter.getUri(propertyUri);
		Value value = valueRdfConverter.getRdfValue(snak.getValue(),
				snak.getPropertyId(), this.simple);
		if (value == null) {
			// if there is no complex representation and simple = false
			return null;
		}

		try {
			this.rdfWriter.writeTripleValueObject(this.currentSubject,
					property, value);
		} catch (RDFHandlerException e) {
			throw new RuntimeException(e.toString(), e);
		}

		return null;
	}

	@Override
	public Void visit(SomeValueSnak snak) {
		String rangeUri = getRangeUri(snak.getPropertyId());
		if (rangeUri == null) {
			logger.error("Could not export SomeValueSnak for property "
					+ snak.getPropertyId().getId() + ": OWL range not known.");
			return null;
		}
		// SomeValueSnaks only have simple values not full values
		if (this.currentPropertyContext == PropertyContext.VALUE
				|| this.currentPropertyContext == PropertyContext.QUALIFIER
				|| this.currentPropertyContext == PropertyContext.REFERENCE) {
			return null;
		}
		String propertyUri = Vocabulary.getPropertyUri(snak.getPropertyId(),
				this.currentPropertyContext);
		Resource bnode = this.rdfWriter.getFreshBNode();

		try {
			this.rdfWriter.writeTripleValueObject(this.currentSubject,
					this.rdfWriter.getUri(propertyUri), bnode);
		} catch (RDFHandlerException e) {
			throw new RuntimeException(e.toString(), e);
		}

		return null;
	}

	@Override
	public Void visit(NoValueSnak snak) {
		if (simple) {
			if (getRangeUri(snak.getPropertyId()) == null) {
				logger.error("Could not export NoValueSnak for property "
						+ snak.getPropertyId().getId()
						+ ": OWL range not known.");
				return null;
			}
			String noValueClass;
			if ((this.currentPropertyContext == PropertyContext.QUALIFIER)
					|| (this.currentPropertyContext == PropertyContext.QUALIFIER_SIMPLE)) {
				noValueClass = Vocabulary.getPropertyUri(snak.getPropertyId(),
						PropertyContext.NO_QUALIFIER_VALUE);
			} else {
				noValueClass = Vocabulary.getPropertyUri(snak.getPropertyId(),
						PropertyContext.NO_VALUE);
			}

			// TODO add restrictions
			try {
				this.rdfWriter.writeTripleUriObject(this.currentSubject,
						RdfWriter.RDF_TYPE, noValueClass);
			} catch (RDFHandlerException e) {
				throw new RuntimeException(e.toString(), e);
			}
		}
		return null;
	}

	/**
	 * Writes all auxiliary triples that have been buffered recently. This
	 * includes OWL property restrictions but it also includes any auxiliary
	 * triples required by complex values that were used in snaks.
	 *
	 * @throws RDFHandlerException
	 *             if there was a problem writing the RDF triples
	 */
	public void writeAuxiliaryTriples() throws RDFHandlerException {
		for (PropertyRestriction pr : this.someValuesQueue) {
			writeSomeValueRestriction(pr.propertyUri, pr.rangeUri, pr.subject);
		}
		this.someValuesQueue.clear();

		this.valueRdfConverter.writeAuxiliaryTriples();
	}

	/**
	 * Writes a buffered some-value restriction.
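	 *
	 * <p>
	 * Schematically, the emitted triples have the following shape (shown here
	 * as comments in Turtle-like notation):
	 * </p>
	 *
	 * <pre>{@code
	 * // _:bnode rdf:type           owl:Restriction .
	 * // _:bnode owl:onProperty     <propertyUri> .
	 * // _:bnode owl:someValuesFrom <rangeUri> .
	 * }</pre>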
	 *
	 * @param propertyUri
	 *            URI of the property to which the restriction applies
	 * @param rangeUri
	 *            URI of the class or datatype to which the restriction applies
	 * @param bnode
	 *            blank node representing the restriction
	 * @throws RDFHandlerException
	 *             if there was a problem writing the RDF triples
	 */
	void writeSomeValueRestriction(String propertyUri, String rangeUri,
			Resource bnode) throws RDFHandlerException {
		this.rdfWriter.writeTripleValueObject(bnode, RdfWriter.RDF_TYPE,
				RdfWriter.OWL_RESTRICTION);
		this.rdfWriter.writeTripleUriObject(bnode, RdfWriter.OWL_ON_PROPERTY,
				propertyUri);
		this.rdfWriter.writeTripleUriObject(bnode,
				RdfWriter.OWL_SOME_VALUES_FROM, rangeUri);
	}

	/**
	 * Returns the class or datatype URI that best characterizes the range of
	 * the given property based on its datatype.
	 *
	 * @param propertyIdValue
	 *            the property for which to get a range
	 * @return the range URI or null if the datatype could not be identified.
	 */
	String getRangeUri(PropertyIdValue propertyIdValue) {
		String datatype = this.propertyRegister
				.getPropertyType(propertyIdValue);

		if (datatype == null)
			return null;

		switch (datatype) {
		case DatatypeIdValue.DT_MONOLINGUAL_TEXT:
			this.rdfConversionBuffer.addDatatypeProperty(propertyIdValue);
			return Vocabulary.RDF_LANG_STRING;
		case DatatypeIdValue.DT_STRING:
		case DatatypeIdValue.DT_EXTERNAL_ID:
		case DatatypeIdValue.DT_MATH:
			this.rdfConversionBuffer.addDatatypeProperty(propertyIdValue);
			return Vocabulary.XSD_STRING;
		case DatatypeIdValue.DT_COMMONS_MEDIA:
		case DatatypeIdValue.DT_GLOBE_COORDINATES:
		case DatatypeIdValue.DT_ITEM:
		case DatatypeIdValue.DT_PROPERTY:
		case DatatypeIdValue.DT_LEXEME:
		case DatatypeIdValue.DT_FORM:
		case DatatypeIdValue.DT_SENSE:
		case DatatypeIdValue.DT_TIME:
		case DatatypeIdValue.DT_URL:
		case DatatypeIdValue.DT_GEO_SHAPE:
		case DatatypeIdValue.DT_TABULAR_DATA:
		case DatatypeIdValue.DT_QUANTITY:
			this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
			return Vocabulary.OWL_THING;
		default:
			return null;
		}
	}

	/**
	 * Adds the given some-value restriction to the list of restrictions that
	 * should still be serialized. The given resource will be used as a
	 * subject.
	 *
	 * @param subject
	 *            the resource that represents the restriction
	 * @param propertyUri
	 *            URI of the property to which the restriction applies
	 * @param rangeUri
	 *            URI of the class or datatype to which the restriction applies
	 */
	void addSomeValuesRestriction(Resource subject, String propertyUri,
			String rangeUri) {
		this.someValuesQueue.add(new PropertyRestriction(subject, propertyUri,
				rangeUri));
	}

}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/Vocabulary.java000066400000000000000000000526151444772566300302550ustar00rootroot00000000000000package org.wikidata.wdtk.rdf;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
* #L% */ import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.*; import org.wikidata.wdtk.datamodel.interfaces.*; import org.wikidata.wdtk.wikibaseapi.GuidGenerator; import org.wikidata.wdtk.wikibaseapi.RandomGuidGenerator; /** * This class contains static methods and constants that define the various OWL * and RDF vocabularies that are used in the export. * * @author Markus Kroetzsch * */ public class Vocabulary { final static MessageDigest md; static { try { md = MessageDigest.getInstance("MD5"); } catch (NoSuchAlgorithmException e) { throw new RuntimeException( "Your Java does not support MD5 hashes. You should be concerned."); } } private final static GuidGenerator GUID_GENERATOR = new RandomGuidGenerator(); // Prefixes public static final String PREFIX_WIKIDATA_STATEMENT = "http://www.wikidata.org/entity/statement/"; public static final String PREFIX_PROPERTY = "http://www.wikidata.org/prop/"; public static final String PREFIX_PROPERTY_STATEMENT = "http://www.wikidata.org/prop/statement/"; public static final String PREFIX_PROPERTY_STATEMENT_VALUE = "http://www.wikidata.org/prop/statement/value/"; public static final String PREFIX_PROPERTY_DIRECT = "http://www.wikidata.org/prop/direct/"; public static final String PREFIX_PROPERTY_QUALIFIER = "http://www.wikidata.org/prop/qualifier/"; public static final String PREFIX_PROPERTY_QUALIFIER_VALUE = "http://www.wikidata.org/prop/qualifier/value/"; public static final String PREFIX_PROPERTY_REFERENCE = "http://www.wikidata.org/prop/reference/"; public static final String PREFIX_PROPERTY_REFERENCE_VALUE = "http://www.wikidata.org/prop/reference/value/"; public static final String PREFIX_GEO = "http://www.opengis.net/ont/geosparql#"; public static final String PREFIX_WIKIDATA_REFERENCE = "http://www.wikidata.org/reference/"; public static final String PREFIX_WIKIDATA_NO_VALUE = "http://www.wikidata.org/prop/novalue/"; public static final String PREFIX_WIKIDATA_NO_QUALIFIER_VALUE = PREFIX_WIKIDATA_NO_VALUE; public static final String PREFIX_WIKIDATA_VALUE = "http://www.wikidata.org/value/"; public static final String PREFIX_WBONTO = "http://wikiba.se/ontology#"; public static final String PREFIX_RDF = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; public static final String PREFIX_RDFS = "http://www.w3.org/2000/01/rdf-schema#"; public static final String PREFIX_OWL = "http://www.w3.org/2002/07/owl#"; public static final String PREFIX_XSD = "http://www.w3.org/2001/XMLSchema#"; public static final String PREFIX_SCHEMA = "http://schema.org/"; public static final String PREFIX_SKOS = "http://www.w3.org/2004/02/skos/core#"; public static final String PREFIX_PROV = "http://www.w3.org/ns/prov#"; // Vocabulary elements that are part of ontology language standards public static final String RDF_TYPE = PREFIX_RDF + "type"; public static final String RDF_LANG_STRING = PREFIX_RDF + "langString"; public static final String RDFS_LABEL = PREFIX_RDFS + "label"; public static final String RDFS_SEE_ALSO = PREFIX_RDFS + "seeAlso"; public static final String RDFS_LITERAL = PREFIX_RDFS + "Literal"; public static final String RDFS_SUBCLASS_OF = PREFIX_RDFS + "subClassOf"; public static final String RDFS_SUBPROPERTY_OF = PREFIX_RDFS + "subPropertyOf"; public static final String OWL_THING = PREFIX_OWL + "Thing"; public static final String OWL_CLASS = PREFIX_OWL + "Class"; public static final String OWL_OBJECT_PROPERTY = PREFIX_OWL + "ObjectProperty"; 
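	// The OWL terms below are used when declaring properties and classes in
	// the export, and the XSD terms when typing literals; see
	// SnakRdfConverter#getRangeUri for how Wikibase property datatypes are
	// mapped to rdf:langString, xsd:string, or owl:Thing as OWL ranges.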
	public static final String OWL_DATATYPE_PROPERTY = PREFIX_OWL
			+ "DatatypeProperty";
	public static final String OWL_RESTRICTION = PREFIX_OWL + "Restriction";
	public static final String OWL_SOME_VALUES_FROM = PREFIX_OWL
			+ "someValuesFrom";
	public static final String OWL_ON_PROPERTY = PREFIX_OWL + "onProperty";
	public static final String OWL_COMPLEMENT_OF = PREFIX_OWL + "complementOf";
	public static final String XSD_DOUBLE = PREFIX_XSD + "double";
	public static final String XSD_DECIMAL = PREFIX_XSD + "decimal";
	public static final String XSD_INT = PREFIX_XSD + "int";
	public static final String XSD_DATE = PREFIX_XSD + "date";
	public static final String XSD_G_YEAR = PREFIX_XSD + "gYear";
	public static final String XSD_G_YEAR_MONTH = PREFIX_XSD + "gYearMonth";
	public static final String XSD_DATETIME = PREFIX_XSD + "dateTime";
	public static final String XSD_STRING = PREFIX_XSD + "string";
	public static final String OGC_LOCATION = PREFIX_GEO + "wktLiteral";

	/*
	 * IRIs of property datatypes
	 */
	public static final String DT_ITEM = PREFIX_WBONTO + "WikibaseItem";
	public static final String DT_PROPERTY = PREFIX_WBONTO + "WikibaseProperty";
	public static final String DT_LEXEME = PREFIX_WBONTO + "WikibaseLexeme";
	public static final String DT_FORM = PREFIX_WBONTO + "WikibaseForm";
	public static final String DT_SENSE = PREFIX_WBONTO + "WikibaseSense";
	public static final String DT_MEDIA_INFO = PREFIX_WBONTO + "WikibaseMediaInfo";
	public static final String DT_STRING = PREFIX_WBONTO + "String";
	public static final String DT_URL = PREFIX_WBONTO + "Url";
	public static final String DT_COMMONS_MEDIA = PREFIX_WBONTO + "CommonsMedia";
	public static final String DT_TIME = PREFIX_WBONTO + "Time";
	public static final String DT_GLOBE_COORDINATES = PREFIX_WBONTO + "GlobeCoordinate";
	public static final String DT_QUANTITY = PREFIX_WBONTO + "Quantity";
	public static final String DT_MONOLINGUAL_TEXT = PREFIX_WBONTO + "Monolingualtext";
	public static final String DT_EXTERNAL_ID = PREFIX_WBONTO + "ExternalId";
	public static final String DT_MATH = PREFIX_WBONTO + "Math";
	public static final String DT_GEO_SHAPE = PREFIX_WBONTO + "GeoShape";
	public static final String DT_TABULAR_DATA = PREFIX_WBONTO + "TabularData";
	public static final String DT_EDTF = PREFIX_WBONTO + "Edtf";

	/**
	 * Hash map defining the OWL declaration types of the standard vocabulary.
	 * Declaring this explicitly is useful to obtain a self-contained RDF file,
	 * even when importing ontologies that provide further details on some of
	 * the vocabulary.
	 */
	static final Map<String, String> VOCABULARY_TYPES = new HashMap<>();

	// Vocabulary elements that are not declared by the ontology language

	/**
	 * Property "altLabel" of SKOS.
	 */
	public static final String SKOS_ALT_LABEL = PREFIX_SKOS + "altLabel";
	static {
		VOCABULARY_TYPES.put(SKOS_ALT_LABEL, OWL_DATATYPE_PROPERTY);
	}

	/**
	 * Property "about" of schema.org.
	 */
	public static final String SCHEMA_ABOUT = PREFIX_SCHEMA + "about";
	static {
		VOCABULARY_TYPES.put(SCHEMA_ABOUT, OWL_OBJECT_PROPERTY);
	}

	/**
	 * Class for Wikipedia articles.
	 */
	public static final String SCHEMA_ARTICLE = PREFIX_SCHEMA + "Article";
	static {
		VOCABULARY_TYPES.put(SCHEMA_ARTICLE, OWL_CLASS);
	}

	/**
	 * Property "description" of schema.org.
	 */
	public static final String SCHEMA_DESCRIPTION = PREFIX_SCHEMA
			+ "description";
	static {
		VOCABULARY_TYPES.put(SCHEMA_DESCRIPTION, OWL_DATATYPE_PROPERTY);
	}

	/**
	 * Property "inLanguage" of schema.org.
*/ public static final String SCHEMA_IN_LANGUAGE = PREFIX_SCHEMA + "inLanguage"; static { VOCABULARY_TYPES.put(SCHEMA_IN_LANGUAGE, OWL_DATATYPE_PROPERTY); } /** * Property "wasDerivedFrom" of the provenance ontology. */ public static final String PROV_WAS_DERIVED_FROM = PREFIX_PROV + "wasDerivedFrom"; static { VOCABULARY_TYPES.put(PROV_WAS_DERIVED_FROM, OWL_OBJECT_PROPERTY); } // Wikibase ontology /** * Class for Wikibase items. */ public static final String WB_ITEM = PREFIX_WBONTO + "Item"; static { VOCABULARY_TYPES.put(WB_ITEM, OWL_CLASS); } /** * Class for Wikibase references. */ public static final String WB_REFERENCE = PREFIX_WBONTO + "Reference"; static { VOCABULARY_TYPES.put(WB_REFERENCE, OWL_CLASS); } /** * Class for Wikibase properties. */ public static final String WB_PROPERTY = PREFIX_WBONTO + "Property"; static { VOCABULARY_TYPES.put(WB_PROPERTY, OWL_CLASS); } /** * Class for Wikibase statements. */ public static final String WB_STATEMENT = PREFIX_WBONTO + "Statement"; static { VOCABULARY_TYPES.put(WB_STATEMENT, OWL_CLASS); } /** * Class for Wikibase quantity values. */ public static final String WB_QUANTITY_VALUE = PREFIX_WBONTO + "QuantityValue"; static { VOCABULARY_TYPES.put(WB_QUANTITY_VALUE, OWL_CLASS); } /** * Class for Wikibase time values. */ public static final String WB_TIME_VALUE = PREFIX_WBONTO + "TimeValue"; static { VOCABULARY_TYPES.put(WB_TIME_VALUE, OWL_CLASS); } /** * Class for Wikibase globe coordinates values. */ public static final String WB_GLOBE_COORDINATES_VALUE = PREFIX_WBONTO + "GlobecoordinateValue"; static { VOCABULARY_TYPES.put(WB_GLOBE_COORDINATES_VALUE, OWL_CLASS); } /** * Property for defining the datatype of a Wikibase property. */ public static final String WB_PROPERTY_TYPE = PREFIX_WBONTO + "propertyType"; static { VOCABULARY_TYPES.put(WB_PROPERTY_TYPE, OWL_OBJECT_PROPERTY); } /** * Property for Wikibase rank. */ public static final String WB_RANK = PREFIX_WBONTO + "rank"; static { VOCABULARY_TYPES.put(WB_RANK, OWL_OBJECT_PROPERTY); } /** * Class for Wikibase best rank. */ public static final String WB_BEST_RANK = PREFIX_WBONTO + "BestRank"; static { VOCABULARY_TYPES.put(WB_BEST_RANK, OWL_CLASS); } /** * Property for defining the globe of a globe coordinates value. */ public static final String WB_GEO_GLOBE = PREFIX_WBONTO + "geoGlobe"; static { VOCABULARY_TYPES.put(WB_GEO_GLOBE, OWL_OBJECT_PROPERTY); } /** * Property for defining the latitude of a globe coordinates value. */ public static final String WB_GEO_LATITUDE = PREFIX_WBONTO + "geoLatitude"; static { VOCABULARY_TYPES.put(WB_GEO_LATITUDE, OWL_DATATYPE_PROPERTY); } /** * Property for defining the longitude of a globe coordinates value. */ public static final String WB_GEO_LONGITUDE = PREFIX_WBONTO + "geoLongitude"; static { VOCABULARY_TYPES.put(WB_GEO_LONGITUDE, OWL_DATATYPE_PROPERTY); } /** * Property for defining the precision of a globe coordinates value. */ public static final String WB_GEO_PRECISION = PREFIX_WBONTO + "geoPrecision"; static { VOCABULARY_TYPES.put(WB_GEO_PRECISION, OWL_DATATYPE_PROPERTY); } /** * Property for defining the time point of a time value. */ public static final String WB_TIME = PREFIX_WBONTO + "timeValue"; static { VOCABULARY_TYPES.put(WB_TIME, OWL_DATATYPE_PROPERTY); } /** * Property for defining the precision of a time value. */ public static final String WB_TIME_PRECISION = PREFIX_WBONTO + "timePrecision"; static { VOCABULARY_TYPES.put(WB_TIME_PRECISION, OWL_DATATYPE_PROPERTY); } /** * Property for defining the timezone of a time value. 
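	 * <p>
	 * The timezone is recorded as an offset in minutes from UTC; it is taken
	 * from {@code TimeValue#getTimezoneOffset()} when time values are
	 * serialized.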
*/ public static final String WB_TIME_TIMEZONE = PREFIX_WBONTO + "timeTimezone"; static { VOCABULARY_TYPES.put(WB_TIME_TIMEZONE, OWL_DATATYPE_PROPERTY); } /** * Property for defining the preferred calendar of a time value. */ public static final String WB_TIME_CALENDAR_MODEL = PREFIX_WBONTO + "timeCalendarModel"; static { VOCABULARY_TYPES.put(WB_TIME_CALENDAR_MODEL, OWL_OBJECT_PROPERTY); } /** * Property for defining the numeric value of a quantity value. */ public static final String WB_QUANTITY_AMOUNT = PREFIX_WBONTO + "quantityAmount"; static { VOCABULARY_TYPES.put(WB_QUANTITY_AMOUNT, OWL_DATATYPE_PROPERTY); } /** * Property for defining the lower bound of a quantity value. */ public static final String WB_QUANTITY_LOWER_BOUND = PREFIX_WBONTO + "quantityLowerBound"; static { VOCABULARY_TYPES.put(WB_QUANTITY_LOWER_BOUND, OWL_DATATYPE_PROPERTY); } /** * Property for defining the upper bound of a quantity value. */ public static final String WB_QUANTITY_UPPER_BOUND = PREFIX_WBONTO + "quantityUpperBound"; static { VOCABULARY_TYPES.put(WB_QUANTITY_UPPER_BOUND, OWL_DATATYPE_PROPERTY); } /** * Property for defining the unit of a quantity value. */ public static final String WB_QUANTITY_UNIT = PREFIX_WBONTO + "quantityUnit"; static { VOCABULARY_TYPES.put(WB_QUANTITY_UNIT, OWL_OBJECT_PROPERTY); } /** * IRI used as the unit of quantities that have no unit. */ public static final String WB_NO_UNIT = "http://www.wikidata.org/entity/Q199"; public static final String WB_NORMAL_RANK = PREFIX_WBONTO + "NormalRank"; public static final String WB_PREFERRED_RANK = PREFIX_WBONTO + "PreferredRank"; public static final String WB_DEPRECATED_RANK = PREFIX_WBONTO + "DeprecatedRank"; public static final String WB_BADGE = PREFIX_WBONTO + "badge"; static { VOCABULARY_TYPES.put(WB_BADGE, OWL_OBJECT_PROPERTY); } /** * Property for connecting Wikibase property entities to their direct value * properties (linking entities to their simplified statement's main * values). */ public static final String WB_DIRECT_CLAIM_PROP = PREFIX_WBONTO + "directClaim"; static { VOCABULARY_TYPES.put(WB_DIRECT_CLAIM_PROP, OWL_OBJECT_PROPERTY); } /** * Property for connecting Wikibase property entities to their statement * properties (linking entities to statements). */ public static final String WB_CLAIM_PROP = PREFIX_WBONTO + "claim"; static { VOCABULARY_TYPES.put(WB_CLAIM_PROP, OWL_OBJECT_PROPERTY); } /** * Property for connecting Wikibase property entities to their statement * main value properties (linking to a statement's main value). */ public static final String WB_STATEMENT_PROP = PREFIX_WBONTO + "statementProperty"; static { VOCABULARY_TYPES.put(WB_STATEMENT_PROP, OWL_OBJECT_PROPERTY); } /** * Property for connecting Wikibase property entities to their statement * main value properties, value version (linking to a statement's main * value). */ public static final String WB_STATEMENT_VALUE_PROP = PREFIX_WBONTO + "statementValue"; static { VOCABULARY_TYPES.put(WB_STATEMENT_VALUE_PROP, OWL_OBJECT_PROPERTY); } /** * Property for connecting Wikibase property entities to their qualifier * properties (linking to a statement's qualifier value). */ public static final String WB_QUALIFIER_PROP = PREFIX_WBONTO + "qualifier"; static { VOCABULARY_TYPES.put(WB_QUALIFIER_PROP, OWL_OBJECT_PROPERTY); } /** * Property for connecting Wikibase property entities to their qualifier * value properties (linking to a statement's qualifier value). 
	 */
	public static final String WB_QUALIFIER_VALUE_PROP = PREFIX_WBONTO
			+ "qualifierValue";
	static {
		VOCABULARY_TYPES.put(WB_QUALIFIER_VALUE_PROP, OWL_OBJECT_PROPERTY);
	}

	/**
	 * Property for connecting Wikibase property entities to their reference
	 * properties.
	 */
	public static final String WB_REFERENCE_PROP = PREFIX_WBONTO + "reference";
	static {
		VOCABULARY_TYPES.put(WB_REFERENCE_PROP, OWL_OBJECT_PROPERTY);
	}

	/**
	 * Property for connecting Wikibase property entities to their reference
	 * value properties.
	 */
	public static final String WB_REFERENCE_VALUE_PROP = PREFIX_WBONTO
			+ "referenceValue";
	static {
		VOCABULARY_TYPES.put(WB_REFERENCE_VALUE_PROP, OWL_OBJECT_PROPERTY);
	}

	/**
	 * Property for connecting Wikibase property entities to their main
	 * no-value classes.
	 */
	public static final String WB_NO_VALUE_PROP = PREFIX_WBONTO + "novalue";
	static {
		VOCABULARY_TYPES.put(WB_NO_VALUE_PROP, OWL_OBJECT_PROPERTY);
	}

	/**
	 * Property for connecting Wikibase property entities to their no-value
	 * classes for qualifiers.
	 */
	public static final String WB_NO_QUALIFIER_VALUE_PROP = WB_NO_VALUE_PROP;
	static {
		VOCABULARY_TYPES.put(WB_NO_QUALIFIER_VALUE_PROP, OWL_OBJECT_PROPERTY);
	}

	/**
	 * Returns a map that defines OWL types for all known vocabulary elements.
	 *
	 * @return a map from vocabulary URIs to OWL type URIs
	 */
	public static Map<String, String> getKnownVocabularyTypes() {
		return Collections.unmodifiableMap(VOCABULARY_TYPES);
	}

	/**
	 * Get the URI for the given statement.
	 *
	 * @param statement
	 *            the statement for which to create a URI
	 * @return the URI
	 */
	public static String getStatementUri(Statement statement) {
		String statementId = statement.getStatementId();
		if (statementId == null || statementId.isEmpty()) {
			statementId = GUID_GENERATOR.freshStatementId(statement.getSubject().getId());
		}
		return PREFIX_WIKIDATA_STATEMENT + statementId.replaceFirst("\\$", "-");
	}

	/**
	 * Get the URI for the given property in the given context.
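	 * <p>
	 * For example (illustrative, using Wikidata's P31):
	 * {@code getPropertyUri(P31, PropertyContext.DIRECT)} yields
	 * {@code http://www.wikidata.org/prop/direct/P31}, while
	 * {@code PropertyContext.QUALIFIER_SIMPLE} yields
	 * {@code http://www.wikidata.org/prop/qualifier/P31}.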
* * @param propertyIdValue * the property id for which to create a URI * @param propertyContext * the context for which the URI will be needed * @return the URI */ public static String getPropertyUri(PropertyIdValue propertyIdValue, PropertyContext propertyContext) { switch (propertyContext) { case DIRECT: return PREFIX_PROPERTY_DIRECT + propertyIdValue.getId(); case STATEMENT: return PREFIX_PROPERTY + propertyIdValue.getId(); case VALUE_SIMPLE: return PREFIX_PROPERTY_STATEMENT + propertyIdValue.getId(); case VALUE: return PREFIX_PROPERTY_STATEMENT_VALUE + propertyIdValue.getId(); case QUALIFIER: return PREFIX_PROPERTY_QUALIFIER_VALUE + propertyIdValue.getId(); case QUALIFIER_SIMPLE: return PREFIX_PROPERTY_QUALIFIER + propertyIdValue.getId(); case REFERENCE: return PREFIX_PROPERTY_REFERENCE_VALUE + propertyIdValue.getId(); case REFERENCE_SIMPLE: return PREFIX_PROPERTY_REFERENCE + propertyIdValue.getId(); case NO_VALUE: return PREFIX_WIKIDATA_NO_VALUE + propertyIdValue.getId(); case NO_QUALIFIER_VALUE: return PREFIX_WIKIDATA_NO_QUALIFIER_VALUE + propertyIdValue.getId(); default: return null; } } public static String getReferenceUri(Reference reference) { final String hash = reference.getHash(); if (hash != null) { return PREFIX_WIKIDATA_REFERENCE + hash; } md.reset(); reference.getSnakGroups().stream() .flatMap(g -> g.getSnaks().stream()) .map(Objects::hashCode) .sorted() .forEach(i -> updateMessageDigestWithInt(md, i)); return PREFIX_WIKIDATA_REFERENCE + bytesToHex(md.digest()); } public static String getTimeValueUri(TimeValue value) { md.reset(); updateMessageDigestWithLong(md, value.getYear()); md.update(value.getMonth()); md.update(value.getDay()); md.update(value.getHour()); md.update(value.getMinute()); md.update(value.getSecond()); md.update(value.getPrecision()); updateMessageDigestWithString(md, value.getPreferredCalendarModel()); updateMessageDigestWithInt(md, value.getBeforeTolerance()); updateMessageDigestWithInt(md, value.getAfterTolerance()); updateMessageDigestWithInt(md, value.getTimezoneOffset()); return PREFIX_WIKIDATA_VALUE + bytesToHex(md.digest()); } public static String getGlobeCoordinatesValueUri(GlobeCoordinatesValue value) { md.reset(); updateMessageDigestWithString(md, value.getGlobe()); updateMessageDigestWithLong(md, Double.valueOf(value.getLatitude()) .hashCode()); updateMessageDigestWithLong(md, Double.valueOf(value.getLongitude()) .hashCode()); updateMessageDigestWithLong(md, Double.valueOf(value.getPrecision()) .hashCode()); return PREFIX_WIKIDATA_VALUE + bytesToHex(md.digest()); } public static String getQuantityValueUri(QuantityValue value) { md.reset(); updateMessageDigestWithInt(md, value.getNumericValue().hashCode()); if(value.getLowerBound() != null) { updateMessageDigestWithInt(md, value.getLowerBound().hashCode()); } if(value.getUpperBound() != null) { updateMessageDigestWithInt(md, value.getUpperBound().hashCode()); } updateMessageDigestWithInt(md, value.getUnit().hashCode()); return PREFIX_WIKIDATA_VALUE + bytesToHex(md.digest()); } public static String getStatementRankUri(StatementRank rank) { switch (rank) { case NORMAL: return Vocabulary.WB_NORMAL_RANK; case PREFERRED: return Vocabulary.WB_PREFERRED_RANK; case DEPRECATED: return Vocabulary.WB_DEPRECATED_RANK; default: throw new IllegalArgumentException(); } } static ByteBuffer longByteBuffer = ByteBuffer.allocate(Long.SIZE / 8); static void updateMessageDigestWithLong(MessageDigest md, long x) { longByteBuffer.putLong(0, x); longByteBuffer.rewind(); // important! 
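		// rewind() resets the buffer position, which the previous md.update()
		// call left at the limit, so that the update below reads all eight
		// bytes written by putLong(0, x). Note that the shared static buffers
		// (and the shared MessageDigest) make these hash helpers unsafe for
		// concurrent use.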
		md.update(longByteBuffer);
	}

	static ByteBuffer intByteBuffer = ByteBuffer.allocate(Integer.SIZE / 8);

	static void updateMessageDigestWithInt(MessageDigest md, int x) {
		intByteBuffer.putInt(0, x);
		intByteBuffer.rewind(); // important!
		md.update(intByteBuffer);
	}

	static void updateMessageDigestWithString(MessageDigest md, String s) {
		if (s == null) {
			return;
		}
		md.update(s.getBytes(StandardCharsets.UTF_8));
	}

	final protected static char[] hexArray = "0123456789abcdef".toCharArray();

	static String bytesToHex(byte[] bytes) {
		char[] hexChars = new char[bytes.length * 2];
		for (int j = 0; j < bytes.length; j++) {
			int v = bytes[j] & 0xFF;
			hexChars[j * 2] = hexArray[v >>> 4];
			hexChars[j * 2 + 1] = hexArray[v & 0x0F];
		}
		return new String(hexChars);
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/package-info.java000066400000000000000000000013721444772566300304640ustar00rootroot00000000000000/**
 * Serializer for RDF
 *
 * @author Michael Günther
 *
 */
package org.wikidata.wdtk.rdf;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/000077500000000000000000000000001444772566300265715ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/AbstractValueConverter.java000066400000000000000000000044451444772566300340130ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;

public abstract class AbstractValueConverter<V extends org.wikidata.wdtk.datamodel.interfaces.Value>
		implements ValueConverter<V> {

	final PropertyRegister propertyRegister;
	final RdfWriter rdfWriter;
	final OwlDeclarationBuffer rdfConversionBuffer;

	static final Logger logger = LoggerFactory.getLogger(ValueConverter.class);

	public AbstractValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		this.rdfWriter = rdfWriter;
		this.propertyRegister = propertyRegister;
		this.rdfConversionBuffer = rdfConversionBuffer;
	}

	@Override
	public void writeAuxiliaryTriples() throws RDFHandlerException {
		// default implementation: no auxiliary triples
	}

	/**
	 * Logs a message for a case where the value of a property does not fit to
	 * its declared datatype.
	 *
	 * @param propertyIdValue
	 *            the property that was used
	 * @param datatype
	 *            the declared type of the property
	 * @param valueType
	 *            a string to denote the type of value
	 */
	protected void logIncompatibleValueError(PropertyIdValue propertyIdValue,
			String datatype, String valueType) {
		logger.warn("Property " + propertyIdValue.getId() + " has type \""
				+ datatype + "\" but a value of type " + valueType
				+ ". Data ignored.");
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/AnyValueConverter.java000066400000000000000000000121301444772566300330560ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.QuantityValue;
import org.wikidata.wdtk.datamodel.interfaces.StringValue;
import org.wikidata.wdtk.datamodel.interfaces.TimeValue;
import org.wikidata.wdtk.datamodel.interfaces.UnsupportedValue;
import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;

/**
 * Class to convert Wikibase data values to RDF. The class is a visitor that
 * computes an RDF value (URI or literal) to represent any kind of Wikibase
 * data value. Some values are complex and require further RDF triples to be
 * written. In such cases, the class stores the values to a buffer.
 * Methods for writing additional triples for these buffered values can be
 * called later.
 *
 * @author Markus Kroetzsch
 */
public class AnyValueConverter implements
		ValueConverter<org.wikidata.wdtk.datamodel.interfaces.Value>,
		ValueVisitor<Value> {

	final private RdfWriter rdfWriter;

	final EntityIdValueConverter entityIdValueConverter;
	final StringValueConverter stringValueConverter;
	final TimeValueConverter timeValueConverter;
	final GlobeCoordinatesValueConverter globeCoordinatesValueConverter;
	final QuantityValueConverter quantityValueConverter;
	final MonolingualTextValueConverter monolingualTextValueConverter;

	PropertyIdValue currentPropertyIdValue;
	boolean simple;

	static final Logger logger = LoggerFactory
			.getLogger(AnyValueConverter.class);

	public AnyValueConverter(RdfWriter rdfWriter,
			OwlDeclarationBuffer rdfConversionBuffer,
			PropertyRegister propertyRegister) {
		this.rdfWriter = rdfWriter;

		this.entityIdValueConverter = new EntityIdValueConverter(rdfWriter,
				propertyRegister, rdfConversionBuffer);
		this.stringValueConverter = new StringValueConverter(rdfWriter,
				propertyRegister, rdfConversionBuffer);
		this.timeValueConverter = new TimeValueConverter(rdfWriter,
				propertyRegister, rdfConversionBuffer);
		this.globeCoordinatesValueConverter = new GlobeCoordinatesValueConverter(
				rdfWriter, propertyRegister, rdfConversionBuffer);
		this.quantityValueConverter = new QuantityValueConverter(rdfWriter,
				propertyRegister, rdfConversionBuffer);
		this.monolingualTextValueConverter = new MonolingualTextValueConverter(
				rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	@Override
	public Value getRdfValue(
			org.wikidata.wdtk.datamodel.interfaces.Value value,
			PropertyIdValue propertyIdValue, boolean simple) {
		this.currentPropertyIdValue = propertyIdValue;
		this.simple = simple;
		return value.accept(this);
	}

	@Override
	public Value visit(EntityIdValue value) {
		return this.entityIdValueConverter.getRdfValue(value,
				this.currentPropertyIdValue, this.simple);
	}

	@Override
	public Value visit(GlobeCoordinatesValue value) {
		return this.globeCoordinatesValueConverter.getRdfValue(value,
				this.currentPropertyIdValue, this.simple);
	}

	@Override
	public Value visit(MonolingualTextValue value) {
		return this.monolingualTextValueConverter.getRdfValue(value,
				this.currentPropertyIdValue, this.simple);
	}

	@Override
	public Value visit(QuantityValue value) {
		return this.quantityValueConverter.getRdfValue(value,
				this.currentPropertyIdValue, this.simple);
	}

	@Override
	public Value visit(StringValue value) {
		return this.stringValueConverter.getRdfValue(value,
				this.currentPropertyIdValue, this.simple);
	}

	@Override
	public Value visit(TimeValue value) {
		return this.timeValueConverter.getRdfValue(value,
				this.currentPropertyIdValue, this.simple);
	}

	@Override
	public void writeAuxiliaryTriples() throws RDFHandlerException {
		this.entityIdValueConverter.writeAuxiliaryTriples();
		this.stringValueConverter.writeAuxiliaryTriples();
		this.globeCoordinatesValueConverter.writeAuxiliaryTriples();
		this.timeValueConverter.writeAuxiliaryTriples();
		this.quantityValueConverter.writeAuxiliaryTriples();
	}

	@Override
	public Value visit(UnsupportedValue value) {
		return this.rdfWriter.getFreshBNode();
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/BufferedValueConverter.java000066400000000000000000000051021444772566300337620ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;

public abstract class BufferedValueConverter<V extends org.wikidata.wdtk.datamodel.interfaces.Value>
		extends AbstractValueConverter<V> {

	final List<V> valueQueue = new ArrayList<>();
	final List<Resource> valueSubjectQueue = new ArrayList<>();
	final HashSet<Resource> declaredValues = new HashSet<>();

	public BufferedValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		super(rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	/**
	 * Adds the given value to the list of values that should still be
	 * serialized. The given RDF resource will be used as a subject.
	 *
	 * @param value
	 *            the value to be serialized
	 * @param resource
	 *            the RDF resource that is used as a subject for serialization
	 */
	void addValue(V value, Resource resource) {
		this.valueQueue.add(value);
		this.valueSubjectQueue.add(resource);
	}

	@Override
	public void writeAuxiliaryTriples() throws RDFHandlerException {
		Iterator<V> valueIterator = this.valueQueue.iterator();
		for (Resource resource : this.valueSubjectQueue) {
			if (!this.declaredValues.add(resource)) {
				valueIterator.next();
				continue;
			}
			writeValue(valueIterator.next(), resource);
		}
		this.valueSubjectQueue.clear();
		this.valueQueue.clear();
	}

	/**
	 * Writes the triples for a single value, using the given resource as
	 * subject.
	 *
	 * @param value
	 *            the buffered value to serialize
	 * @param resource
	 *            the subject resource under which the value was buffered
	 * @throws RDFHandlerException
	 *             if there was a problem writing the RDF triples
	 */
	public abstract void writeValue(V value, Resource resource)
			throws RDFHandlerException;
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/EntityIdValueConverter.java000066400000000000000000000042371444772566300337770ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.model.Value;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;

public class EntityIdValueConverter extends
		AbstractValueConverter<EntityIdValue> {

	public EntityIdValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		super(rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	@Override
	public Value getRdfValue(EntityIdValue value,
			PropertyIdValue propertyIdValue, boolean simple) {
		String datatype = this.propertyRegister
				.setPropertyTypeFromEntityIdValue(propertyIdValue, value);
		if (datatype == null) {
			// we failed to guess the datatype: represent the value by a blank node
			return this.rdfWriter.getFreshBNode();
		}
		switch (datatype) {
		case DatatypeIdValue.DT_ITEM:
		case DatatypeIdValue.DT_PROPERTY:
		case DatatypeIdValue.DT_LEXEME:
		case DatatypeIdValue.DT_FORM:
		case DatatypeIdValue.DT_SENSE:
			if (simple) {
				this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
				return this.rdfWriter.getUri(value.getIri());
			} else {
				return null;
			}
		default:
			logIncompatibleValueError(propertyIdValue, datatype, "entity");
			return this.rdfWriter.getFreshBNode();
		}
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/GlobeCoordinatesValueConverter.java000066400000000000000000000075741444772566300354760ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.model.Literal;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;
import org.wikidata.wdtk.rdf.Vocabulary;

public class GlobeCoordinatesValueConverter extends
		BufferedValueConverter<GlobeCoordinatesValue> {

	public GlobeCoordinatesValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		super(rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	@Override
	public Value getRdfValue(GlobeCoordinatesValue value,
			PropertyIdValue propertyIdValue, boolean simple) {
		String datatype = this.propertyRegister
				.setPropertyTypeFromGlobeCoordinatesValue(propertyIdValue,
						value);
		switch (datatype) {
		case DatatypeIdValue.DT_GLOBE_COORDINATES:
			if (simple) {
				return getSimpleGeoValue(value);
			} else {
				IRI valueUri = this.rdfWriter.getUri(Vocabulary
						.getGlobeCoordinatesValueUri(value));
				this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
				addValue(value, valueUri);
				return valueUri;
			}
		default:
			logIncompatibleValueError(propertyIdValue, datatype,
					"globe coordinates");
			return null;
		}
	}

	@Override
	public void writeValue(GlobeCoordinatesValue value, Resource resource)
			throws RDFHandlerException {
		this.rdfWriter.writeTripleValueObject(resource, RdfWriter.RDF_TYPE,
				RdfWriter.WB_GLOBE_COORDINATES_VALUE);

		this.rdfWriter.writeTripleLiteralObject(resource,
				RdfWriter.WB_GEO_LATITUDE,
				Double.valueOf(value.getLatitude()).toString(),
				RdfWriter.XSD_DOUBLE);
		this.rdfWriter.writeTripleLiteralObject(resource,
				RdfWriter.WB_GEO_LONGITUDE,
				Double.valueOf(value.getLongitude()).toString(),
				RdfWriter.XSD_DOUBLE);
		this.rdfWriter.writeTripleLiteralObject(resource,
				RdfWriter.WB_GEO_PRECISION,
				Double.valueOf(value.getPrecision()).toString(),
				RdfWriter.XSD_DOUBLE);

		IRI globeUri;
		try {
			globeUri = this.rdfWriter.getUri(value.getGlobe());
		} catch (IllegalArgumentException e) {
			logger.warn("Invalid globe URI \"" + value.getGlobe()
					+ "\". Assuming Earth ("
					+ GlobeCoordinatesValue.GLOBE_EARTH + ").");
			globeUri = this.rdfWriter.getUri(GlobeCoordinatesValue.GLOBE_EARTH);
		}
		this.rdfWriter.writeTripleValueObject(resource, RdfWriter.WB_GEO_GLOBE,
				globeUri);
	}

	private Literal getSimpleGeoValue(GlobeCoordinatesValue value) {
		StringBuilder builder = new StringBuilder();
		if (!value.getGlobe().equals(GlobeCoordinatesValue.GLOBE_EARTH)) {
			builder.append("<")
					.append(value.getGlobe().replace(">", "%3E"))
					.append("> ");
		}
		builder.append("Point(");
		builder.append(value.getLongitude());
		builder.append(" ");
		builder.append(value.getLatitude());
		builder.append(")");
		return this.rdfWriter.getLiteral(builder.toString(),
				RdfWriter.OGC_LOCATION);
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/MonolingualTextValueConverter.java000066400000000000000000000037771444772566300353770ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.model.Value;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfConverter;
import org.wikidata.wdtk.rdf.RdfWriter;

public class MonolingualTextValueConverter extends
		AbstractValueConverter<MonolingualTextValue> {

	public MonolingualTextValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		super(rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	@Override
	public Value getRdfValue(MonolingualTextValue value,
			PropertyIdValue propertyIdValue, boolean simple) {
		String datatype = this.propertyRegister
				.setPropertyTypeFromMonolingualTextValue(propertyIdValue,
						value);
		switch (datatype) {
		case DatatypeIdValue.DT_MONOLINGUAL_TEXT:
			if (simple) {
				this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
				return RdfConverter.getMonolingualTextValueLiteral(value,
						this.rdfWriter);
			} else {
				return null; // or blank node
			}
		default:
			logIncompatibleValueError(propertyIdValue, datatype,
					"monolingual text");
			return null;
		}
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/QuantityValueConverter.java000066400000000000000000000063221444772566300340630ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.QuantityValue;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;
import org.wikidata.wdtk.rdf.Vocabulary;

public class QuantityValueConverter extends
		BufferedValueConverter<QuantityValue> {

	public QuantityValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		super(rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	@Override
	public Value getRdfValue(QuantityValue value,
			PropertyIdValue propertyIdValue, boolean simple) {
		String datatype = this.propertyRegister
				.setPropertyTypeFromQuantityValue(propertyIdValue, value);
		switch (datatype) {
		case DatatypeIdValue.DT_QUANTITY:
			if (simple) {
				this.rdfConversionBuffer.addDatatypeProperty(propertyIdValue);
				return this.rdfWriter.getLiteral(value.getNumericValue()
						.toPlainString(), RdfWriter.XSD_DECIMAL);
			} else {
				IRI valueUri = this.rdfWriter.getUri(Vocabulary
						.getQuantityValueUri(value));
				this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
				addValue(value, valueUri);
				return valueUri;
			}
		default:
			logIncompatibleValueError(propertyIdValue, datatype, "quantity");
			return null;
		}
	}

	@Override
	public void writeValue(QuantityValue value, Resource resource)
			throws RDFHandlerException {
		this.rdfWriter.writeTripleValueObject(resource, RdfWriter.RDF_TYPE,
				RdfWriter.WB_QUANTITY_VALUE);

		this.rdfWriter.writeTripleLiteralObject(resource,
				RdfWriter.WB_QUANTITY_AMOUNT,
				value.getNumericValue().toPlainString(), RdfWriter.XSD_DECIMAL);
		if (value.getLowerBound() != null) {
			this.rdfWriter.writeTripleLiteralObject(resource,
					RdfWriter.WB_QUANTITY_LOWER_BOUND,
					value.getLowerBound().toPlainString(),
					RdfWriter.XSD_DECIMAL);
		}
		if (value.getUpperBound() != null) {
			this.rdfWriter.writeTripleLiteralObject(resource,
					RdfWriter.WB_QUANTITY_UPPER_BOUND,
					value.getUpperBound().toPlainString(),
					RdfWriter.XSD_DECIMAL);
		}
		String unitIri = ("1".equals(value.getUnit())) ? Vocabulary.WB_NO_UNIT
				: value.getUnit();
		this.rdfWriter.writeTripleUriObject(resource,
				RdfWriter.WB_QUANTITY_UNIT, unitIri);
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/StringValueConverter.java000066400000000000000000000071411444772566300335130ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.model.Value;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.StringValue;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class StringValueConverter extends AbstractValueConverter<StringValue> {

	public StringValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		super(rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	@Override
	public Value getRdfValue(StringValue value,
			PropertyIdValue propertyIdValue, boolean simple) {
		String datatype = this.propertyRegister.setPropertyTypeFromStringValue(
				propertyIdValue, value);

		String valueUriString = null;
		switch (datatype) {
		case DatatypeIdValue.DT_STRING:
		case DatatypeIdValue.DT_EXTERNAL_ID:
		case DatatypeIdValue.DT_MATH:
			valueUriString = null;
			break;
		case DatatypeIdValue.DT_COMMONS_MEDIA:
			if (simple) {
				valueUriString = getCommonsFileUrl(value.getString());
			}
			break;
		case DatatypeIdValue.DT_URL:
			if (simple) {
				valueUriString = value.getString();
			}
			break;
		case DatatypeIdValue.DT_GEO_SHAPE:
		case DatatypeIdValue.DT_TABULAR_DATA:
			if (simple) {
				valueUriString = getCommonsDataUrl(value.getString());
			}
			break;
		default:
			logIncompatibleValueError(propertyIdValue, datatype, "string");
			return null;
		}

		if (valueUriString == null) {
			if (simple) {
				this.rdfConversionBuffer.addDatatypeProperty(propertyIdValue);
				return this.rdfWriter.getLiteral(value.getString());
			} else {
				return null; // or blank node
			}
		} else {
			this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
			try {
				return this.rdfWriter.getUri(valueUriString);
			} catch (IllegalArgumentException e) {
				logger.error("Invalid URI \"" + valueUriString
						+ "\". Not serializing value.");
				return null;
			}
		}
	}

	/**
	 * Returns the Wikimedia Commons page URL for the given page name.
	 *
	 * @param pageName
	 *            name of a page on Wikimedia Commons
	 * @return URL of the page
	 */
	static String getCommonsFileUrl(String pageName) {
		try {
			return "http://commons.wikimedia.org/wiki/File:"
					+ URLEncoder.encode(pageName.replace(' ', '_'),
							StandardCharsets.UTF_8.toString());
		} catch (UnsupportedEncodingException e) {
			// can't happen
			throw new IllegalStateException(e);
		}
	}

	static String getCommonsDataUrl(String pageName) {
		try {
			return "http://commons.wikimedia.org/data/main/"
					+ URLEncoder.encode(pageName.replace(' ', '_'),
							StandardCharsets.UTF_8.toString());
		} catch (UnsupportedEncodingException e) {
			throw new IllegalStateException(e);
		}
	}
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/TimeValueConverter.java000066400000000000000000000140201444772566300332160ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.eclipse.rdf4j.model.Literal;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.TimeValue;
import org.wikidata.wdtk.rdf.OwlDeclarationBuffer;
import org.wikidata.wdtk.rdf.PropertyRegister;
import org.wikidata.wdtk.rdf.RdfWriter;
import org.wikidata.wdtk.rdf.Vocabulary;

import java.time.Month;

public class TimeValueConverter extends BufferedValueConverter<TimeValue> {

	public TimeValueConverter(RdfWriter rdfWriter,
			PropertyRegister propertyRegister,
			OwlDeclarationBuffer rdfConversionBuffer) {
		super(rdfWriter, propertyRegister, rdfConversionBuffer);
	}

	@Override
	public Value getRdfValue(TimeValue value, PropertyIdValue propertyIdValue,
			boolean simple) {
		String datatype = this.propertyRegister.setPropertyTypeFromTimeValue(
				propertyIdValue, value);
		switch (datatype) {
		case DatatypeIdValue.DT_TIME:
			if (simple) {
				this.rdfConversionBuffer.addDatatypeProperty(propertyIdValue);
				return TimeValueConverter.getTimeLiteral(value, this.rdfWriter);
			} else {
				IRI valueUri = this.rdfWriter.getUri(Vocabulary
						.getTimeValueUri(value));
				this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
				addValue(value, valueUri);
				return valueUri;
			}
		default:
			logIncompatibleValueError(propertyIdValue, datatype, "time");
			return null;
		}
	}

	/**
	 * Write the auxiliary RDF data for encoding the given value.
	 *
	 * @param value
	 *            the value to write
	 * @param resource
	 *            the (subject) URI to use to represent this value in RDF
	 * @throws RDFHandlerException
	 *             if there was a problem writing the RDF triples
	 */
	@Override
	public void writeValue(TimeValue value, Resource resource)
			throws RDFHandlerException {
		this.rdfWriter.writeTripleValueObject(resource, RdfWriter.RDF_TYPE,
				RdfWriter.WB_TIME_VALUE);

		this.rdfWriter.writeTripleValueObject(resource, RdfWriter.WB_TIME,
				TimeValueConverter.getTimeLiteral(value, this.rdfWriter));
		this.rdfWriter.writeTripleIntegerObject(resource,
				RdfWriter.WB_TIME_PRECISION, value.getPrecision());
		this.rdfWriter.writeTripleIntegerObject(resource,
				RdfWriter.WB_TIME_TIMEZONE, value.getTimezoneOffset());
		this.rdfWriter.writeTripleUriObject(resource,
				RdfWriter.WB_TIME_CALENDAR_MODEL,
				value.getPreferredCalendarModel());
	}

	/**
	 * Returns the RDF literal to encode the time component of a given time
	 * value.
	 * <p>
* Times with limited precision are encoded using limited-precision XML * Schema datatypes, such as gYear, if available. Wikidata encodes the year * 1BCE as 0000, while XML Schema, even in version 2, does not allow 0000 * and interprets -0001 as 1BCE. Thus all negative years must be shifted by * 1, but we only do this if the year is precise. * * @param value * the value to convert * @param rdfWriter * the object to use for creating the literal * @return the RDF literal */ private static Literal getTimeLiteral(TimeValue value, RdfWriter rdfWriter) { /* we need to check for year zero before julian date conversion, since that can change the year (if the date is 1 Jan 1 for example) */ boolean yearZero = value.getYear() == 0; TimeValue gregorian = value.toGregorian(); if(gregorian != null) { value = gregorian; } long year = value.getYear(); /* https://www.mediawiki.org/wiki/Wikibase/DataModel/JSON#time says the following about the JSON mapping: The format used for Gregorian and Julian dates use a notation resembling ISO 8601. E.g. “+1994-01-01T00:00:00Z”. The year is represented by at least four digits, zeros are added on the left side as needed. Years BCE are represented as negative numbers, using the historical numbering, in which year 0 is undefined, and the year 1 BCE is represented as -0001, the year 44 BCE is represented as -0044, etc., like XSD 1.0 (ISO 8601:1988) does. In contrast, the RDF mapping relies on XSD 1.1 (ISO 8601:2004) dates that use the proleptic Gregorian calendar and astronomical year numbering, where the year 1 BCE is represented as +0000 and the year 44 BCE is represented as -0043. */ // map negative dates from historical numbering to XSD 1.1 if (year < 0 && value.getPrecision() >= TimeValue.PREC_YEAR) { year++; } byte month = value.getMonth(); byte day = value.getDay(); if ((value.getPrecision() < TimeValue.PREC_MONTH || month == 0) && !yearZero) { month = 1; } if ((value.getPrecision() < TimeValue.PREC_DAY || day == 0) && !yearZero) { day = 1; } if (value.getPrecision() >= TimeValue.PREC_DAY && !yearZero) { int maxDays = Byte.MAX_VALUE; if (month > 0 && month < 13) { boolean leap = (year % 4L) == 0L && (year % 100L != 0L || year % 400L == 0L); maxDays = Month.of(month).length(leap); } if (day > maxDays) { day = (byte)maxDays; } } String minus = year < 0 ? "-" : ""; String timestamp = String.format("%s%04d-%02d-%02dT%02d:%02d:%02dZ", minus, Math.abs(year), month, day, value.getHour(), value.getMinute(), value.getSecond()); if (yearZero) { return rdfWriter.getLiteral("+" + timestamp); } return rdfWriter.getLiteral(timestamp, RdfWriter.XSD_DATETIME); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/ValueConverter.java000066400000000000000000000037671444772566300324150ustar00rootroot00000000000000package org.wikidata.wdtk.rdf.values; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 * #L%
 */

import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;

/**
 * Interface for classes that convert one type of Wikibase data value into
 * RDF.
 *
 * @author Markus Kroetzsch
 *
 * @param <V>
 *            the type of Wikibase value converted by this class
 */
public interface ValueConverter<V extends org.wikidata.wdtk.datamodel.interfaces.Value> {

	/**
	 * Returns an RDF value that should be used to represent the given Wikibase
	 * data value in RDF.
	 *
	 * @param value
	 *            the value to convert
	 * @param propertyIdValue
	 *            the property for which this value was used; this provides
	 *            important context information for the conversion
	 * @param simple
	 *            if true, use a simplified conversion to RDF and do not
	 *            convert values that are inherently complex
	 * @return the RDF value to use for representing the data value in RDF
	 */
	Value getRdfValue(V value, PropertyIdValue propertyIdValue, boolean simple);

	/**
	 * Writes auxiliary triples that might be needed to encode a Wikibase value
	 * in RDF.
	 *
	 * @throws RDFHandlerException
	 *             if there is a problem writing the triples
	 */
	void writeAuxiliaryTriples() throws RDFHandlerException;
}
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/values/package-info.java000066400000000000000000000014751444772566300317670ustar00rootroot00000000000000/**
 * Package for code related to the conversion of data values to RDF in various ways.
 *
 * @author Markus Kroetzsch
 */
package org.wikidata.wdtk.rdf.values;

/*
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/000077500000000000000000000000001444772566300202745ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/000077500000000000000000000000001444772566300212155ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/000077500000000000000000000000001444772566300220045ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/000077500000000000000000000000001444772566300236015ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300245525ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/000077500000000000000000000000001444772566300253255ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/AbstractRdfConverterTest.java000066400000000000000000000102271444772566300331210ustar00rootroot00000000000000package org.wikidata.wdtk.rdf;

/*-
 * #%L
 * Wikidata Toolkit RDF
 * %%
 * Copyright (C) 2014 - 2022 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.implementation.DatatypeIdImpl; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import static org.junit.Assert.assertEquals; public class AbstractRdfConverterTest { @Test public void testIriForItem() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_ITEM)), Vocabulary.DT_ITEM); } @Test public void testIriForProperty() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_PROPERTY)), Vocabulary.DT_PROPERTY); } @Test public void testIriForCoordinate() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_GLOBE_COORDINATES)), Vocabulary.DT_GLOBE_COORDINATES); } @Test public void testIriForTime() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_TIME)), Vocabulary.DT_TIME); } @Test public void testIriForString() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_STRING)), Vocabulary.DT_STRING); } @Test public void testIriForQuantity() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_QUANTITY)), Vocabulary.DT_QUANTITY); } @Test public void testIriForCommons() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_COMMONS_MEDIA)), Vocabulary.DT_COMMONS_MEDIA); } @Test public void testIriForExternalId() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_EXTERNAL_ID)), Vocabulary.DT_EXTERNAL_ID); } @Test public void testIriForMath() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_MATH)), Vocabulary.DT_MATH); } @Test public void testIriForGeoShape() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_GEO_SHAPE)), Vocabulary.DT_GEO_SHAPE); } @Test public void testIriForUrl() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_URL)), Vocabulary.DT_URL); } @Test public void testIriForMonolingualText() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_MONOLINGUAL_TEXT)), Vocabulary.DT_MONOLINGUAL_TEXT); } @Test public void testIriForEdtf() { assertEquals( AbstractRdfConverter.getDatatypeIri(Datamodel.makeDatatypeIdValueFromJsonString(DatatypeIdImpl.JSON_DT_EDTF)), Vocabulary.DT_EDTF); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/MockPropertyRegister.java000066400000000000000000001222671444772566300323450ustar00rootroot00000000000000/* * #%L * Wikidata Toolkit Testing Utilities * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * 
Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.wikidata.wdtk.rdf;

import java.util.HashMap;
import java.util.Map;

import org.junit.Assert;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.wikibaseapi.BasicApiConnection;

/**
 * This is a helper class for testing processes that need a
 * {@link PropertyRegister}. It includes some property types and pattern URIs
 * for testing.
 *
 * @author Michael Guenther
 *
 */
public class MockPropertyRegister extends PropertyRegister {

	public MockPropertyRegister() {
		super("P1921", BasicApiConnection.getWikidataApiConnection(),
				Datamodel.SITE_WIKIDATA);
		this.datatypes.putAll(KNOWN_PROPERTY_TYPES);
		this.uriPatterns.putAll(KNOWN_URI_PATTERNS);
	}

	@Override
	protected void fetchPropertyInformation(PropertyIdValue startProperty) {
		Assert.fail("Please add " + startProperty
				+ " to the datatypes and uriPatterns map.");
	}

	public static class WithNullPropertyTypes extends MockPropertyRegister {
		public WithNullPropertyTypes() {
			super();
			Map<String, String> NULL_PROPERTY_TYPES = new HashMap<>();
			for (Map.Entry<String, String> e : KNOWN_PROPERTY_TYPES.entrySet()) {
				NULL_PROPERTY_TYPES.put(e.getKey(), null);
			}
			this.datatypes.putAll(NULL_PROPERTY_TYPES);
			this.uriPatterns.putAll(KNOWN_URI_PATTERNS);
		}
	}

	static Map<String, String> KNOWN_URI_PATTERNS = new HashMap<>();
	static {
		KNOWN_URI_PATTERNS.put("P434", "https://musicbrainz.org/artist/$1");
	}

	static Map<String, String> KNOWN_PROPERTY_TYPES = new HashMap<>();
	static {
		KNOWN_PROPERTY_TYPES.put("P10", DatatypeIdValue.DT_COMMONS_MEDIA);
		KNOWN_PROPERTY_TYPES.put("P1001", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1002", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1003", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1004", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1005", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1006", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P101", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1013", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1014", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1015", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1016", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1017", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1019", DatatypeIdValue.DT_URL);
		KNOWN_PROPERTY_TYPES.put("P102", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1025", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1027", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P103", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1030", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1031", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1033", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1034", DatatypeIdValue.DT_ITEM);
		KNOWN_PROPERTY_TYPES.put("P1036", DatatypeIdValue.DT_STRING);
		KNOWN_PROPERTY_TYPES.put("P1037", DatatypeIdValue.DT_ITEM);
KNOWN_PROPERTY_TYPES.put("P1038", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1039", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1040", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1042", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1044", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1047", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1048", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P105", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1054", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1055", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1056", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1058", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1059", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P106", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1065", DatatypeIdValue.DT_URL); KNOWN_PROPERTY_TYPES.put("P1066", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1067", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1069", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P107", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1070", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1074", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1075", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1076", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1077", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P108", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1080", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1081", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1082", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1085", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1086", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P109", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P1092", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P110", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1100", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1101", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1103", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1104", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1107", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1108", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1110", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1113", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1114", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1115", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1118", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1119", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P112", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1120", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1121", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1128", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P113", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1130", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1132", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1134", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P114", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1142", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1144", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1146", DatatypeIdValue.DT_STRING); 
KNOWN_PROPERTY_TYPES.put("P1148", DatatypeIdValue.DT_QUANTITY); KNOWN_PROPERTY_TYPES.put("P1149", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P115", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P1150", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P117", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P118", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P119", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P121", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P122", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P123", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P126", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P127", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P131", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P132", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P133", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P134", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P135", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P136", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P137", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P138", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P14", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P140", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P141", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P143", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P144", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P149", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P15", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P150", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P154", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P155", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P156", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P157", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P158", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P159", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P16", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P161", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P162", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P163", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P166", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P167", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P168", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P169", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P17", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P170", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P171", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P172", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P173", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P175", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P176", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P177", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P178", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P179", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P18", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P180", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P181", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P183", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P184", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P185", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P186", DatatypeIdValue.DT_ITEM); 
KNOWN_PROPERTY_TYPES.put("P189", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P19", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P190", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P193", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P194", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P195", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P196", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P197", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P198", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P199", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P20", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P200", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P201", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P202", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P205", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P206", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P208", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P209", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P21", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P210", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P212", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P213", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P214", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P215", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P217", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P218", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P219", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P22", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P220", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P223", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P225", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P227", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P229", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P230", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P231", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P232", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P233", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P234", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P235", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P236", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P237", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P238", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P239", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P240", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P241", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P242", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P243", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P244", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P245", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P246", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P247", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P248", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P249", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P25", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P26", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P263", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P264", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P268", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P269", DatatypeIdValue.DT_STRING); 
KNOWN_PROPERTY_TYPES.put("P27", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P270", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P271", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P272", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P274", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P275", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P276", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P277", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P279", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P281", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P282", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P286", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P287", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P289", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P291", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P295", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P296", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P297", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P298", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P299", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P30", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P300", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P301", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P304", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P306", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P31", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P344", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P345", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P347", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P348", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P349", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P35", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P350", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P352", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P355", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P356", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P357", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P358", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P359", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P36", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P360", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P361", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P364", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P366", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P367", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P37", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P370", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P371", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P373", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P374", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P375", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P376", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P377", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P38", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P380", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P381", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P382", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P387", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P39", DatatypeIdValue.DT_ITEM); 
KNOWN_PROPERTY_TYPES.put("P392", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P393", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P395", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P396", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P397", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P40", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P400", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P402", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P403", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P404", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P405", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P406", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P407", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P408", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P409", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P41", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P410", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P412", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P413", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P414", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P417", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P418", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P421", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P424", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P425", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P426", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P427", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P428", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P429", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P43", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P432", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P433", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P434", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P435", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P436", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P437", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P438", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P439", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P44", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P440", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P442", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P443", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P444", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P447", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P448", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P449", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P45", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P450", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P451", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P452", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P453", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P454", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P455", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P457", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P458", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P459", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P460", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P461", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P462", DatatypeIdValue.DT_ITEM); 
KNOWN_PROPERTY_TYPES.put("P463", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P465", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P466", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P467", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P47", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P473", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P474", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P477", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P478", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P480", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P484", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P485", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P486", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P487", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P488", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P489", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P490", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P492", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P493", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P494", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P495", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P497", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P498", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P50", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P500", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P501", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P504", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P506", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P508", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P509", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P51", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P511", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P512", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P513", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P516", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P518", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P520", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P521", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P522", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P523", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P524", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P525", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P527", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P528", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P529", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P53", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P530", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P531", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P532", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P535", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P536", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P539", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P54", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P542", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P543", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P545", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P547", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P549", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P551", DatatypeIdValue.DT_ITEM); 
KNOWN_PROPERTY_TYPES.put("P552", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P553", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P554", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P555", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P557", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P558", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P559", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P560", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P561", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P562", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P563", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P564", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P566", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P569", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P57", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P570", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P571", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P574", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P575", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P576", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P577", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P579", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P58", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P580", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P582", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P585", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P586", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P587", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P59", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P590", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P592", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P597", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P599", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P6", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P60", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P600", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P604", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P605", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P606", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P607", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P608", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P609", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P61", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P610", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P611", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P612", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P613", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P618", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P619", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P624", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P625", DatatypeIdValue.DT_GLOBE_COORDINATES); KNOWN_PROPERTY_TYPES.put("P627", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P629", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P630", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P631", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P633", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P634", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P635", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P637", DatatypeIdValue.DT_STRING); 
KNOWN_PROPERTY_TYPES.put("P638", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P640", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P641", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P642", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P646", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P648", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P649", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P65", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P653", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P655", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P657", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P658", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P66", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P661", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P662", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P664", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P665", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P668", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P669", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P670", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P672", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P673", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P674", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P676", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P677", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P680", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P681", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P682", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P683", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P685", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P686", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P687", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P69", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P691", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P694", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P695", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P697", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P7", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P70", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P702", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P703", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P705", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P706", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P708", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P709", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P71", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P710", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P711", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P712", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P713", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P714", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P715", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P716", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P718", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P720", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P721", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P722", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P725", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P729", DatatypeIdValue.DT_TIME); 
KNOWN_PROPERTY_TYPES.put("P734", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P735", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P736", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P737", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P74", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P740", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P741", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P742", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P743", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P744", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P747", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P749", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P750", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P757", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P758", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P759", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P76", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P761", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P762", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P763", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P764", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P765", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P766", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P767", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P768", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P77", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P770", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P771", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P772", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P773", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P774", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P775", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P78", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P780", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P782", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P790", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P791", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P792", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P793", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P794", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P799", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P800", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P802", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P803", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P804", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P805", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P806", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P808", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P809", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P81", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P813", DatatypeIdValue.DT_TIME); KNOWN_PROPERTY_TYPES.put("P814", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P815", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P816", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P817", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P827", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P828", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P829", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P830", DatatypeIdValue.DT_STRING); 
KNOWN_PROPERTY_TYPES.put("P831", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P832", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P833", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P836", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P837", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P838", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P84", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P840", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P841", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P842", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P846", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P849", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P85", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P850", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P853", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P854", DatatypeIdValue.DT_URL); KNOWN_PROPERTY_TYPES.put("P856", DatatypeIdValue.DT_URL); KNOWN_PROPERTY_TYPES.put("P858", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P86", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P862", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P865", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P866", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P867", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P868", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P87", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P872", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P878", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P879", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P88", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P882", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P883", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P884", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P888", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P898", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P9", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P901", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P902", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P905", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P906", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P908", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P909", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P91", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P910", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P912", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P913", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P914", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P915", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P916", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P92", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P921", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P931", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P933", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P935", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P937", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P94", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P941", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P944", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P945", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P946", DatatypeIdValue.DT_STRING); 
KNOWN_PROPERTY_TYPES.put("P947", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P948", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P949", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P950", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P951", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P954", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P957", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P958", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P959", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P960", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P961", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P963", DatatypeIdValue.DT_URL); KNOWN_PROPERTY_TYPES.put("P964", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P965", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P966", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P969", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P97", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P971", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P972", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P973", DatatypeIdValue.DT_URL); KNOWN_PROPERTY_TYPES.put("P98", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P982", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P984", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P990", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P991", DatatypeIdValue.DT_ITEM); KNOWN_PROPERTY_TYPES.put("P996", DatatypeIdValue.DT_COMMONS_MEDIA); KNOWN_PROPERTY_TYPES.put("P998", DatatypeIdValue.DT_STRING); KNOWN_PROPERTY_TYPES.put("P1647", DatatypeIdValue.DT_PROPERTY); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/PropertyRegisterTest.java000066400000000000000000000226641444772566300323730ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 * #L%
 */

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.hamcrest.core.IsIterableContaining;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.hamcrest.MockitoHamcrest;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl;
import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.DocumentDataFilter;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;
import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
import org.wikidata.wdtk.wikibaseapi.BasicApiConnection;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;
import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException;

public class PropertyRegisterTest {

	PropertyRegister propertyRegister;

	final String siteIri = "http://www.example.org/entities/";

	final TestObjectFactory objectFactory = new TestObjectFactory();
	final DataObjectFactory dataObjectFactory = new DataObjectFactoryImpl();

	@Before
	public void setUp() throws MediaWikiApiErrorException, IOException {
		Map<String, EntityDocument> mockResult = new HashMap<>();
		List<StatementGroup> mockStatementGroups = new ArrayList<>();

		PropertyIdValue pid434 = dataObjectFactory.getPropertyIdValue("P434",
				this.siteIri);
		PropertyIdValue pid508 = dataObjectFactory.getPropertyIdValue("P508",
				this.siteIri);
		PropertyIdValue pid23 = dataObjectFactory.getPropertyIdValue("P23",
				this.siteIri);
		PropertyIdValue pid1921 = dataObjectFactory.getPropertyIdValue("P1921",
				this.siteIri);

		Statement p23Statement = dataObjectFactory.getStatement(pid434,
				dataObjectFactory.getValueSnak(pid23,
						dataObjectFactory.getItemIdValue("Q42", this.siteIri)),
				Collections.emptyList(), Collections.emptyList(),
				StatementRank.NORMAL, "000");

		Statement p1921Statement = dataObjectFactory.getStatement(pid434,
				dataObjectFactory.getValueSnak(pid1921,
						dataObjectFactory
								.getStringValue("http://musicbrainz.org/$1/artist")),
				Collections.emptyList(), Collections.emptyList(),
				StatementRank.NORMAL, "000");

		Statement p1921StatementExternalID = dataObjectFactory.getStatement(pid508,
				dataObjectFactory.getValueSnak(pid1921,
						dataObjectFactory
								.getStringValue("http://purl.org/bncf/tid/$1")),
				Collections.emptyList(), Collections.emptyList(),
				StatementRank.NORMAL, "000");

		mockStatementGroups.add(dataObjectFactory.getStatementGroup(Collections
				.singletonList(p23Statement)));
		mockStatementGroups.add(dataObjectFactory.getStatementGroup(Collections
				.singletonList(p1921Statement)));

		mockResult.put("P434", dataObjectFactory.getPropertyDocument(pid434,
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), mockStatementGroups,
				dataObjectFactory
						.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_STRING),
				0));
		mockResult.put("P23", dataObjectFactory.getPropertyDocument(pid23,
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), Collections.emptyList(),
				dataObjectFactory.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_ITEM),
				0));
		mockResult.put("P508",
				dataObjectFactory.getPropertyDocument(pid508,
						Collections.emptyList(), Collections.emptyList(),
						Collections.emptyList(),
						Collections.singletonList(dataObjectFactory.getStatementGroup(
								Collections.singletonList(p1921StatementExternalID))),
						dataObjectFactory.getDatatypeIdValueFromJsonId(DatatypeIdValue.JSON_DT_EXTERNAL_ID),
						0));

		this.propertyRegister = new PropertyRegister("P1921",
				new BasicApiConnection("http://localhost/"), this.siteIri);

		WikibaseDataFetcher dataFetcher = Mockito.mock(WikibaseDataFetcher.class);
		Mockito.when(dataFetcher.getEntityDocuments(
				(List<String>) MockitoHamcrest.argThat(IsIterableContaining.hasItems("P434"))))
				.thenReturn(mockResult);
		Mockito.when(dataFetcher.getEntityDocuments(
				(List<String>) MockitoHamcrest.argThat(IsIterableContaining.hasItems("P508"))))
				.thenReturn(mockResult);
		Mockito.when(dataFetcher.getFilter()).thenReturn(new DocumentDataFilter());
		this.propertyRegister.dataFetcher = dataFetcher;
	}

	@Test
	public void testGetWikidataPropertyRegister() {
		assertEquals("P1921", this.propertyRegister.uriPatternPropertyId);
	}

	@Test
	public void testFetchPropertyUriPattern() {
		PropertyIdValue pid = this.dataObjectFactory.getPropertyIdValue("P434",
				this.siteIri);
		assertEquals("http://musicbrainz.org/$1/artist",
				this.propertyRegister.getPropertyUriPattern(pid));
		// Check twice to test that the cached retrieval works too
		assertEquals("http://musicbrainz.org/$1/artist",
				this.propertyRegister.getPropertyUriPattern(pid));
		assertEquals(50, this.propertyRegister.smallestUnfetchedPropertyIdNumber);
		assertTrue(this.propertyRegister.datatypes.containsKey("P434"));
	}

	@Test
	public void testFetchPropertyUriPatternExternalID() {
		PropertyIdValue pid = this.dataObjectFactory.getPropertyIdValue("P508",
				this.siteIri);
		assertEquals("http://purl.org/bncf/tid/$1",
				this.propertyRegister.getPropertyUriPattern(pid));
	}

	@Test
	public void testGetPropertyType() {
		assertEquals(DatatypeIdValue.DT_STRING,
				this.propertyRegister.getPropertyType(dataObjectFactory
						.getPropertyIdValue("P434", this.siteIri)));
		// Check twice to test that the cached retrieval works too
		assertEquals(DatatypeIdValue.DT_STRING,
				this.propertyRegister.getPropertyType(dataObjectFactory
						.getPropertyIdValue("P434", this.siteIri)));
		assertEquals(50, this.propertyRegister.smallestUnfetchedPropertyIdNumber);
		assertTrue(this.propertyRegister.datatypes.containsKey("P434"));
	}

	@Test
	public void testGetMissingPropertyType() {
		assertNull(this.propertyRegister.getPropertyType(dataObjectFactory
				.getPropertyIdValue("P10000", this.siteIri)));
		final int smallestBefore = this.propertyRegister.smallestUnfetchedPropertyIdNumber;
		// Check twice to test fast failing on retry
		assertNull(this.propertyRegister.getPropertyType(dataObjectFactory
				.getPropertyIdValue("P10000", this.siteIri)));
		assertEquals("no requests should be made if the property is known to be missing",
				smallestBefore,
				this.propertyRegister.smallestUnfetchedPropertyIdNumber);
	}

	@Test
	public void testSetPropertyTypeFromEntityIdValue() {
		PropertyIdValue pid = this.dataObjectFactory.getPropertyIdValue("P1001",
				this.siteIri);
		assertEquals(this.propertyRegister.setPropertyTypeFromEntityIdValue(
				pid, this.dataObjectFactory.getItemIdValue("Q20", this.siteIri)),
				DatatypeIdValue.DT_ITEM);
		assertEquals(this.propertyRegister.setPropertyTypeFromEntityIdValue(
				pid, this.dataObjectFactory.getPropertyIdValue("P58", this.siteIri)),
				DatatypeIdValue.DT_PROPERTY);
		assertEquals(this.propertyRegister.setPropertyTypeFromEntityIdValue(
				pid, this.dataObjectFactory.getLexemeIdValue("L343", this.siteIri)),
DatatypeIdValue.DT_LEXEME); assertEquals(this.propertyRegister.setPropertyTypeFromEntityIdValue( pid, this.dataObjectFactory.getFormIdValue("L343-F1", this.siteIri)), DatatypeIdValue.DT_FORM); assertEquals(this.propertyRegister.setPropertyTypeFromEntityIdValue( pid, this.dataObjectFactory.getSenseIdValue("L343-S34", this.siteIri)), DatatypeIdValue.DT_SENSE); } @Test public void testSetPropertyTypeFromStringValue() { assertEquals(this.propertyRegister.setPropertyTypeFromStringValue( dataObjectFactory.getPropertyIdValue("P434", this.siteIri), dataObjectFactory .getStringValue("http://musicbrainz.org/$1/artist")), "http://wikiba.se/ontology#String"); } @Test public void testSetMissingPropertyTypeFromStringValue() { assertEquals(this.propertyRegister.setPropertyTypeFromStringValue( dataObjectFactory.getPropertyIdValue("P10", this.siteIri), dataObjectFactory .getStringValue("http://musicbrainz.org/$1/artist")), "http://wikiba.se/ontology#String"); } @Test public void testWikidataPropertyRegister() { PropertyRegister pr = PropertyRegister.getWikidataPropertyRegister(); assertEquals(Datamodel.SITE_WIKIDATA, pr.getUriPrefix()); assertEquals("P1921", pr.uriPatternPropertyId); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/RdfConverterTest.java000066400000000000000000000276551444772566300314520ustar00rootroot00000000000000/* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.rdf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import org.eclipse.rdf4j.model.Model; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.model.util.Models; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFParseException; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.implementation.SitesImpl; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; public class RdfConverterTest { ByteArrayOutputStream out; RdfWriter rdfWriter; RdfConverter rdfConverter; SitesImpl sites; ValueFactory rdfFactory = SimpleValueFactory.getInstance(); Resource resource = rdfFactory.createIRI("http://test.org/"); final TestObjectFactory objectFactory = new TestObjectFactory(); final DataObjectFactory dataObjectFactory = new DataObjectFactoryImpl(); @Before public void setUp() { this.out = new ByteArrayOutputStream(); this.rdfWriter = new RdfWriter(RDFFormat.TURTLE, out); this.sites = new SitesImpl(); this.rdfConverter = new RdfConverter(this.rdfWriter, this.sites, new MockPropertyRegister()); this.rdfWriter.start(); } @Test public void testWriteItemDocument() throws RDFHandlerException, IOException, RDFParseException { ItemDocument document = this.objectFactory.createItemDocument(); this.rdfConverter.writeItemDocument(document); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(out.toString()); assertTrue(Models.isomorphic( model, RdfTestHelpers.parseRdf(RdfTestHelpers.getResourceFromFile("ItemDocument.rdf")) )); } @Test public void testWriteItemDocumentWithNullPropertyTypes() throws RDFHandlerException, IOException, RDFParseException { this.rdfConverter = new RdfConverter(this.rdfWriter, this.sites, new MockPropertyRegister.WithNullPropertyTypes()); ItemDocument document = this.objectFactory.createItemDocument(); this.rdfConverter.writeItemDocument(document); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(out.toString()); assertTrue(Models.isomorphic( model, RdfTestHelpers.parseRdf(RdfTestHelpers.getResourceFromFile("ItemDocumentUnknownPropertyTypes.rdf")) )); } @Test public void testWritePropertyDocument() throws RDFHandlerException, RDFParseException, IOException { PropertyDocument document = this.objectFactory .createEmptyPropertyDocument(); 
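		/* Serialize the empty property document created above; the expected
		   output is read from the EmptyPropertyDocument.rdf fixture below. */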
this.rdfConverter.writePropertyDocument(document); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("EmptyPropertyDocument.rdf"))); } @Test public void testWriteStatementRankTriple() throws RDFHandlerException, RDFParseException, IOException { StatementRank rank = StatementRank.DEPRECATED; Resource subject = this.rdfFactory .createIRI("http://www.wikidata.org/Q10Snone"); this.rdfConverter.writeStatementRankTriple(subject, rank, false); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("StatementRankTriple.rdf")), model); } @Test public void testWriteStatementRankTripleBest() throws RDFHandlerException, RDFParseException, IOException { StatementRank rank = StatementRank.NORMAL; Resource subject = this.rdfFactory .createIRI("http://www.wikidata.org/Q10Snone"); this.rdfConverter.writeStatementRankTriple(subject, rank, true); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("StatementRankTripleBest.rdf")), model); } @Test public void testStatementSimpleValue() throws RDFHandlerException, RDFParseException, IOException { Statement statement = objectFactory.createStatement("Q100", "P227").withStatementId("Q100-id111"); this.rdfConverter.writeFullStatement(statement, false); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("Statement.rdf"))); } @Test public void testStatementComplexValue() throws RDFHandlerException, RDFParseException, IOException { GlobeCoordinatesValue value = Datamodel.makeGlobeCoordinatesValue(51, 13, GlobeCoordinatesValue.PREC_DEGREE, GlobeCoordinatesValue.GLOBE_EARTH); Statement statement = StatementBuilder .forSubjectAndProperty(ItemIdValue.NULL, PropertyIdValue.NULL) .withId("Q0$test") .withValue(value).build(); this.rdfConverter.writeFullStatement(statement, false); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("StatementCplx.rdf"))); } @Test public void testStatementNoValue() throws RDFHandlerException, RDFParseException, IOException { PropertyIdValue pid = dataObjectFactory.getPropertyIdValue("P31", "http://www.wikidata.org/"); Statement statement = StatementBuilder .forSubjectAndProperty(ItemIdValue.NULL, pid) .withId("Q0$test") .withNoValue().build(); this.rdfConverter.writeFullStatement(statement, false); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("StatementNoValue.rdf"))); } @Test public void testWriteBasicDeclarations() throws RDFHandlerException, RDFParseException, IOException { this.rdfConverter.writeBasicDeclarations(); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("BasicDeclarations.rdf")), model); } @Test public void testWriteNamespaceDeclarations() throws RDFHandlerException, RDFParseException, IOException { this.rdfConverter.writeNamespaceDeclarations(); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); 
		assertEquals(RdfTestHelpers.parseRdf(RdfTestHelpers
				.getResourceFromFile("Namespaces.rdf")), model);
	}

	@Test
	public void testWriteSiteLinks() throws RDFHandlerException, IOException,
			RDFParseException {
		this.sites.setSiteInformation("enwiki", "wikipedia", "en", "mediawiki",
				"http://en.wikipedia.org/w/$1", "http://en.wikipedia.org/wiki/$1");
		this.sites.setSiteInformation("dewiki", "wikipedia", "de", "mediawiki",
				"http://de.wikipedia.org/w/$1", "http://de.wikipedia.org/wiki/$1");
		Map<String, SiteLink> siteLinks = objectFactory.createSiteLinks();
		this.rdfConverter.writeSiteLinks(this.resource, siteLinks);
		this.rdfWriter.finish();
		Model model = RdfTestHelpers.parseRdf(out.toString());
		assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers
				.getResourceFromFile("SiteLinks.rdf")));
	}

	private ItemDocument createTestItemDocument() {
		ItemIdValue itemValue = dataObjectFactory.getItemIdValue("Q100",
				"http://www.wikidata.org/");
		ItemIdValue value1 = dataObjectFactory.getItemIdValue("Q10",
				"http://www.wikidata.org/");
		ItemIdValue value2 = dataObjectFactory.getItemIdValue("Q11",
				"http://www.wikidata.org/");
		PropertyIdValue propertyIdValueP31 = dataObjectFactory
				.getPropertyIdValue("P31", "http://www.wikidata.org/");
		PropertyIdValue propertyIdValueP279 = dataObjectFactory
				.getPropertyIdValue("P279", "http://www.wikidata.org/");

		// Statement InstanceOf - P31
		ValueSnak mainSnak1 = dataObjectFactory.getValueSnak(
				propertyIdValueP31, value1);
		Statement statement1 = dataObjectFactory.getStatement(itemValue,
				mainSnak1, Collections.emptyList(), Collections.emptyList(),
				StatementRank.NORMAL, "10103");
		List<Statement> statementList1 = new ArrayList<>();
		statementList1.add(statement1);
		StatementGroup statementGroup1 = this.dataObjectFactory
				.getStatementGroup(statementList1);

		// Statement SubclassOf - P279
		ValueSnak mainSnak2 = dataObjectFactory.getValueSnak(
				propertyIdValueP279, value2);
		Statement statement2 = dataObjectFactory.getStatement(itemValue,
				mainSnak2, Collections.emptyList(), Collections.emptyList(),
				StatementRank.NORMAL, "10104");
		List<Statement> statementList2 = new ArrayList<>();
		statementList2.add(statement2);
		StatementGroup statementGroup2 = this.dataObjectFactory
				.getStatementGroup(statementList2);

		List<StatementGroup> statementGroups = new ArrayList<>();
		statementGroups.add(statementGroup1);
		statementGroups.add(statementGroup2);

		return dataObjectFactory.getItemDocument(itemValue,
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), statementGroups,
				Collections.emptyMap(), 0);
	}

	@Test
	public void testWriteSimpleStatements() throws RDFHandlerException,
			RDFParseException, IOException {
		ItemDocument document = createTestItemDocument();
		this.rdfConverter.setTasks(RdfSerializer.TASK_SIMPLE_STATEMENTS);
		this.rdfConverter.writeStatements(document);
		this.rdfWriter.finish();
		Model model = RdfTestHelpers.parseRdf(this.out.toString());
		assertEquals(
				RdfTestHelpers
						.parseRdf("<http://www.wikidata.org/Q100> <http://www.wikidata.org/prop/direct/P31> <http://www.wikidata.org/Q10> ;\n"
								+ " <http://www.wikidata.org/prop/direct/P279> <http://www.wikidata.org/Q11> .\n"),
				model);
	}

	@Test
	public void testWriteInterPropertyLinks() throws RDFHandlerException,
			RDFParseException, IOException {
		PropertyDocument document = this.dataObjectFactory.getPropertyDocument(
				this.dataObjectFactory.getPropertyIdValue("P17",
						"http://www.wikidata.org/"),
				Collections.emptyList(), Collections.emptyList(),
				Collections.emptyList(), Collections.emptyList(),
				this.dataObjectFactory.getDatatypeIdValue(DatatypeIdValue.DT_ITEM),
				0);
		this.rdfConverter.writeInterPropertyLinks(document);
		this.rdfWriter.finish();
		Model model = RdfTestHelpers.parseRdf(out.toString());
		assertEquals(RdfTestHelpers.parseRdf(RdfTestHelpers
.getResourceFromFile("InterPropertyLinks.rdf")), model); } @After public void clear() throws RDFHandlerException, IOException { this.out.close(); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/RdfSerializerTest.java000066400000000000000000000036451444772566300316050ustar00rootroot00000000000000/* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.rdf; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; import org.eclipse.rdf4j.model.Model; import org.eclipse.rdf4j.model.util.Models; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFParseException; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.implementation.SitesImpl; public class RdfSerializerTest { final TestObjectFactory objectFactory = new TestObjectFactory(); ByteArrayOutputStream out; RdfSerializer rdfSerializer; @Before public void setUp() { this.out = new ByteArrayOutputStream(); this.rdfSerializer = new RdfSerializer(RDFFormat.TURTLE, this.out, new SitesImpl(), new MockPropertyRegister()); } @Test public void testSerialization() throws RDFParseException, RDFHandlerException, IOException { this.rdfSerializer.open(); this.rdfSerializer.processItemDocument(this.objectFactory .createItemDocument()); this.rdfSerializer.close(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertTrue(Models.isomorphic( RdfTestHelpers.parseRdf(RdfTestHelpers.getResourceFromFile("completeRDFDocument.rdf")), model )); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/RdfTestHelpers.java000066400000000000000000000034571444772566300310770ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import org.eclipse.rdf4j.model.Model; import org.eclipse.rdf4j.model.impl.LinkedHashModel; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFParseException; import org.eclipse.rdf4j.rio.RDFParser; import org.eclipse.rdf4j.rio.Rio; import org.eclipse.rdf4j.rio.helpers.StatementCollector; import org.wikidata.wdtk.testing.MockStringContentFactory; public class RdfTestHelpers { public static String getResourceFromFile(String fileName) throws IOException { return MockStringContentFactory.getStringFromUrl(RdfTestHelpers.class .getResource("/" + fileName)); } public static Model parseRdf(String rdfResource) throws RDFParseException, RDFHandlerException, IOException { InputStream inStream = new ByteArrayInputStream(rdfResource.getBytes()); RDFParser parser = Rio.createParser(RDFFormat.TURTLE); Model graph = new LinkedHashModel(); parser.setRDFHandler(new StatementCollector(graph)); parser.parse(inStream, "http://test/"); return graph; } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/TestObjectFactory.java000066400000000000000000000403231444772566300315700ustar00rootroot00000000000000package org.wikidata.wdtk.rdf; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.SomeValueSnak; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.ValueSnak; /** * This class provides functions to create objects from * {@link org.wikidata.wdtk.datamodel.interfaces} with certain predefined * parameters. 
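 * <p>
 * Typical use in a test, as an illustrative sketch (this example is not part
 * of the original documentation):
 * </p>
 * <pre>
 * TestObjectFactory objectFactory = new TestObjectFactory();
 * ItemDocument document = objectFactory.createItemDocument();
 * // pass the document to the converter or serializer under test
 * </pre>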
* * @author Michael Günther, Fredo Erxleben * */ public class TestObjectFactory { private final DataObjectFactory factory = new DataObjectFactoryImpl(); private static String baseIri = "http://www.wikidata.org/"; /** * Creates an empty {@link PropertyDocument} * *
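 * <p>
 * Illustrative call (a sketch, not from the original docs); per the method
 * body, the returned document uses property ID P1, the globe-coordinates
 * datatype, and has no labels, descriptions, or aliases:
 * </p>
 * <pre>
 * PropertyDocument propertyDocument = new TestObjectFactory()
 * 		.createEmptyPropertyDocument();
 * </pre>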

 * <p>
 * ID = PropDoc
 * </p>
 *
 * @return empty {@link PropertyDocument}
 */
public PropertyDocument createEmptyPropertyDocument() {
	PropertyIdValue propertyId = this.factory.getPropertyIdValue("P1",
			baseIri);
	List<MonolingualTextValue> labels = new LinkedList<>();
	List<MonolingualTextValue> descriptions = new LinkedList<>();
	List<MonolingualTextValue> aliases = new LinkedList<>();
	DatatypeIdValue datatypeId = this.factory
			.getDatatypeIdValue(DatatypeIdValue.DT_GLOBE_COORDINATES);
	return this.factory.getPropertyDocument(propertyId, labels,
			descriptions, aliases, Collections.emptyList(), datatypeId, 0);
}

/**
 * Creates a {@link ItemDocument}
 *

 * <p>
 * ID = Item
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>ItemId: "Q10"</li>
 * <li>baseIri: baseIri</li>
 * <li>Labels: {@link #createLabels Labs}</li>
 * <li>Descriptions: {@link #createDescriptions Descs}</li>
 * <li>Aliases: {@link #createAliases Aliases}</li>
 * <li>StatementGroups:
 * <ul>
 * <li>StatementGroup1
 * <ul>
 * <li>PropertyId: "P10"</li>
 * <li>baseIri: baseIri</li>
 * <li>Statement1
 * <ul>
 * <li>Mainsnak: NoValueSnak</li>
 * <li>Rank: normal</li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * <li>StatementGroup2
 * <ul>
 * <li>PropertyId: "P569"</li>
 * <li>baseIri: baseIri</li>
 * <li>Statement2
 * <ul>
 * <li>Mainsnak: {@link #createValueSnakTimeValue ValSnakTime}</li>
 * <li>Qualifiers: {@link #createQualifiers Quals}</li>
 * <li>Rank: normal</li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * <li>StatementGroup3
 * <ul>
 * <li>PropertyId: "P549"</li>
 * <li>baseIri: baseIri</li>
 * <li>Statement3
 * <ul>
 * <li>Mainsnak: {@link #createValueSnakStringValue ValSnakStr}</li>
 * <li>Rank: normal</li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * </ul>
 *
 * @return {@link ItemDocument}
 */
public ItemDocument createItemDocument() {
	List<StatementGroup> statementGroups = new ArrayList<>();

	List<Statement> statements1 = new ArrayList<>();
	Claim claim1 = factory.getClaim(factory.getItemIdValue("Q10", baseIri),
			factory.getNoValueSnak(factory.getPropertyIdValue("P10",
					baseIri)), Collections.emptyList());
	statements1.add(factory.getStatement(claim1, Collections.emptyList(),
			StatementRank.NORMAL, "Q10$none"));
	statementGroups.add(factory.getStatementGroup(statements1));

	List<Statement> statements2 = new ArrayList<>();
	Claim claim2 = factory.getClaim(factory.getItemIdValue("Q10", baseIri),
			createValueSnakTimeValue("P569"), createQualifiers());
	statements2.add(factory.getStatement(claim2, createReferences(),
			StatementRank.NORMAL, "Q10$none2"));
	statementGroups.add(factory.getStatementGroup(statements2));

	List<Statement> statements3 = new ArrayList<>();
	Claim claim3 = factory.getClaim(factory.getItemIdValue("Q10", baseIri),
			createValueSnakStringValue("P549"), Collections.emptyList());
	statements3.add(factory.getStatement(claim3, Collections.emptyList(),
			StatementRank.NORMAL, "Q10$none3"));
	statementGroups.add(factory.getStatementGroup(statements3));

	return factory.getItemDocument(factory.getItemIdValue("Q10", baseIri),
			createLabels(), createDescriptions(), createAliases(),
			statementGroups, createSiteLinks(), 0);
}

/**
 * Creates a {@link Statement} with entity-id qId, property-id pId
 *
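 * <p>
 * A minimal usage sketch (illustrative only; Q100 and P227 are the IDs used
 * by the tests in this module):
 * </p>
 * <pre>
 * Statement statement = new TestObjectFactory().createStatement("Q100", "P227");
 * </pre>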

 * <p>
 * ID = Stat
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>Rank: "normal"</li>
 * <li>MainSnak: {@link #createValueSnakStringValue ValSnakStr}</li>
 * <li>StatementId: "id111"</li>
 * <li>References: {@link #createReferences() Refs}</li>
 * </ul>
* * @param qId * @param pId * @return {@link Statement} */ public Statement createStatement(String qId, String pId) { return factory.getStatement( createClaim(qId, createValueSnakStringValue(pId)), createReferences(), StatementRank.NORMAL, "id111"); } /** * Creates a {@link StatementGroup} * *
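 * <p>
 * Illustrative call (a sketch, not from the original docs):
 * </p>
 * <pre>
 * StatementGroup group = new TestObjectFactory().createStatementGroup();
 * // the group contains two statements about Q10 with property P122
 * </pre>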

 * <p>
 * ID = StatGr
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>Statement1: {@link #createStatement(String, String) Stat} (qId = Q10,
 * pId = P122)</li>
 * <li>Statement2: Statement with Rank = "normal", Mainsnak =
 * {@link #createValueSnakQuantityValue(String) ValSnakQuant}, StatementId =
 * "id112"</li>
 * </ul>
 *
 * @return {@link StatementGroup}
 */
public StatementGroup createStatementGroup() {
	final String pId = "P122";
	final String qId = "Q10";
	List<Statement> statements = new ArrayList<>();
	statements.add(createStatement(qId, pId));
	statements.add(factory.getStatement(
			createClaim(qId, createValueSnakQuantityValue(pId)),
			Collections.emptyList(), StatementRank.NORMAL, "id112"));
	return factory.getStatementGroup(statements);
}

/**
 * Creates a list of labels.
 *

 * <p>
 * ID = Labs
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>MonolingualTextValue1: "foo" (label in the certain language), "lc"
 * (LanguageCode)</li>
 * <li>MonolingualTextValue2: "bar" (label in the certain language), "lc2"
 * (LanguageCode)</li>
 * </ul>
 *
 * @return list of {@link MonolingualTextValue}
 */
public List<MonolingualTextValue> createLabels() {
	List<MonolingualTextValue> result = new LinkedList<>();
	result.add(factory.getMonolingualTextValue("foo", "lc"));
	result.add(factory.getMonolingualTextValue("bar", "lc2"));
	return result;
}

/**
 * Creates a list of aliases.
 *

 * <p>
 * ID = Aliases
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>MonolingualTextValue: "foo" (alias for the certain language), "lc"
 * (LanguageCode)</li>
 * <li>MonolingualTextValue: "bar" (label in the certain language), "lc"
 * (LanguageCode)</li>
 * </ul>
 *
 * @return List of {@link MonolingualTextValue}
 */
public List<MonolingualTextValue> createAliases() {
	List<MonolingualTextValue> result = new LinkedList<>();
	result.add(factory.getMonolingualTextValue("foo", "lc"));
	result.add(factory.getMonolingualTextValue("bar", "lc"));
	return result;
}

/**
 * Creates a list of descriptions.
 *

 * <p>
 * ID = Descs
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>MonolingualTextValue: "it's foo" (description in the certain
 * language), "lc" (LanguageCode)</li>
 * <li>MonolingualTextValue: "it's bar" (description in the certain
 * language), "lc2" (LanguageCode)</li>
 * </ul>
 *
 * @return List of {@link MonolingualTextValue}
 */
public List<MonolingualTextValue> createDescriptions() {
	List<MonolingualTextValue> result = new LinkedList<>();
	result.add(factory.getMonolingualTextValue("it's foo", "lc"));
	result.add(factory.getMonolingualTextValue("it's bar", "lc2"));
	return result;
}

/**
 * Creates a map of {@link SiteLink}s with empty badges.
 *

 * <p>
 * ID = SLs
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>"enwiki" => SiteLink: title = "title_en", siteKey = "enwiki"</li>
 * <li>"dewiki" => SiteLink: title = "title_de", siteKey = "dewiki"</li>
 * </ul>
 *
 * @return Map for {@link SiteLink}s and their titles
 */
public Map<String, SiteLink> createSiteLinks() {
	Map<String, SiteLink> result = new HashMap<>();
	result.put("enwiki", factory.getSiteLink("title_en", "enwiki",
			Collections.emptyList()));
	result.put("dewiki", factory.getSiteLink("title_de", "dewiki",
			Collections.singletonList(createItemIdValue("Q42"))));
	return result;
}

/**
 * Creates a list of qualifiers.
 *

 * <p>
 * ID = Quals
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>{@link #createValueSnakTimeValue(String) ValSnakTime} (pId = P15)</li>
 * </ul>
 *
 * @return List of {@link SnakGroup}
 */
public List<SnakGroup> createQualifiers() {
	return Collections.singletonList(factory.getSnakGroup(Collections
			.singletonList(createValueSnakTimeValue("P15"))));
}

/**
 * Create a list of {@link Reference}s (containing only one reference).
 *

 * <p>
 * ID = Refs
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>reference: snaks = {@link #createValueSnakTimeValue(String)
 * ValSnakTime}</li>
 * </ul>
 *
 * @return List of {@link Reference}
 */
public List<Reference> createReferences() {
	List<SnakGroup> snaks = Collections.singletonList(factory
			.getSnakGroup(Collections
					.singletonList(createValueSnakTimeValue("P112"))));
	return Collections.singletonList(factory.getReference(snaks));
}

/**
 * Creates a {@link Reference}.
 *

 * <p>
 * ID = Ref
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>Snak1: {@link #createValueSnakGlobeCoordinatesValue(String)
 * ValSnakGlCo} (pId = P232)</li>
 * <li>Snak2: {@link #createValueSnakQuantityValue(String) ValSnakQuant}
 * (pId = P211)</li>
 * </ul>
 *
 * @return {@link Reference}
 */
public Reference createReference() {
	List<SnakGroup> snakGroups = new ArrayList<>();
	snakGroups.add(factory.getSnakGroup(Collections
			.singletonList(createValueSnakGlobeCoordinatesValue("P232"))));
	snakGroups.add(factory.getSnakGroup(Collections
			.singletonList(createValueSnakQuantityValue("P211"))));
	return factory.getReference(snakGroups);
}

/**
 * Creates a {@link Claim}.
 *

 * <p>
 * ID = Claim
 * </p>
    * * @param id * id of the subject of the {@link Claim} * @param snak * mainsnak for the {@link Claim} * * @return {@link Claim} with the given parameters */ public Claim createClaim(String id, Snak snak) { return factory.getClaim(factory.getItemIdValue(id, baseIri), snak, Collections.emptyList()); } /** * Creates a {@link SomeValueSnak} with pId. * *

 * <p>
 * ID = SomeValSnak
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>baseIri: "test"</li>
 * </ul>
    * * @param pId * property-id * @return {@link SomeValueSnak} */ public SomeValueSnak createSomeValueSnak(String pId) { return factory.getSomeValueSnak(factory .getPropertyIdValue(pId, baseIri)); } /** * Creates a {@link ValueSnak} with an {@link ItemIdValue} in it. * *
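 * <p>
 * Illustrative call (a sketch; the example IDs P31 and Q5 are arbitrary, not
 * from the original docs):
 * </p>
 * <pre>
 * ValueSnak snak = new TestObjectFactory().createValueSnakItemIdValue("P31", "Q5");
 * </pre>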

 * <p>
 * ID = ValSnakItem
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>baseIri: "test"</li>
 * </ul>
    * * @param pId * property-id * @param qId * item-id of the containing value * * @return {@link ValueSnak} */ public ValueSnak createValueSnakItemIdValue(String pId, String qId) { return factory.getValueSnak(factory.getPropertyIdValue(pId, baseIri), factory.getItemIdValue(qId, baseIri)); } /** * Creates a {@link ValueSnak} with an * {@link org.wikidata.wdtk.datamodel.interfaces.StringValue} in it. * *

 * <p>
 * ID = ValSnakStr
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>baseIri: "test"</li>
 * <li>String: "TestString"</li>
 * </ul>
    * * @param pId * property-id * * @return {@link ValueSnak} */ public ValueSnak createValueSnakStringValue(String pId) { return factory.getValueSnak(factory.getPropertyIdValue(pId, baseIri), factory.getStringValue("TestString")); } /** * Creates a {@link ValueSnak} with an {@link GlobeCoordinatesValue} in it. * *

 * <p>
 * ID = ValSnakGlCo
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>latitude: 213124</li>
 * <li>longitude: 21314</li>
 * <li>precision: 16666667</li>
 * </ul>
    * * @param pId * property-id * * @return {@link ValueSnak} */ public ValueSnak createValueSnakGlobeCoordinatesValue(String pId) { return factory.getValueSnak(factory.getPropertyIdValue(pId, baseIri), factory.getGlobeCoordinatesValue(213124, 21314, GlobeCoordinatesValue.PREC_ARCMINUTE, "http://www.wikidata.org/entity/Q2")); } /** * Creates a {@link ValueSnak} with an * {@link org.wikidata.wdtk.datamodel.interfaces.QuantityValue} in it. * *

 * <p>
 * ID = ValSnakQuant
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>baseIri: "test"</li>
 * <li>numericValue: 3</li>
 * <li>lowerBound: 3</li>
 * <li>upperBound: 3</li>
 * </ul>
    * * @param pId * property-id * * @return {@link ValueSnak} */ public ValueSnak createValueSnakQuantityValue(String pId) { return factory.getValueSnak(factory.getPropertyIdValue(pId, baseIri), factory.getQuantityValue(new BigDecimal(3), new BigDecimal(3), new BigDecimal(3))); } /** * Creates a {@link ValueSnak} with an * {@link org.wikidata.wdtk.datamodel.interfaces.TimeValue} in it. * *
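 * <p>
 * Illustrative call (a sketch; the property ID is arbitrary). The snak
 * carries the default time value listed below:
 * </p>
 * <pre>
 * ValueSnak snak = new TestObjectFactory().createValueSnakTimeValue("P569");
 * </pre>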

 * <p>
 * ID = ValSnakTime
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>baseIri: "test"</li>
 * <li>year: 306</li>
 * <li>month: 11</li>
 * <li>day: 3</li>
 * <li>hour: 13</li>
 * <li>minute: 7</li>
 * <li>second: 6</li>
 * <li>precision: 32</li>
 * <li>beforeTolerance: 17</li>
 * <li>afterTolerance: 43</li>
 * <li>timezoneOffset: 0</li>
 * <li>calendarModel: "http://www.wikidata.org/entity/Q1985727"</li>
 * </ul>
    * * @param pId * property-id * * @return {@link ValueSnak} */ public ValueSnak createValueSnakTimeValue(String pId) { return factory.getValueSnak(factory.getPropertyIdValue(pId, baseIri), factory.getTimeValue(306, (byte) 11, (byte) 3, (byte) 13, (byte) 7, (byte) 6, (byte) 32, 17, 43, 0, "http://www.wikidata.org/entity/Q1985727")); } /** * Creates a {@link PropertyIdValue}. * *

 * <p>
 * ID = PropVal
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>baseIri: "test"</li>
 * </ul>
    * * @param id * property-id * * @return {@link PropertyIdValue} */ public PropertyIdValue createPropertyIdValue(String id) { return factory.getPropertyIdValue(id, baseIri); } /** * Creates an {@link ItemIdValue}. * *

 * <p>
 * ID = ItemVal
 * </p>
 *
 * <p>
 * <b>Default values</b>
 * </p>
 * <ul>
 * <li>baseIri: "test"</li>
 * </ul>
    * * @param id * item-id * * @return {@link ItemIdValue} */ public ItemIdValue createItemIdValue(String id) { return factory.getItemIdValue(id, baseIri); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/values/000077500000000000000000000000001444772566300266245ustar00rootroot00000000000000ValueRdfConverterTest.java000066400000000000000000000165721444772566300336630ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/java/org/wikidata/wdtk/rdf/valuespackage org.wikidata.wdtk.rdf.values; /* * #%L * Wikidata Toolkit RDF * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; import org.junit.Before; import org.junit.Test; import org.eclipse.rdf4j.model.BNode; import org.eclipse.rdf4j.model.Model; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFParseException; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl; import org.wikidata.wdtk.datamodel.implementation.UnsupportedEntityIdValueImpl; import org.wikidata.wdtk.datamodel.interfaces.DataObjectFactory; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.QuantityValue; import org.wikidata.wdtk.datamodel.interfaces.TimeValue; import org.wikidata.wdtk.datamodel.interfaces.UnsupportedEntityIdValue; import org.wikidata.wdtk.rdf.MockPropertyRegister; import org.wikidata.wdtk.rdf.OwlDeclarationBuffer; import org.wikidata.wdtk.rdf.PropertyRegister; import org.wikidata.wdtk.rdf.RdfTestHelpers; import org.wikidata.wdtk.rdf.RdfWriter; public class ValueRdfConverterTest { ByteArrayOutputStream out; RdfWriter rdfWriter; OwlDeclarationBuffer rdfConversionBuffer; PropertyRegister propertyRegister = new MockPropertyRegister(); DatamodelMapper mapper = new DatamodelMapper("http://www.wikidata.org/entity/"); DataObjectFactory objectFactory = new DataObjectFactoryImpl(); @Before public void setUp() { this.out = new ByteArrayOutputStream(); this.rdfWriter = new RdfWriter(RDFFormat.TURTLE, this.out); this.rdfConversionBuffer = new OwlDeclarationBuffer(); this.rdfWriter.start(); } @Test public void testWriteQuantityValue() throws RDFHandlerException, RDFParseException, IOException { QuantityValueConverter valueConverter = new QuantityValueConverter( this.rdfWriter, this.propertyRegister, this.rdfConversionBuffer); QuantityValue value = this.objectFactory.getQuantityValue( new BigDecimal(100), new BigDecimal(100), new BigDecimal(100)); PropertyIdValue propertyIdValue = 
objectFactory.getPropertyIdValue( "P1081", "http://www.wikidata.org/entity/"); Value valueURI = valueConverter.getRdfValue(value, propertyIdValue, false); valueConverter.writeValue(value, (Resource) valueURI); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("QuantityValue.rdf"))); } @Test public void testWriteUnboundedQuantityValue() throws RDFHandlerException, RDFParseException, IOException { QuantityValueConverter valueConverter = new QuantityValueConverter( this.rdfWriter, this.propertyRegister, this.rdfConversionBuffer); QuantityValue value = this.objectFactory.getQuantityValue(new BigDecimal(100)); PropertyIdValue propertyIdValue = objectFactory.getPropertyIdValue( "P1081", "http://www.wikidata.org/entity/"); Value valueURI = valueConverter.getRdfValue(value, propertyIdValue, false); valueConverter.writeValue(value, (Resource) valueURI); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("UnboundedQuantityValue.rdf"))); } @Test public void testWriteMonolingualTextValue() throws RDFHandlerException { MonolingualTextValueConverter valueConverter = new MonolingualTextValueConverter( this.rdfWriter, this.propertyRegister, this.rdfConversionBuffer); MonolingualTextValue value = this.objectFactory .getMonolingualTextValue("中华人民共和国", "zh-hans"); PropertyIdValue propertyIdValue = this.objectFactory .getPropertyIdValue("P1448", "http://www.wikidata.org/entity/"); Value valueURI = valueConverter.getRdfValue(value, propertyIdValue, true); this.rdfWriter.finish(); assertEquals(valueURI.toString(), "\"中华人民共和国\"@zh-Hans"); } @Test public void testWriteGlobeCoordinatesValue() throws RDFHandlerException, RDFParseException, IOException { GlobeCoordinatesValueConverter valueConverter = new GlobeCoordinatesValueConverter( this.rdfWriter, this.propertyRegister, this.rdfConversionBuffer); GlobeCoordinatesValue value = this.objectFactory .getGlobeCoordinatesValue(51.033333333333, 13.733333333333, (GlobeCoordinatesValue.PREC_DECI_DEGREE), "http://www.wikidata.org/entity/Q2"); PropertyIdValue propertyIdValue = objectFactory.getPropertyIdValue( "P625", "http://www.wikidata.org/entity/"); Value valueURI = valueConverter.getRdfValue(value, propertyIdValue, false); valueConverter.writeValue(value, (Resource) valueURI); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("GlobeCoordinatesValue.rdf"))); } @Test public void testWriteTimeValue() throws RDFHandlerException, RDFParseException, IOException { TimeValueConverter valueConverter = new TimeValueConverter( this.rdfWriter, this.propertyRegister, this.rdfConversionBuffer); TimeValue value = objectFactory.getTimeValue(2008, (byte) 1, (byte) 1, (byte) 0, (byte) 0, (byte) 0, (byte) 9, 0, 0, 0, "http://www.wikidata.org/entity/Q1985727"); PropertyIdValue propertyIdValue = objectFactory.getPropertyIdValue( "P569", "http://www.wikidata.org/entity/"); Value valueURI = valueConverter.getRdfValue(value, propertyIdValue, false); valueConverter.writeValue(value, (Resource) valueURI); this.rdfWriter.finish(); Model model = RdfTestHelpers.parseRdf(this.out.toString()); assertEquals(model, RdfTestHelpers.parseRdf(RdfTestHelpers .getResourceFromFile("TimeValue.rdf"))); } @Test public void testWriteUnsupportedEntityIdValue() throws 
RDFHandlerException, RDFParseException, IOException { AnyValueConverter valueConverter = new AnyValueConverter( this.rdfWriter, this.rdfConversionBuffer, this.propertyRegister); UnsupportedEntityIdValue value = mapper.readValue( "{\"type\":\"wikibase-entityid\",\"value\":{\"entity-type\":\"funky\",\"id\":\"Z343\"}}", UnsupportedEntityIdValueImpl.class); PropertyIdValue propertyIdValue = objectFactory.getPropertyIdValue( "P569", "http://www.wikidata.org/entity/"); Value valueURI = valueConverter.getRdfValue(value, propertyIdValue, false); this.rdfWriter.finish(); assertTrue(valueURI instanceof BNode); } } Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/000077500000000000000000000000001444772566300223065ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/BasicDeclarations.rdf000066400000000000000000000065231444772566300263630ustar00rootroot00000000000000 a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/EmptyPropertyDocument.rdf000066400000000000000000000016611444772566300273510ustar00rootroot00000000000000 a ; ; ; ; ; ; ; ; ; ; .Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/GlobeCoordinatesValue.rdf000066400000000000000000000007701444772566300272270ustar00rootroot00000000000000 a ; "5.1033333333333E1"^^ ; "1.3733333333333E1"^^ ; "1.0E-1"^^ ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/InterPropertyLinks.rdf000066400000000000000000000020521444772566300266310ustar00rootroot00000000000000 . . . . . . . . . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/ItemDocument.rdf000066400000000000000000000157071444772566300254120ustar00rootroot00000000000000 a ; "bar"@lc2 , "foo"@lc ; "it's bar"@lc2 , "it's foo"@lc ; "foo"@lc , "bar"@lc ; ; ; . a , , ; . a , ; ; "0306-11-03T13:07:06Z"^^ ; ; "0306-11-03T13:07:06Z"^^ ; ; . a , ; "TestString" ; . a ; "0306-11-03T13:07:06Z"^^ ; "32"^^ ; "0"^^ ; . a ; _:node1a5d5pvl8x1 . _:node1a5d5pvl8x1 a ; ; . a ; _:node1a5d5pvl8x3 . _:node1a5d5pvl8x3 a ; ; . a ; _:node1a5d5pvl8x5 . _:node1a5d5pvl8x5 a ; ; . a ; _:node1a5d5pvl8x7 . _:node1a5d5pvl8x7 a ; ; . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a . a ; "0306-11-03T13:07:06Z"^^ ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/ItemDocumentUnknownPropertyTypes.rdf000066400000000000000000000134271444772566300315610ustar00rootroot00000000000000 a ; "foo"@lc , "bar"@lc2 ; "it's foo"@lc , "it's bar"@lc2 ; "foo"@lc , "bar"@lc ; ; ; . a , ; . a , ; ; "0306-11-03T13:07:06Z"^^ ; ; "0306-11-03T13:07:06Z"^^ ; ; . a , ; "TestString" ; . a ; "0306-11-03T13:07:06Z"^^ ; "32"^^ ; "0"^^ ; . a ; _:node1c3irgg7cx9 . _:node1c3irgg7cx9 a ; ; . a . a . a . a . a . a . a . a ; _:node1c3irgg7cx11 . _:node1c3irgg7cx11 a ; ; . a . a . a . a . a . a . a . a ; _:node1c3irgg7cx13 . _:node1c3irgg7cx13 a ; ; . a . a . a . a . a . a . a . a ; "0306-11-03T13:07:06Z"^^ ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/Namespaces.rdf000066400000000000000000000007051444772566300250640ustar00rootroot00000000000000@prefix wd: . @prefix wikibase: . @prefix rdf: . @prefix rdfs: . @prefix owl: . @prefix xsd: . @prefix schema: . @prefix skos: . @prefix prov: . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/QuantityValue.rdf000066400000000000000000000007561444772566300256260ustar00rootroot00000000000000 a ; "100.0"^^ ; "100.0"^^ ; "100.0"^^ ; . 
Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/SiteLinks.rdf000066400000000000000000000006061444772566300247120ustar00rootroot00000000000000 a ; ; "de" ; . a ; ; "en" . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/Statement.rdf000066400000000000000000000007301444772566300247470ustar00rootroot00000000000000 . a ; "TestString" ; ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/StatementCplx.rdf000066400000000000000000000010071444772566300255740ustar00rootroot00000000000000 . a ; ; "Point(13.0 51.0)"^^ ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/StatementNoValue.rdf000066400000000000000000000005121444772566300262370ustar00rootroot00000000000000 . a , ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/StatementRankTriple.rdf000066400000000000000000000001621444772566300267420ustar00rootroot00000000000000 . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/StatementRankTripleBest.rdf000066400000000000000000000003521444772566300275610ustar00rootroot00000000000000 . .Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/TimeValue.rdf000066400000000000000000000007451444772566300247040ustar00rootroot00000000000000 a ; "2008-01-01T00:00:00Z"^^ ; "9"^^ ; "0"^^ ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/UnboundedQuantityValue.rdf000066400000000000000000000004431444772566300274630ustar00rootroot00000000000000 a ; "100.0"^^ ; . Wikidata-Toolkit-0.14.6/wdtk-rdf/src/test/resources/completeRDFDocument.rdf000066400000000000000000000155171444772566300266570ustar00rootroot00000000000000@prefix wd: . @prefix wdv: . @prefix wikibase: . @prefix rdf: . @prefix rdfs: . @prefix owl: . @prefix xsd: . @prefix schema: . @prefix skos: . @prefix prov: . prov:wasDerivedFrom a owl:ObjectProperty . wikibase:timeValue a owl:DatatypeProperty . wikibase:quantityLowerBound a owl:DatatypeProperty . wikibase:quantityUnit a owl:ObjectProperty . wikibase:qualifier a owl:ObjectProperty . wikibase:geoLatitude a owl:DatatypeProperty . wikibase:Property a owl:Class . schema:inLanguage a owl:DatatypeProperty . wikibase:qualifierValue a owl:ObjectProperty . wikibase:quantityUpperBound a owl:DatatypeProperty . wikibase:directClaim a owl:ObjectProperty . wikibase:geoGlobe a owl:ObjectProperty . wikibase:timeTimezone a owl:DatatypeProperty . wikibase:claim a owl:ObjectProperty . wikibase:timePrecision a owl:DatatypeProperty . schema:about a owl:ObjectProperty . wikibase:propertyType a owl:ObjectProperty . wikibase:timeCalendarModel a owl:ObjectProperty . wikibase:geoPrecision a owl:DatatypeProperty . wikibase:geoLongitude a owl:DatatypeProperty . wikibase:GlobecoordinateValue a owl:Class . wikibase:Reference a owl:Class . wikibase:reference a owl:ObjectProperty . wikibase:referenceValue a owl:ObjectProperty . schema:description a owl:DatatypeProperty . schema:Article a owl:Class . wikibase:novalue a owl:ObjectProperty . skos:altLabel a owl:DatatypeProperty . wikibase:rank a owl:ObjectProperty . wikibase:QuantityValue a owl:Class . wikibase:statementProperty a owl:ObjectProperty . wikibase:Item a owl:Class . wikibase:TimeValue a owl:Class . wikibase:Statement a owl:Class . wikibase:statementValue a owl:ObjectProperty . wikibase:quantityAmount a owl:DatatypeProperty . wikibase:BestRank a owl:Class . wikibase:badge a owl:ObjectProperty . a wikibase:Item ; rdfs:label "bar"@lc2 , "foo"@lc ; schema:description "it's bar"@lc2 , "it's foo"@lc ; skos:altLabel "foo"@lc , "bar"@lc ; ; ; . a wikibase:Statement , , ; wikibase:rank wikibase:NormalRank . 
a wikibase:Statement , ; wdv:9e335050c59079e3f3c216b8cf077d07 ; "0306-11-03T13:07:06Z"^^xsd:dateTime ; wdv:9e335050c59079e3f3c216b8cf077d07 ; "0306-11-03T13:07:06Z"^^xsd:dateTime ; prov:wasDerivedFrom ; wikibase:rank wikibase:NormalRank . a wikibase:Statement , ; "TestString" ; wikibase:rank wikibase:NormalRank . wdv:9e335050c59079e3f3c216b8cf077d07 a wikibase:TimeValue ; wikibase:timeValue "0306-11-03T13:07:06Z"^^xsd:dateTime ; wikibase:timePrecision "32"^^xsd:int ; wikibase:timeTimezone "0"^^xsd:int ; wikibase:timeCalendarModel wd:Q1985727 . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:ObjectProperty . a owl:DatatypeProperty . a owl:DatatypeProperty . a owl:DatatypeProperty . a owl:DatatypeProperty . a owl:DatatypeProperty . a owl:DatatypeProperty . a wikibase:Reference ; "0306-11-03T13:07:06Z"^^ ; wdv:9e335050c59079e3f3c216b8cf077d07 . a owl:Class ; owl:complementOf _:node1a5d6h9m9x1 . _:node1a5d6h9m9x1 a owl:Restriction ; owl:onProperty ; owl:someValuesFrom owl:Thing . a owl:Class ; owl:complementOf _:node1a5d6h9m9x3 . _:node1a5d6h9m9x3 a owl:Restriction ; owl:onProperty ; owl:someValuesFrom owl:Thing . a owl:Class ; owl:complementOf _:node1a5d6h9m9x5 . _:node1a5d6h9m9x5 a owl:Restriction ; owl:onProperty ; owl:someValuesFrom owl:Thing . a owl:Class ; owl:complementOf _:node1a5d6h9m9x7 . _:node1a5d6h9m9x7 a owl:Restriction ; owl:onProperty ; owl:someValuesFrom xsd:string . Wikidata-Toolkit-0.14.6/wdtk-storage/000077500000000000000000000000001444772566300174175ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/LICENSE.txt000066400000000000000000000261351444772566300212510ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
Wikidata-Toolkit-0.14.6/wdtk-storage/pom.xml000066400000000000000000000011031444772566300207300ustar00rootroot00000000000000<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>

	<parent>
		<groupId>org.wikidata.wdtk</groupId>
		<artifactId>wdtk-parent</artifactId>
		<version>0.14.6</version>
	</parent>

	<artifactId>wdtk-storage</artifactId>
	<packaging>jar</packaging>

	<name>Wikidata Toolkit Storage</name>
	<description>WDTK support for managing large collections of Wikibase data</description>
</project>
Wikidata-Toolkit-0.14.6/wdtk-storage/src/000077500000000000000000000000001444772566300202065ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/000077500000000000000000000000001444772566300211325ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/000077500000000000000000000000001444772566300220535ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/000077500000000000000000000000001444772566300226425ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/000077500000000000000000000000001444772566300244375ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300254105ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/000077500000000000000000000000001444772566300270545ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructures/000077500000000000000000000000001444772566300321315ustar00rootroot00000000000000BitVector.java000066400000000000000000000040621444772566300346200ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures;

import java.util.Iterator;

/*
 * #%L
 * Wikidata Toolkit Data Model
 * %%
 * Copyright (C) 2014 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * Interface for a bit vector.
 *
 * @author Julian Mendez
 */
public interface BitVector {

	/**
	 * Returns true if and only if the specified object is also a
	 * BitVector and both contain the same bits in the same order.
	 *
	 * @param o
	 *            the object to be compared with this BitVector
	 *
	 * @return true if and only if the specified object is also a
	 *         BitVector and both contain the same bits in the same order
	 */
	@Override
	boolean equals(Object o);

	/**
	 * This is the "access" method of bit vectors.
	 *
	 * @return value of a bit at position
	 *
	 * @throws IndexOutOfBoundsException
	 *             if the position is out of range
	 */
	boolean getBit(long position);

	/**
	 * @return size of this bit vector
	 */
	long size();

	/**
	 * @return an iterator for this bit vector
	 */
	Iterator<Boolean> iterator();

	/**
	 * Appends a bit to this bit vector.
	 *
	 * @return true if the element was successfully added
	 */
	boolean addBit(boolean bit);

	/**
	 * Sets a bit at a particular position.
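	 * <p>
	 * Illustrative use (a sketch, not part of the original documentation):
	 * </p>
	 * <pre>
	 * bitVector.addBit(false); // append one bit; size grows by one
	 * bitVector.setBit(bitVector.size() - 1, true); // overwrite that bit in place
	 * </pre>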
* * @param position * position * @param bit * bit * @throws IndexOutOfBoundsException * if the position is out of range */ void setBit(long position, boolean bit); } BitVectorImpl.java000066400000000000000000000216531444772566300354470ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Iterator; import org.apache.commons.lang3.Validate; /** * Default implementation of {@link BitVector}. This implementation contains an * array of long, and each long stores 64 bits. When more space is * needed, the internal array grows exponentially. This bit vector is * flexible, which means that: *
 * <ol>
 * <li>it is always possible to store a bit in any non-negative position
 * without explicitly resizing the vector,</li>
 * <li>any non-negative position outside the bit vector can be retrieved and
 * contains a false.</li>
 * </ol>
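 * <p>
 * A minimal usage sketch of this flexible behavior:
 * <pre>
 * BitVectorImpl bv = new BitVectorImpl();
 * bv.setBit(100, true); // grows the vector to size 101; bits 0..99 are false
 * bv.getBit(5000);      // returns false: positions beyond the size read as false
 * </pre>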
    * * @author Julian Mendez * */ public class BitVectorImpl implements BitVector, Iterable { static final int GROWTH_FACTOR = 2; static final int LG_WORD_SIZE = 6; static final int MINIMUM_ARRAY_SIZE = 1; static final int WORD_MASK = 0x3F; static final int WORD_SIZE = 0x40; long[] arrayOfBits; int hashCode; long size; boolean validHashCode = false; /** * Constructor of a bit vector of size 0. * */ public BitVectorImpl() { this.arrayOfBits = new long[MINIMUM_ARRAY_SIZE]; } /** * Copy constructor of a bit vector. * * @param bitVector * bit vector */ public BitVectorImpl(BitVector bitVector) { Validate.notNull(bitVector, "Bit vector cannot be null."); if (bitVector instanceof BitVectorImpl) { BitVectorImpl other = (BitVectorImpl) bitVector; this.arrayOfBits = new long[other.arrayOfBits.length]; this.size = bitVector.size(); System.arraycopy(other.arrayOfBits, 0, this.arrayOfBits, 0, other.arrayOfBits.length); } else { this.arrayOfBits = new long[getMinimumArraySize(bitVector.size())]; this.size = bitVector.size(); for (long index = 0; index < bitVector.size(); index++) { setBit(index, bitVector.getBit(index)); } } } /** * Constructor of a bit vector of size initialSize. The bit vector * contains false at all indexes. * * @param initialSize * initial size of this bit vector * */ public BitVectorImpl(long initialSize) { if (initialSize < 0) { throw new IllegalArgumentException("Wrong bit vector size '" + initialSize + "'. Bit vector size must be non-negative."); } this.arrayOfBits = new long[getMinimumArraySize(initialSize)]; this.size = initialSize; } /** * @param position * position * @param word * word * @return the value of a bit at a specific position of a word */ static boolean getBitInWord(byte position, long word) { if ((position < 0) || (position >= WORD_SIZE)) { throw new IndexOutOfBoundsException(); } return ((word >> position) & 1) == 1; } /** * @param bitVectorSize * bit vector sizes * @return the minimum array size for a bit vector of bitVectorSize */ static int getMinimumArraySize(long bitVectorSize) { return Math.max(MINIMUM_ARRAY_SIZE, getSizeInWords(bitVectorSize)); } /** * @param position * position * @param word * word * @param bit * bit * @return the resulting word of setting a bit at a specific * position of a word */ static long setBitInWord(byte position, boolean bit, long word) { if (getBitInWord(position, word) == bit) { return word; } else { return word ^ (((long) 1) << position); } } /** * @param word * word to be rendered * @return a string representation of a word with the least * significant bit first */ static String wordToString(long word) { String binaryDigits = String.format("%" + WORD_SIZE + "s", Long.toBinaryString(word)).replace(' ', '0'); return (new StringBuilder(binaryDigits)).reverse().toString(); } /** * @param sizeInBits * size in bits * @return the size in words */ static int getSizeInWords(long sizeInBits) { return (int) ((sizeInBits >> LG_WORD_SIZE) + 1); } @Override public boolean addBit(boolean bit) { this.validHashCode = false; this.size++; if (getSizeInWords(this.size) > this.arrayOfBits.length) { resizeArray(GROWTH_FACTOR * this.arrayOfBits.length); } setBit(this.size - 1, bit); return true; } /** * @param position * position * @throws IndexOutOfBoundsException * if the position is a negative number */ void assertNonNegativePosition(long position) throws IndexOutOfBoundsException { if ((position < 0)) { throw new IndexOutOfBoundsException("Position " + position + " is out of bounds."); } } /** * Ensures that the bit vector is large enough to 
contain an element at the * given position. If the bit vector needs to be enlarged, new * false elements are added. * * @param position * position */ void ensureSize(long position) { assertNonNegativePosition(position); if (position >= this.size) { this.validHashCode = false; long newSize = position + 1; int arrayOfBitsLength = this.arrayOfBits.length; int sizeInWords = getSizeInWords(newSize); while (sizeInWords > arrayOfBitsLength) { arrayOfBitsLength = GROWTH_FACTOR * arrayOfBitsLength; } resizeArray(arrayOfBitsLength); this.size = newSize; } } /** * @return a hash code for the current bit vector */ int computeHashCode() { int ret = (int) this.size; int arraySize = (int) (this.size >> LG_WORD_SIZE); for (int i = 0; i < arraySize; i++) { ret += (0x1F * this.arrayOfBits[i]); } long lastWordStart = (arraySize << LG_WORD_SIZE); long remainingBits = this.size - lastWordStart; long lastWord = 0; for (int i = 0; i < remainingBits; i++) { lastWord = setBitInWord((byte) i, getBit(i + lastWordStart), lastWord); } ret += (0x1F * lastWord); return ret; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof BitVector)) { return false; } BitVector other = (BitVector) obj; if (this.size != other.size()) { return false; } long comparisonFirstPos = 0; if (other instanceof BitVectorImpl) { // if the other bit vector has the same representation, it is // possible to compare their arrays of bits BitVectorImpl otherBitVectorImpl = (BitVectorImpl) other; int arraySize = (int) (this.size >> LG_WORD_SIZE); // only full words can be compared, because two bit // vectors that are equal can have different values in the unused // bits for (int i = 0; i < arraySize; i++) { if (this.arrayOfBits[i] != otherBitVectorImpl.arrayOfBits[i]) { return false; } } comparisonFirstPos = ((long) arraySize << LG_WORD_SIZE); } for (long i = comparisonFirstPos; i < this.size; i++) { // bit-by-bit comparison of the remaining bits if (getBit(i) != other.getBit(i)) { return false; } } return true; } @Override public boolean getBit(long position) { assertNonNegativePosition(position); if (position >= this.size) { return false; } int arrayPos = (int) (position >> LG_WORD_SIZE); byte wordPos = (byte) (position & WORD_MASK); return getBitInWord(wordPos, this.arrayOfBits[arrayPos]); } @Override public int hashCode() { if (!this.validHashCode) { this.hashCode = computeHashCode(); this.validHashCode = true; } return this.hashCode; } @Override public Iterator iterator() { return new BitVectorIterator(this); } /** * Resizes the array that represents this bit vector. * * @param newArraySize * new array size */ void resizeArray(int newArraySize) { long[] newArray = new long[newArraySize]; System.arraycopy(this.arrayOfBits, 0, newArray, 0, Math.min(this.arrayOfBits.length, newArraySize)); this.arrayOfBits = newArray; } @Override public void setBit(long position, boolean bit) { ensureSize(position); this.validHashCode = false; int arrayPos = (int) (position >> LG_WORD_SIZE); byte wordPos = (byte) (position & WORD_MASK); this.arrayOfBits[arrayPos] = setBitInWord(wordPos, bit, this.arrayOfBits[arrayPos]); } @Override public long size() { return this.size; } @Override public String toString() { StringBuilder sb = new StringBuilder(); for (long position = 0; position < this.size;) { sb.append(getBit(position) ? 
"1" : "0"); position++; } return sb.toString(); } } BitVectorIterator.java000066400000000000000000000037431444772566300363370ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Storage * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Iterator; import java.util.NoSuchElementException; import org.apache.commons.lang3.Validate; /** * This is an iterator for a bit vector. * * @author Julian Mendez */ public class BitVectorIterator implements Iterator { final BitVector bitVector; int pointer = 0; /** * Constructs an iterator for a bit vector. * * @param bitVector * bit vector */ public BitVectorIterator(BitVector bitVector) { Validate.notNull(bitVector, "Bit vector cannot be null."); this.bitVector = bitVector; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof BitVectorIterator)) { return false; } BitVectorIterator other = (BitVectorIterator) o; return (this.pointer == other.pointer) && this.bitVector.equals(other.bitVector); } @Override public int hashCode() { return this.pointer + (0x1F * this.bitVector.hashCode()); } @Override public boolean hasNext() { return this.pointer < this.bitVector.size(); } @Override public Boolean next() { if (this.pointer >= this.bitVector.size()) { throw new NoSuchElementException(); } return this.bitVector.getBit(this.pointer++); } @Override public void remove() { throw new UnsupportedOperationException(); } } CountBitsArray.java000066400000000000000000000112711444772566300356300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.ArrayUtils; /** * This class keeps the count of occurrences of true values in a * bit vector. This implementation divides the bit vector in blocks of equal * size. It keeps an array with the count of true values present in * each block. *

    * For example, given the bit vector: 10010 (0 is false, 1 is * true), with a block size of 2, the array contains: [1, 2, 2]. * The first block contains 1 true value, the second block contains * 1 more true value, in total 2. The third block is incomplete, * since it has only one bit, and it does not contain more true * values. *
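 * <p>
 * Illustrative sketch of the lookup that countBits performs below, using the
 * example above (bit vector 10010, block size 2, countArray = [1, 2, 2]):
 * <pre>
 * // countBits(true, 4):
 * //   blockNumber = 4 / 2 = 2
 * //   trueValues  = countArray[2 - 1] = 2  // the two complete blocks
 * //   scan positions 4..4: bit 4 is 0, so trueValues stays 2
 * // countBits(false, 4) = (4 + 1) - 2 = 3
 * </pre>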

    * For efficiency reasons, this class assumes that the bit vector is unmodified. * Any modification of the bit vector needs to be notified in * {@link FindPositionArray#update()}. * * @see RankedBitVectorImpl * * @author Julian Mendez */ class CountBitsArray { /** * The bit vector, which is assumed unmodified. */ final BitVector bitVector; /** * The size of each block. */ final int blockSize; /** * This array contains the number of true values found in each * block. */ long[] countArray; /** * If this value is true, there is a new bit vector and the * array needs to be updated. */ boolean hasChanged; /** * Creates a count array with a given block size. * * @param blockSize * block size; this value must be a positive number * @throws IllegalArgumentException * if the block size is not a positive number */ public CountBitsArray(BitVector bitVector, int blockSize) { if (blockSize < 1) { throw new IllegalArgumentException( "The block size must be a positive number. The received value was: " + blockSize + "."); } this.bitVector = bitVector; this.hasChanged = true; this.blockSize = blockSize; } /** * Returns the number of occurrences of bit up to position. * * @return number of occurrences of bit up to position */ public long countBits(boolean bit, long position) { updateCount(); int blockNumber = (int) (position / this.blockSize); long mark = ((long) blockNumber) * this.blockSize; long trueValues = 0; if (blockNumber > 0) { trueValues = this.countArray[blockNumber - 1]; } for (long index = mark; index <= position; index++) { trueValues += this.bitVector.getBit(index) ? 1 : 0; } return bit ? trueValues : ((position + 1) - trueValues); } /** * Returns the block size. * * @return the block size */ int getBlockSize() { return this.blockSize; } /** * Returns a list of Long that contains the indices of positions computed * according to the given bit vector. * * @return a list of Long that contains the indices of positions computed * according to the given bit vector */ List getCountList() { List ret = new ArrayList<>(); long lastValue = 0; int positionInBlock = 0; for (long index = 0; index < this.bitVector.size(); index++) { if (this.bitVector.getBit(index)) { lastValue++; } positionInBlock++; if (positionInBlock == this.blockSize) { ret.add(lastValue); positionInBlock = 0; } } if (positionInBlock > 0) { ret.add(lastValue); } return ret; } @Override public String toString() { updateCount(); return Arrays.toString(this.countArray); } /** * Notifies this object that the bit vector has changed, and therefore, the * computed internal array must be updated. */ public void update() { this.hasChanged = true; } /** * This method updates the internal array only if the bit vector has been * changed since the last update or creation of this class. */ void updateCount() { if (this.hasChanged) { this.countArray = ArrayUtils.toPrimitive(getCountList().toArray( new Long[0])); this.hasChanged = false; } } } FindPositionArray.java000066400000000000000000000156021444772566300363250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.ArrayUtils; /** * This class keeps the positions where the n-th bit value can be * found in a bit vector (bit can be true or * false). This class uses an array to store these positions. Each * cell of the array covers a block in the bit vector, and to find the positions * in this block, the method iterates on the bit vector. *

    * For example, let us suppose we have the following bit vector: 11010001 (0 is * false and 1 is true), with a block size of 2. For * the case of true, the array stores the position where * true is found for the zeroth occurrence, the second occurrence, * the fourth occurrence, and so on and so forth. The first cell of the array * contains -1. This convention comes in handy because the zeroth occurrence is * undefined and the first occurrence needs to be found at the following * position of the bit vector, i.e. at position 0. However, since the zeroth occurrence is * not defined, the {@link #findPosition(long)} method returns * {@link RankedBitVector#NOT_FOUND} for that value. *

    * The array for true is [-1, 1, 7]. The second occurrence of * true is at position 1 in the bit vector. The fourth occurrence of * true is at position 7 in the bit vector. Analogously, the array * for false is [-1, 4, 6]. The positions of false are * 4 for the second occurrence, and 6 for the fourth occurrence. *

    * Please observe that the blocks have the same size in number of occurrences, * but may cover different numbers of positions in the bit vector. *
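 * <p>
 * Illustrative trace of findPosition below for this example (bit = true,
 * positionArray = [-1, 1, 7], block size 2):
 * <pre>
 * // findPosition(3):
 * //   findPos = 3 / 2 = 1, so start from positionArray[1] = 1
 * //   leftOccurrences = 3 - (1 * 2) = 1
 * //   scan from position 2: the next true bit is at position 3, the answer
 * </pre>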

    * For efficiency reasons, this class assumes that the bit vector is unmodified. * Any modification of the bit vector needs to be notified in * {@link FindPositionArray#update()}. * * @see RankedBitVectorImpl * * @author Julian Mendez */ class FindPositionArray { /** * Value to be consider in the occurrences. */ final boolean bit; /** * The bit vector, which is assumed unmodified. */ final BitVector bitVector; /** * This is the size of each block of occurrences. */ final int blockSize; /** * If this value is true, there is a new bit vector and the * array needs to be updated. */ boolean hasChanged; /** * This array contains the position. */ long[] positionArray; /** * Constructs a new array using a given block size of occurrences. * * @param bitVector * bit vector * @param bit * bit * @param blockSizeGTE64 * block size; this value must be greater than or equal to 64. * @throws IllegalArgumentException * if block size is less than 64 */ public FindPositionArray(BitVector bitVector, boolean bit, int blockSizeGTE64) { this(blockSizeGTE64, bitVector, bit); if (blockSizeGTE64 < 0x40) { throw new IllegalArgumentException( "The block size must be greater than or equal to " + 0x40 + ". The received value was " + blockSizeGTE64 + "."); } } /** * Constructs a new array using a given block size of occurrences. This * constructor does not restrict the block size and it should be used only * as an auxiliary constructor or for testing. * * @param bitVector * bit vector * @param bit * bit * @param blockSize * block size; this value must be a positive number. */ FindPositionArray(int blockSize, BitVector bitVector, boolean bit) { if (blockSize < 1) { throw new IllegalArgumentException( "The block size must be a positive number. The received value was: " + blockSize + "."); } this.bitVector = bitVector; this.hasChanged = true; this.bit = bit; this.blockSize = blockSize; } /** * Returns the position for a given number of occurrences or NOT_FOUND if * this value is not found. * * @param nOccurrence * number of occurrences * @return the position for a given number of occurrences or NOT_FOUND if * this value is not found */ public long findPosition(long nOccurrence) { updateCount(); if (nOccurrence <= 0) { return RankedBitVector.NOT_FOUND; } int findPos = (int) (nOccurrence / this.blockSize); if (findPos < this.positionArray.length) { long pos0 = this.positionArray[findPos]; long leftOccurrences = nOccurrence - (findPos * this.blockSize); if (leftOccurrences == 0) { return pos0; } for (long index = pos0 + 1; index < this.bitVector.size(); index++) { if (this.bitVector.getBit(index) == this.bit) { leftOccurrences--; } if (leftOccurrences == 0) { return index; } } } return RankedBitVector.NOT_FOUND; } /** * Returns a list of Long that contains the indices of positions computed * according to the given bit vector. * * @return a list of Long that contains the indices of positions computed * according to the given bit vector */ List getPositionList() { List ret = new ArrayList<>(); ret.add(-1L); /* * This -1 is pointing to the previous position of the first valid * position of the bit vector, which starts at index 0. Since the zeroth * occurrence of a bit is undefined, the first occurrence can be at * position 0, or later. 
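		 * For instance, with the bit vector 11010001 from the javadoc example
		 * (bit = true, block size 2), the loop below records index 1 when the
		 * count first reaches 2 and index 7 when it reaches 2 again, yielding
		 * [-1, 1, 7].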
*/ long count = 0; for (long index = 0; index < this.bitVector.size(); index++) { if (this.bitVector.getBit(index) == this.bit) { count++; } if (count >= this.blockSize) { count = 0; ret.add(index); } } return ret; } @Override public String toString() { updateCount(); return Arrays.toString(this.positionArray); } /** * Notifies this object that the bit vector has changed, and therefore, the * computed internal array must be updated. */ public void update() { this.hasChanged = true; } /** * This method updates the internal array only if the bit vector has been * changed since the last update or creation of this class. */ void updateCount() { if (this.hasChanged) { this.positionArray = ArrayUtils.toPrimitive(getPositionList() .toArray(new Long[0])); this.hasChanged = false; } } } RankedBitVector.java000066400000000000000000000042721444772566300357500ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Storage * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A ranked bit vector provides operations to compute: *

 * <ul>
 * <li>rank ({@link RankedBitVector#countBits}): number of occurrences of a bit
 * at a certain position</li>
 * <li>select ({@link RankedBitVector#findPosition}): position of the
 * n-th occurrence of a certain bit</li>
 * </ul>
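 * <p>
 * A minimal usage sketch, using the default implementation
 * {@link RankedBitVectorImpl}:
 * <pre>
 * RankedBitVector v = new RankedBitVectorImpl();
 * v.addBit(true);
 * v.addBit(false);
 * v.addBit(true);
 * v.addBit(true);           // vector is now 1011
 * v.countBits(true, 2);     // 2: positions 0..2 hold 1, 0, 1
 * v.findPosition(true, 3);  // 3: the third true bit is at position 3
 * v.findPosition(false, 2); // NOT_FOUND: there is only one false bit
 * </pre>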
    * * @author Julian Mendez */ public interface RankedBitVector extends BitVector { /** * This is a distinguished value, which denotes that the position of a * n-th occurrence of a bit was not found. This value is a * negative number. * * @see #findPosition(boolean, long) */ long NOT_FOUND = -1; /** * This is the "rank" method of bit vectors. This method returns * the number of occurrences of bit up to position. * * @param bit * bit * @param position * position * @return number of occurrences of bit at position */ long countBits(boolean bit, long position); /** * This is the "select" method of bit vectors. This method returns * the position of the n-th occurrence (nOccurrence) of * bit or NOT_FOUND if there are not enough occurrences. * * * @param bit * bit * @param nOccurrence * number of occurrences * @return position of the n-th occurrence (nOccurrence) of * bit or NOT_FOUND if there are not enough occurrences */ long findPosition(boolean bit, long nOccurrence); } RankedBitVectorImpl.java000066400000000000000000000134151444772566300365710ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Iterator; /** * Default implementation of {@link RankedBitVector}. This implementation uses * auxiliary classes to have efficient performance for the methods of a ranked * bit vector. Hence, {@link #countBits(boolean, long)} uses an instance of * {@link CountBitsArray} and {@link #findPosition(boolean, long)} uses two * instances of {@link FindPositionArray}. * * @see CountBitsArray * * @see FindPositionArray * * @author Julian Mendez */ public class RankedBitVectorImpl implements RankedBitVector, Iterable { static final int defaultCountBitsBlockSize = 0x400; static final int defaultFindPositionBlockSize = 0x2000; final BitVectorImpl bitVector; final CountBitsArray countBitsArray; final FindPositionArray findPositionOfFalse; final FindPositionArray findPositionOfTrue; /** * Constructor of a ranked bit vector of size 0. */ public RankedBitVectorImpl() { this.bitVector = new BitVectorImpl(); this.countBitsArray = new CountBitsArray(this.bitVector, defaultCountBitsBlockSize); this.findPositionOfFalse = new FindPositionArray(this.bitVector, false, defaultFindPositionBlockSize); this.findPositionOfTrue = new FindPositionArray(this.bitVector, true, defaultFindPositionBlockSize); } /** * Copy constructor of a ranked bit vector. 
* * @param bitVector * bit vector */ public RankedBitVectorImpl(BitVector bitVector) { this.bitVector = new BitVectorImpl(bitVector); if (bitVector instanceof RankedBitVectorImpl) { this.countBitsArray = new CountBitsArray(this.bitVector, ((RankedBitVectorImpl) bitVector).countBitsArray .getBlockSize()); } else { this.countBitsArray = new CountBitsArray(this.bitVector, defaultCountBitsBlockSize); } this.findPositionOfFalse = new FindPositionArray(this.bitVector, false, defaultFindPositionBlockSize); this.findPositionOfTrue = new FindPositionArray(this.bitVector, true, defaultFindPositionBlockSize); } /** * Constructor of a ranked bit vector of size initialSize. The bit * vector contains false at all indexes. * * @param initialSize * initial size of this ranked bit vector */ public RankedBitVectorImpl(long initialSize) { this.bitVector = new BitVectorImpl(initialSize); this.countBitsArray = new CountBitsArray(this.bitVector, defaultCountBitsBlockSize); this.findPositionOfFalse = new FindPositionArray(this.bitVector, false, defaultFindPositionBlockSize); this.findPositionOfTrue = new FindPositionArray(this.bitVector, true, defaultFindPositionBlockSize); } /** * Constructor of a ranked bit vector of size initialSize and block * size blockSize. The bit vector contains false at all * indexes. * * @param initialSize * initial size of this ranked bit vector * @param countBlockSize * block size to count number of occurrences of a value; this * value must be a positive number * @param findPositionBlockSize * block size to find the position of the n-th occurrence * of a value; this value must be greater than or equal to 64 * @throws IllegalArgumentException * if any of the block sizes is too small */ public RankedBitVectorImpl(long initialSize, int countBlockSize, int findPositionBlockSize) { this.bitVector = new BitVectorImpl(initialSize); this.countBitsArray = new CountBitsArray(this.bitVector, countBlockSize); this.findPositionOfFalse = new FindPositionArray(this.bitVector, false, findPositionBlockSize); this.findPositionOfTrue = new FindPositionArray(this.bitVector, true, findPositionBlockSize); } @Override public boolean addBit(boolean bit) { boolean ret = this.bitVector.addBit(bit); notifyObservers(); return ret; } @Override public long countBits(boolean bit, long position) { return this.countBitsArray.countBits(bit, position); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof BitVector)) { return false; } return this.bitVector.equals(obj); } @Override public long findPosition(boolean bit, long nOccurrence) { if (nOccurrence <= 0) { return NOT_FOUND; } return bit ? 
this.findPositionOfTrue.findPosition(nOccurrence) : this.findPositionOfFalse.findPosition(nOccurrence); } @Override public boolean getBit(long position) { return this.bitVector.getBit(position); } @Override public int hashCode() { return this.bitVector.hashCode(); } @Override public Iterator iterator() { return this.bitVector.iterator(); } void notifyObservers() { this.countBitsArray.update(); this.findPositionOfFalse.update(); this.findPositionOfTrue.update(); } @Override public void setBit(long position, boolean bit) { boolean oldBit = getBit(position); if (oldBit != bit) { this.bitVector.setBit(position, bit); notifyObservers(); } } @Override public long size() { return this.bitVector.size(); } @Override public String toString() { return this.bitVector.toString(); } } package-info.java000066400000000000000000000014431444772566300352430ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/main/java/org/wikidata/wdtk/storage/datastructures/** * Provides classes of basic data structures. * * @author Julian Mendez */ package org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Storage * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/000077500000000000000000000000001444772566300211655ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/000077500000000000000000000000001444772566300221065ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/000077500000000000000000000000001444772566300226755ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/000077500000000000000000000000001444772566300244725ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300254435ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/wdtk/storage/000077500000000000000000000000001444772566300271075ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/wdtk/storage/datastructures/000077500000000000000000000000001444772566300321645ustar00rootroot00000000000000BitVectorImplTest.java000066400000000000000000000206441444772566300363410ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.junit.Assert; import org.junit.Test; /** * Test class for {@link BitVectorImpl}. * * @author Julian Mendez * */ public class BitVectorImplTest { /** * Asserts that two bit vectors are equal, and also that the first bit * vector is equal to itself. * * @param bv0 * one bit vector * @param bv1 * another bit vector */ void assertEqualsForBitVector(BitVector bv0, BitVector bv1) { Assert.assertEquals(bv0, bv0); Assert.assertEquals(bv0, bv1); Assert.assertEquals(bv1, bv0); Assert.assertEquals(bv0.hashCode(), bv1.hashCode()); } @Test public void testAdd() { BitVectorImpl bv = new BitVectorImpl(); Assert.assertEquals(0, bv.size()); bv.addBit(true); Assert.assertEquals(1, bv.size()); Assert.assertTrue(bv.getBit(0)); bv.addBit(false); Assert.assertEquals(2, bv.size()); Assert.assertFalse(bv.getBit(1)); bv.addBit(false); Assert.assertEquals(3, bv.size()); Assert.assertFalse(bv.getBit(2)); for (int i = 3; i < 0x1000; i++) { boolean value = (i % 3) == 0; bv.addBit(value); Assert.assertEquals(value, bv.getBit(i)); } } @Test public void testEmptyBitVector() { BitVectorImpl bv0 = new BitVectorImpl(); BitVector bv1 = new BitVectorImpl(); assertEqualsForBitVector(bv0, bv1); BitVectorImpl bv2 = new BitVectorImpl(0); assertEqualsForBitVector(bv1, bv2); } @Test public void testEqualityAndCopyConstructor() { int aLargeNumber = 0x100000; BitVectorImpl bv0 = new BitVectorImpl(); Assert.assertEquals(bv0, bv0); Assert.assertNotEquals(bv0, new Object()); BitVectorImpl bv1 = new BitVectorImpl(); PseudorandomBooleanGenerator generator = new PseudorandomBooleanGenerator( 0x1234); for (int i = 0; i < aLargeNumber; i++) { boolean value = generator.getPseudorandomBoolean(); bv0.addBit(value); bv1.addBit(value); } assertEqualsForBitVector(bv0, bv1); BitVectorImpl bv2 = new BitVectorImpl(bv1); assertEqualsForBitVector(bv0, bv2); bv1.setBit(0x12345, false); bv2.setBit(0x12345, true); Assert.assertNotEquals(bv1, bv2); Assert.assertNotEquals(bv2, bv1); RankedBitVectorImpl bv3 = new RankedBitVectorImpl(bv2); Assert.assertNotEquals(bv1, bv3); Assert.assertNotEquals(bv3, bv1); } @Test public void testGetBit() { long word = 0; for (byte i = 0; i < 0x40; i++) { Assert.assertFalse(BitVectorImpl.getBitInWord(i, word)); } word = 0x0810F; Assert.assertTrue(BitVectorImpl.getBitInWord((byte) 0, word)); Assert.assertTrue(BitVectorImpl.getBitInWord((byte) 1, word)); Assert.assertTrue(BitVectorImpl.getBitInWord((byte) 2, word)); Assert.assertTrue(BitVectorImpl.getBitInWord((byte) 3, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 4, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 5, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 6, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 7, word)); Assert.assertTrue(BitVectorImpl.getBitInWord((byte) 8, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 9, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 10, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 11, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 12, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 13, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 14, word)); Assert.assertTrue(BitVectorImpl.getBitInWord((byte) 15, word)); Assert.assertFalse(BitVectorImpl.getBitInWord((byte) 16, word)); } @Test public void testHashCode() { { BitVectorImpl bv = new BitVectorImpl(); Assert.assertEquals(0, 
bv.hashCode()); bv.addBit(false); Assert.assertEquals(1, bv.hashCode()); } { BitVectorImpl bv = new BitVectorImpl(); Assert.assertEquals(0, bv.hashCode()); bv.addBit(true); Assert.assertEquals(0x20, bv.hashCode()); } } @Test public void testGetOutOfRange() { Assert.assertFalse(new BitVectorImpl().getBit(1)); Assert.assertFalse(new BitVectorImpl().getBit(Long.MAX_VALUE)); } @Test public void testSetOutOfRange() { BitVectorImpl bv = new BitVectorImpl(); Assert.assertEquals(0, bv.size()); bv.setBit(41, true); Assert.assertEquals(42, bv.size()); Assert.assertFalse(bv.getBit(40)); Assert.assertTrue(bv.getBit(41)); Assert.assertFalse(bv.getBit(42)); Assert.assertFalse(bv.getBit(43)); } @Test(expected = IllegalArgumentException.class) public void testInvalidInitialSize() { new BitVectorImpl(-1); } @Test(expected = IndexOutOfBoundsException.class) public void testInvalidPositionSizeGet00() { (new BitVectorImpl()).getBit(-1); } @Test(expected = IndexOutOfBoundsException.class) public void testInvalidPositionSizeGet01() { BitVectorImpl.getBitInWord((byte) -1, 0); } @Test(expected = IndexOutOfBoundsException.class) public void testInvalidPositionSizeGet02() { BitVectorImpl.getBitInWord((byte) 0x40, 0); } @Test(expected = IndexOutOfBoundsException.class) public void testInvalidPositionSizeSet00() { BitVectorImpl.setBitInWord((byte) -1, true, 0); } @Test(expected = IndexOutOfBoundsException.class) public void testInvalidPositionSizeSet01() { BitVectorImpl.setBitInWord((byte) 0x40, false, 0); } @Test public void testSetBit() { long word = 0; for (byte i = 0; i < 0x40; i++) { word = BitVectorImpl.setBitInWord(i, true, word); } for (byte i = 0; i < 0x40; i++) { Assert.assertTrue(BitVectorImpl.getBitInWord(i, word)); } for (byte i = 0; i < 0x40; i++) { word = BitVectorImpl.setBitInWord(i, false, word); } for (byte i = 0; i < 0x40; i++) { Assert.assertFalse(BitVectorImpl.getBitInWord(i, word)); } word = 0x0362; for (byte i = 0; i < 0x40; i++) { boolean value = BitVectorImpl.getBitInWord(i, word); word = BitVectorImpl.setBitInWord(i, value, word); Assert.assertEquals(value, BitVectorImpl.getBitInWord(i, word)); value = !value; word = BitVectorImpl.setBitInWord(i, value, word); Assert.assertEquals(value, BitVectorImpl.getBitInWord(i, word)); value = !value; word = BitVectorImpl.setBitInWord(i, value, word); Assert.assertEquals(value, BitVectorImpl.getBitInWord(i, word)); } Assert.assertEquals(0x0362, word); } @Test public void testSize() { { BitVectorImpl bv = new BitVectorImpl(0x100); Assert.assertEquals(0x100, bv.size()); bv.addBit(false); bv.addBit(true); Assert.assertEquals(0x102, bv.size()); } { BitVectorImpl bv = new BitVectorImpl(); Assert.assertEquals(0, bv.size()); for (int i = 0; i < 0x300; i++) { bv.addBit((i % 5) == 0); Assert.assertEquals(i + 1, bv.size()); } } } @Test public void testToString() { BitVectorImpl bv = new BitVectorImpl(); for (int i = 0; i < 0x10; i++) { boolean value = (i % 3) == 0; bv.addBit(value); } Assert.assertEquals("1001001001001001", bv.toString()); for (int i = 0; i < 0x10; i++) { boolean value = (i % 2) == 0; bv.addBit(value); } Assert.assertEquals("10010010010010011010101010101010", bv.toString()); for (int i = 0; i < 0x20; i++) { bv.setBit(i, bv.getBit(i)); } Assert.assertEquals("10010010010010011010101010101010", bv.toString()); for (int i = 0; i < 0x20; i++) { bv.setBit(i, !bv.getBit(i)); } Assert.assertEquals("01101101101101100101010101010101", bv.toString()); } @Test public void testWordToString() { long word = 0; Assert.assertEquals( 
"0000000000000000000000000000000000000000000000000000000000000000", BitVectorImpl.wordToString(word)); word = -1; Assert.assertEquals( "1111111111111111111111111111111111111111111111111111111111111111", BitVectorImpl.wordToString(word)); word = 0x362; Assert.assertEquals( "0100011011000000000000000000000000000000000000000000000000000000", BitVectorImpl.wordToString(word)); } } BitVectorIteratorTest.java000066400000000000000000000062261444772566300372310ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Storage * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Collections; import java.util.Iterator; import java.util.NoSuchElementException; import org.junit.Assert; import org.junit.Test; /** * Test class for {@link BitVectorIterator}. * * @author Julian Mendez * */ public class BitVectorIteratorTest { @Test public void testHashCode() { Iterator it = (new BitVectorImpl()).iterator(); Assert.assertEquals(0, it.hashCode()); } @Test(expected = NoSuchElementException.class) public void testNoSuchElementException() { new BitVectorImpl().iterator().next(); } @Test(expected = UnsupportedOperationException.class) public void testUnsupportedOperationException() { new BitVectorImpl().iterator().remove(); } @Test public void testVectorWithPseudoRandomValues() { BitVectorImpl bv0 = new BitVectorImpl(); BitVectorImpl bv1 = new BitVectorImpl(); Iterator it = bv0.iterator(); Assert.assertEquals(it, it); Assert.assertEquals(bv0.iterator(), bv1.iterator()); Assert.assertNotEquals(bv0.iterator(), Collections.emptyIterator()); PseudorandomBooleanGenerator generator0 = new PseudorandomBooleanGenerator( 0x1234); for (int i = 0; i < 0x1000; i++) { boolean value = generator0.getPseudorandomBoolean(); bv0.addBit(value); bv1.addBit(value); } PseudorandomBooleanGenerator generator1 = new PseudorandomBooleanGenerator( 0x1234); int i = 0; for (boolean value : bv0) { boolean expectedValue = generator1.getPseudorandomBoolean(); Assert.assertEquals(expectedValue, value); i++; } Assert.assertEquals(i, 0x1000); Assert.assertEquals(bv0.iterator(), bv1.iterator()); Assert.assertNotEquals(bv0.iterator(), (new BitVectorImpl()).iterator()); } @Test public void testVectorWithRegularPattern() { { BitVectorImpl bv = new BitVectorImpl(); for (int i = 0; i < 0x100; i++) { boolean value = (i % 3) == 0; bv.addBit(value); } { Iterator it = bv.iterator(); Assert.assertTrue(it.hasNext()); int i = 0; while (it.hasNext()) { boolean expectedValue = ((i % 3) == 0); boolean value = it.next(); Assert.assertEquals(expectedValue, value); i++; } } } { BitVectorImpl bv = new BitVectorImpl(); for (int i = 0; i < 0x100; i++) { boolean value = (i % 7) == 0; bv.addBit(value); } { int i = 0; for (boolean value : bv) { boolean expectedValue = (i % 7) == 0; Assert.assertEquals(expectedValue, value); i++; } } } } } 
PseudorandomBooleanGenerator.java000066400000000000000000000025211444772566300405570ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/wdtk/storage/datastructurespackage org.wikidata.wdtk.storage.datastructures; /* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Pseudorandom Boolean generator. * * @author Julian Mendez * */ public class PseudorandomBooleanGenerator { int seed; /** * Constructs a pseudorandom Boolean generator using a given seed. * * @param seed * seed */ public PseudorandomBooleanGenerator(int seed) { this.seed = seed; } /** * Returns a boolean obtained using a pseudorandom Boolean generator. * * @return a boolean obtained using a pseudorandom Boolean generator * */ public boolean getPseudorandomBoolean() { this.seed = (0x4650 * (this.seed & 0xFFFF)) + (this.seed >> 0x10); return ((this.seed & 1) == 1); } } RankedBitVectorImplTest.java000066400000000000000000000273641444772566300374740ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-storage/src/test/java/org/wikidata/wdtk/storage/datastructures/* * #%L * Wikidata Toolkit Data Model * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.storage.datastructures; import java.util.Iterator; import org.junit.Assert; import org.junit.Test; /** * Test class for {@link RankedBitVectorImpl}. * * @author Julian Mendez * */ public class RankedBitVectorImplTest { /** * Asserts that for every position in a bit vector, * {@link RankedBitVector#countBits(boolean, long)} works as expected. * * @param bv * bit vector */ void assertCorrectCount(RankedBitVector bv) { for (long index = 0; index < bv.size(); index++) { assertCorrectCount(bv, index); } } /** * Asserts that {@link RankedBitVector#countBits(boolean, long)} works as * expected at a particular position. 
* * @param bv * bit vector * @param position * position */ void assertCorrectCount(RankedBitVector bv, long position) { long expectedCountBitsFalse = countBits(bv, false, position); long computedCountBitsFalse = bv.countBits(false, position); Assert.assertEquals(expectedCountBitsFalse, computedCountBitsFalse); long expectedCountBitsTrue = countBits(bv, true, position); long computedCountBitsTrue = bv.countBits(true, position); Assert.assertEquals(expectedCountBitsTrue, computedCountBitsTrue); } /** * Asserts that for every number of occurrences of a bit value in a bit * vector, {@link RankedBitVector#findPosition(boolean, long)} works as * expected. * * @param bv * bit vector */ void assertCorrectFindPosition(RankedBitVector bv) { for (long index = 0; index < bv.size(); index++) { assertCorrectFindPosition(bv, index); } } /** * Asserts that {@link RankedBitVector#findPosition(boolean, long)} works as * expected considering the given number of occurrences of a bit value. * * @param bv * bit vector * @param nOccurrences * number of occurrences */ void assertCorrectFindPosition(RankedBitVector bv, long nOccurrences) { long expectedFindPositionFalse = findPosition(bv, false, nOccurrences); long computedFindPositionFalse = bv.findPosition(false, nOccurrences); Assert.assertEquals(expectedFindPositionFalse, computedFindPositionFalse); long expectedFindPositionTrue = findPosition(bv, true, nOccurrences); long computedFindPositionTrue = bv.findPosition(true, nOccurrences); Assert.assertEquals(expectedFindPositionTrue, computedFindPositionTrue); } /** * Asserts that two ranked bit vectors are equal, and also that the first * bit vector is equal to itself. * * @param bv0 * one bit vector * @param bv1 * another bit vector */ void assertEqualsForBitVector(RankedBitVector bv0, RankedBitVector bv1) { Assert.assertEquals(bv0, bv0); Assert.assertEquals(bv0, bv1); Assert.assertEquals(bv1, bv0); Assert.assertEquals(bv0.hashCode(), bv1.hashCode()); } /** * Returns the expected value of * {@link RankedBitVector#countBits(boolean, long)}. * * @param bv * bit vector * @param bit * bit value * @param position * position * @return the expected value of * {@link RankedBitVector#countBits(boolean, long)} */ long countBits(BitVector bv, boolean bit, long position) { long ret = 0; for (long index = 0; index <= position; index++) { if (bv.getBit(index) == bit) { ret++; } } return ret; } /** * Returns the expected value of * {@link RankedBitVector#findPosition(boolean, long)}. 
* * @param bv * bit vector * @param bit * bit value * @param nOccurrences * number of occurrences * @return the expected value of * {@link RankedBitVector#findPosition(boolean, long)} */ long findPosition(BitVector bv, boolean bit, long nOccurrences) { if (nOccurrences == 0) { return RankedBitVector.NOT_FOUND; } long accumOccurrences = 0; for (long index = 0; index < bv.size(); index++) { if (bv.getBit(index) == bit) { accumOccurrences++; } if (accumOccurrences == nOccurrences) { return index; } } return RankedBitVector.NOT_FOUND; } @Test public void testAdd() { RankedBitVectorImpl bv = new RankedBitVectorImpl(); Assert.assertEquals(0, bv.size()); bv.addBit(true); Assert.assertEquals(1, bv.size()); Assert.assertTrue(bv.getBit(0)); bv.addBit(false); Assert.assertEquals(2, bv.size()); Assert.assertFalse(bv.getBit(1)); bv.addBit(false); Assert.assertEquals(3, bv.size()); Assert.assertFalse(bv.getBit(2)); for (int i = 3; i < 0x1000; i++) { boolean value = (i % 3) == 0; bv.addBit(value); Assert.assertEquals(value, bv.getBit(i)); assertCorrectCount(bv, i); } } @Test public void testCountBits() { final long aLargeNumber = 0x100000; PseudorandomBooleanGenerator generator = new PseudorandomBooleanGenerator( 0x1234); RankedBitVectorImpl bv = new RankedBitVectorImpl(new BitVectorImpl()); for (int i = 0; i < aLargeNumber; i++) { boolean value = generator.getPseudorandomBoolean(); bv.addBit(value); } for (int i = 0; i < aLargeNumber; i++) { if ((i % 0x6785) == 0) { assertCorrectCount(bv, i); } } } @Test public void testEmptyBitVector() { RankedBitVectorImpl bv0 = new RankedBitVectorImpl(); Assert.assertEquals(0, bv0.size()); assertCorrectCount(bv0); assertCorrectFindPosition(bv0); Assert.assertNotEquals(bv0, new Object()); Assert.assertEquals(bv0, new BitVectorImpl()); RankedBitVector bv1 = new RankedBitVectorImpl(); RankedBitVectorImpl bv2 = new RankedBitVectorImpl(0); assertEqualsForBitVector(bv1, bv2); assertCorrectCount(bv2); assertCorrectFindPosition(bv2); } @Test public void testEqualityAndCopyConstructor() { final long aLargeNumber = 0x100000; RankedBitVectorImpl bv0 = new RankedBitVectorImpl(); RankedBitVectorImpl bv1 = new RankedBitVectorImpl(); PseudorandomBooleanGenerator generator = new PseudorandomBooleanGenerator( 0x1234); for (int i = 0; i < aLargeNumber; i++) { boolean value = generator.getPseudorandomBoolean(); bv0.addBit(value); bv1.addBit(value); } assertEqualsForBitVector(bv0, bv1); RankedBitVectorImpl bv2 = new RankedBitVectorImpl(bv1); assertEqualsForBitVector(bv0, bv2); } @Test public void testFindPositionBlockSize() { for (int x = 0x80; x >= 0x40; x--) { testFindPositionWithBitVector(new RankedBitVectorImpl(0, 0x10, x)); } } void testFindPositionWithBitVector(RankedBitVectorImpl bv) { Assert.assertEquals(0, bv.size()); bv.addBit(true); Assert.assertEquals(RankedBitVector.NOT_FOUND, bv.findPosition(true, 0)); Assert.assertEquals(0, bv.findPosition(true, 1)); bv.addBit(true); bv.addBit(false); bv.addBit(true); bv.addBit(false); bv.addBit(false); bv.addBit(false); bv.addBit(true); Assert.assertEquals(RankedBitVector.NOT_FOUND, bv.findPosition(false, 0)); Assert.assertEquals(RankedBitVector.NOT_FOUND, bv.findPosition(true, 0)); Assert.assertEquals(0, bv.findPosition(true, 1)); Assert.assertEquals(1, bv.findPosition(true, 2)); Assert.assertEquals(2, bv.findPosition(false, 1)); Assert.assertEquals(3, bv.findPosition(true, 3)); Assert.assertEquals(4, bv.findPosition(false, 2)); Assert.assertEquals(5, bv.findPosition(false, 3)); Assert.assertEquals(6, bv.findPosition(false, 4)); 
Assert.assertEquals(7, bv.findPosition(true, 4)); Assert.assertEquals(RankedBitVector.NOT_FOUND, bv.findPosition(false, 5)); Assert.assertEquals(RankedBitVector.NOT_FOUND, bv.findPosition(true, 5)); } @Test(expected = IllegalArgumentException.class) public void testInvalidInitialSizes0() { new RankedBitVectorImpl(1, 0, 0x40); } @Test(expected = IllegalArgumentException.class) public void testInvalidInitialSizes1() { new RankedBitVectorImpl(1, 2, 0x3F); } @Test(expected = IllegalArgumentException.class) public void testInvalidInitialSizes2() { new CountBitsArray(new BitVectorImpl(), 0); } @Test(expected = IllegalArgumentException.class) public void testInvalidInitialSizes3() { new FindPositionArray(0, new BitVectorImpl(), true); } @Test(expected = IllegalArgumentException.class) public void testInvalidInitialSizes4() { new FindPositionArray(new BitVectorImpl(), true, 0x3F); } @Test(expected = IllegalArgumentException.class) public void testInvalidInitialSizes5() { new RankedBitVectorImpl(-1); } @Test public void testIterator() { RankedBitVectorImpl bv = new RankedBitVectorImpl(new BitVectorImpl()); PseudorandomBooleanGenerator generator = new PseudorandomBooleanGenerator( 0x7531); Assert.assertEquals(0, bv.size()); for (int i = 0; i < 0x300; i++) { bv.addBit(generator.getPseudorandomBoolean()); } Iterator it = bv.iterator(); for (int i = 0; i < 0x300; i++) { boolean value = it.next(); Assert.assertEquals(bv.getBit(i), value); } Assert.assertFalse(it.hasNext()); } @Test public void testSize0() { RankedBitVectorImpl bv = new RankedBitVectorImpl(0x100); Assert.assertEquals(0x100, bv.size()); bv.addBit(false); bv.addBit(true); Assert.assertEquals(0x102, bv.size()); assertCorrectCount(bv); assertCorrectFindPosition(bv); } @Test public void testSize1() { RankedBitVectorImpl bv = new RankedBitVectorImpl(); Assert.assertEquals(0, bv.size()); for (int i = 0; i < 0x300; i++) { bv.addBit((i % 5) == 0); Assert.assertEquals(i + 1, bv.size()); } assertCorrectCount(bv); assertCorrectFindPosition(bv); } @Test public void testToString() { RankedBitVectorImpl bv = new RankedBitVectorImpl(); for (int i = 0; i < 0x10; i++) { boolean value = (i % 3) == 0; bv.addBit(value); } Assert.assertEquals("1001001001001001", bv.toString()); assertCorrectCount(bv); assertCorrectFindPosition(bv); for (int i = 0; i < 0x10; i++) { boolean value = (i % 2) == 0; bv.addBit(value); } Assert.assertEquals("10010010010010011010101010101010", bv.toString()); assertCorrectCount(bv); assertCorrectFindPosition(bv); for (int i = 0; i < 0x20; i++) { bv.setBit(i, bv.getBit(i)); } Assert.assertEquals("10010010010010011010101010101010", bv.toString()); assertCorrectCount(bv); assertCorrectFindPosition(bv); for (int i = 0; i < 0x20; i++) { bv.setBit(i, !bv.getBit(i)); } Assert.assertEquals("01101101101101100101010101010101", bv.toString()); assertCorrectCount(bv); assertCorrectFindPosition(bv); } @Test public void testToStringOfAuxClasses() { BitVectorImpl bv = new BitVectorImpl(); bv.addBit(true); bv.addBit(false); bv.addBit(true); bv.addBit(true); bv.addBit(false); bv.addBit(false); bv.addBit(false); bv.addBit(true); CountBitsArray cba = new CountBitsArray(bv, 2); Assert.assertEquals("[1, 3, 3, 4]", cba.toString()); FindPositionArray fpa = new FindPositionArray(2, bv, false); Assert.assertEquals("[-1, 4, 6]", fpa.toString()); Assert.assertEquals(RankedBitVector.NOT_FOUND, fpa.findPosition(0)); Assert.assertEquals(4, fpa.findPosition(2)); } @Test public void testValidInitialSizes() { new RankedBitVectorImpl(1, 1, 0x40); new 
RankedBitVectorImpl(1, 2, 0x40); new CountBitsArray(new BitVectorImpl(), 1); new FindPositionArray(1, new BitVectorImpl(), true); new FindPositionArray(new BitVectorImpl(), true, 0x40); new RankedBitVectorImpl(0); } } Wikidata-Toolkit-0.14.6/wdtk-testing/000077500000000000000000000000001444772566300174305ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/LICENSE.txt000066400000000000000000000261351444772566300212620ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Wikidata-Toolkit-0.14.6/wdtk-testing/pom.xml000066400000000000000000000016111444772566300207440ustar00rootroot00000000000000 <?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.wikidata.wdtk</groupId> <artifactId>wdtk-parent</artifactId> <version>0.14.6</version> </parent> <artifactId>wdtk-testing</artifactId> <packaging>jar</packaging> <name>Wikidata Toolkit Testing Utilities</name> <description>Helper code that is only used in unit tests</description> <dependencies> <dependency> <groupId>${project.groupId}</groupId> <artifactId>wdtk-util</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockitoVersion}</version> </dependency> </dependencies> </project> Wikidata-Toolkit-0.14.6/wdtk-testing/src/000077500000000000000000000000001444772566300202175ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/000077500000000000000000000000001444772566300211435ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/000077500000000000000000000000001444772566300220645ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/000077500000000000000000000000001444772566300226535ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/000077500000000000000000000000001444772566300244505ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300254215ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/wdtk/testing/000077500000000000000000000000001444772566300270765ustar00rootroot00000000000000MockDirectoryManager.java000066400000000000000000000267751444772566300337500ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/wdtk/testingpackage org.wikidata.wdtk.testing; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.ByteArrayOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.PathMatcher; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.zip.GZIPInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.util.DirectoryManager; /** * Mock implementation of {@link DirectoryManager} that simulates file access * without touching the file system. * * @author Markus Kroetzsch * */ public class MockDirectoryManager implements DirectoryManager { /** * Mock files with this content are interpreted as directories. 
*/ static final String DIRECTORY_MARKER_STRING = "DIRECTORY"; /** * Mock files with this content are interpreted as directories. */ static final byte[] DIRECTORY_MARKER = DIRECTORY_MARKER_STRING .getBytes(StandardCharsets.UTF_8); /** * The mocked file system. This is static so that it can be accessed after a * test even if the directory manager that was used is created internally. */ public static HashMap<Path, byte[]> files = new HashMap<>(); final Path directory; /** * If true, all read methods will return objects that will throw exceptions * when trying to get any data. Used for testing exception handling. */ boolean returnFailingReaders; /** * If false, the directory manager will attempt to create directories when * changing to a location that does not exist. */ final boolean readOnly; /** * Creates a new object but retains all previously stored files. * * @param directory * initial directory that is managed * @param readOnly * if false, the directory manager will attempt to create * directories when changing to a location that does not exist * @throws IOException */ public MockDirectoryManager(Path directory, Boolean readOnly) throws IOException { this(directory, false, readOnly); } /** * Constructor * * @param baseDirectory * the directory where the file manager should point initially; * will be created if not existing * @param readOnly * if false, the directory manager will attempt to create * directories when changing to a location that does not exist * @throws IOException * if there was a problem creating the directory */ public MockDirectoryManager(String baseDirectory, Boolean readOnly) throws IOException { this(Paths.get(baseDirectory), readOnly); } /** * Creates a new object and clears all previously stored mock files if * requested. * * @param directory * initial directory that is managed * @param resetFileSystem * if true, the previously mocked files will be cleared; in this * case, the starting directory will be created, however (even in * read-only mode) * @param readOnly * if false, the directory manager will attempt to create * directories when changing to a location that does not exist * @throws IOException */ public MockDirectoryManager(Path directory, boolean resetFileSystem, boolean readOnly) throws IOException { this.directory = directory; this.readOnly = readOnly; if (resetFileSystem) { files = new HashMap<>(); setDirectory(directory); } if (files.containsKey(directory)) { if (!Arrays.equals(files.get(directory), DIRECTORY_MARKER)) { throw new IOException( "Could not create mock working directory."); } // else: directory exists, nothing to do } else { ensureWritePermission(directory); setDirectory(directory); } } /** * When set to true, every operation that returns reader objects to access * some file will return objects that fail with exceptions when trying to * read the file. This can be used to simulate problems like insufficient * access rights or files becoming inaccessible after being opened. *
<p>
    * The property is inherited by any submanagers that are created by this * object. * * @param returnFailingReaders * whether read operations should fail */ public void setReturnFailingReaders(boolean returnFailingReaders) { this.returnFailingReaders = returnFailingReaders; } @Override public String toString() { return "[mocked directory] " + this.directory.toString(); } /** * Sets the contents of the file at the given path and creates all parent * directories in our mocked view of the file system. *
<p>
    * This method is used for mocking and is always successful, even if the * object is in read-only mode otherwise. * * @param path * @param contents * @throws IOException */ public void setFileContents(Path path, String contents) throws IOException { setFileContents(path, contents, CompressionType.NONE); } /** * Sets the contents of the file at the given path and creates all parent * directories in our mocked view of the file system. If a compression is * chosen, the file contents is the compressed version of the given * contents. Strings are encoded as UTF8. *
<p>
    * This method is used for mocking and is always successful, even if the * object is in read-only mode otherwise. * * @param path * @param contents * @param compressionType * @throws IOException */ public void setFileContents(Path path, String contents, CompressionType compressionType) throws IOException { files.put(path, MockStringContentFactory.getBytesFromString(contents, compressionType)); Path parent = path.getParent(); if (parent != null) { setFileContents(parent, DIRECTORY_MARKER_STRING); } } /** * Create the given directory and all parent directories in our mocked view * of the file system. *
<p>
    * This method is used for mocking and is always successful, even if the * object is in read-only mode otherwise. * * @param path * @throws IOException */ public void setDirectory(Path path) throws IOException { setFileContents(path, DIRECTORY_MARKER_STRING); } @Override public DirectoryManager getSubdirectoryManager(String subdirectoryName) throws IOException { MockDirectoryManager result = new MockDirectoryManager( directory.resolve(subdirectoryName), false, this.readOnly); result.setReturnFailingReaders(this.returnFailingReaders); return result; } @Override public boolean hasSubdirectory(String subdirectoryName) { Path directoryPath = this.directory.resolve(subdirectoryName); return Arrays.equals(DIRECTORY_MARKER, files.get(directoryPath)); } @Override public boolean hasFile(String fileName) { Path filePath = this.directory.resolve(fileName); return files.containsKey(filePath) && !Arrays.equals(files.get(filePath), DIRECTORY_MARKER); } @Override public long createFile(String fileName, InputStream inputStream) throws IOException { Path filePath = this.directory.resolve(fileName); ensureWritePermission(filePath); ByteArrayOutputStream out = new ByteArrayOutputStream(); int nextByte; while ((nextByte = inputStream.read()) >= 0) { out.write(nextByte); } out.close(); files.put(filePath, out.toByteArray()); return out.size(); } @Override public long createFileAtomic(String fileName, InputStream inputStream) throws IOException { return createFile(fileName, inputStream); } @Override public void createFile(String fileName, String fileContents) throws IOException { if (this.hasFile(fileName)) { throw new FileAlreadyExistsException("File exists"); } Path filePath = this.directory.resolve(fileName); ensureWritePermission(filePath); files.put(filePath, fileContents.getBytes(StandardCharsets.UTF_8)); } @Override public OutputStream getOutputStreamForFile(String fileName) throws IOException { Path filePath = this.directory.resolve(fileName); ensureWritePermission(filePath); return new MockOutputStream(filePath); } @Override public InputStream getInputStreamForFile(String fileName, CompressionType compressionType) throws IOException { if (compressionType == CompressionType.GZIP) { return new GZIPInputStream(getInputStreamForMockFile(fileName)); } else if (compressionType == CompressionType.BZ2) { return new BZip2CompressorInputStream( getInputStreamForMockFile(fileName)); } else { return getInputStreamForMockFile(fileName); } } /** * Get an input stream for the mocked contents of the given file, or throw * an exception if the file does not exist. 
* * @param fileName * @return input stream for file * @throws FileNotFoundException */ InputStream getInputStreamForMockFile(String fileName) throws FileNotFoundException { if (!hasFile(fileName)) { throw new FileNotFoundException("Could not find file \"" + fileName + "\" in current directory \"" + this.directory.toString() + "\""); } if (this.returnFailingReaders) { return MockStringContentFactory.getFailingInputStream(); } else { Path filePath = this.directory.resolve(fileName); return MockStringContentFactory.newMockInputStream(files .get(filePath)); } } @Override public List<String> getSubdirectories(String glob) { List<String> result = new ArrayList<>(); PathMatcher pathMatcher = FileSystems.getDefault().getPathMatcher( "glob:" + glob); for (Path path : files.keySet()) { if (!this.directory.equals(path.getParent())) { continue; } if (pathMatcher.matches(path.getFileName())) { result.add(path.getFileName().toString()); } } return result; } /** * Returns the byte contents of the mocked file for the given path. If the * file is not mocked, null is returned. If the file is a mocked directory, * the bytes of {@link MockDirectoryManager#DIRECTORY_MARKER} are returned. * * @param filePath * the path of the mocked file * @return byte contents of mocked file */ public static byte[] getMockedFileContents(Path filePath) { return files.get(filePath); } /** * Throws an exception if the object is in read-only mode. The file path is * only needed for the error message. A detailed check for writability is * not performed (if there is a specific problem for this one path, e.g., * due to missing permissions, an exception will be created in due course * anyway). * * @param writeFilePath * the name of the file we would like to write to * @throws IOException * if in read-only mode */ void ensureWritePermission(Path writeFilePath) throws IOException { if (this.readOnly) { throw new IOException("Cannot write to \"" + writeFilePath.toString() + "\" since we are in read-only mode."); } } } Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/wdtk/testing/MockOutputStream.java000066400000000000000000000021521444772566300332270ustar00rootroot00000000000000package org.wikidata.wdtk.testing; /* * #%L * Wikidata Toolkit Testing Utilities * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.file.Path; public class MockOutputStream extends ByteArrayOutputStream { final Path targetPath; public MockOutputStream(Path filePath) { this.targetPath = filePath; } @Override public void close() throws IOException { super.close(); MockDirectoryManager.files.put(this.targetPath, this.toByteArray()); } } MockStringContentFactory.java000066400000000000000000000131471444772566300346330ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/wdtk/testingpackage org.wikidata.wdtk.testing; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.net.URL; import java.nio.charset.StandardCharsets; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.mockito.Mockito; import org.wikidata.wdtk.util.CompressionType; /** * Helper class to create BufferedReaders and InputStreams with predefined * contents. * * @author Markus Kroetzsch * */ public class MockStringContentFactory { /** * Returns a new InputStream that gives access to the contents as given in * the input string, encoded in UTF-8. * * @param contents * @return an input stream for the given string */ public static InputStream newMockInputStream(String contents) { return new ByteArrayInputStream( contents.getBytes(StandardCharsets.UTF_8)); } /** * Returns a new InputStream that gives access to the contents as given in * the input bytes. * * @param contents * @return an input stream for the given bytes */ public static InputStream newMockInputStream(byte[] contents) { return new ByteArrayInputStream(contents); } /** * Loads a string from the file at the given URL. This should only be used * for relatively small files, obviously. The file contents is interpreted * as UTF-8. * * @param url * @return string contents of the file at the given URL * @throws IOException * if the URL could not be resolved or the file could not be * read */ public static String getStringFromUrl(URL url) throws IOException { BufferedReader br = new BufferedReader(new InputStreamReader( url.openStream(), StandardCharsets.UTF_8)); return getStringFromBufferedReader(br); } /** * Loads a string from the given buffered reader. Newline will be appended * after each line but the last. 
* * @param bufferedReader * @return string contents of the buffered reader * @throws IOException * if it was not possible to read from the buffered reader */ public static String getStringFromBufferedReader( BufferedReader bufferedReader) throws IOException { StringBuilder contentsBuilder = new StringBuilder(); String line; boolean firstLine = true; while ((line = bufferedReader.readLine()) != null) { if (firstLine) { firstLine = false; } else { contentsBuilder.append("\n"); } contentsBuilder.append(line); } return contentsBuilder.toString(); } /** * Loads a string from the given input stream. UTF-8 encoding will be * assumed. Newline will be appended after each line but the last. * * @param inputStream * @return string contents of the input stream * @throws IOException * if it was not possible to read from the buffered reader */ public static String getStringFromInputStream(InputStream inputStream) throws IOException { BufferedReader bufferedReader = new BufferedReader( new InputStreamReader(inputStream, StandardCharsets.UTF_8)); String result = MockStringContentFactory .getStringFromBufferedReader(bufferedReader); bufferedReader.close(); return result; } /** * Returns an input stream that will throw IOExceptions on common reading * operations. * * @return input stream that fails on reading */ public static InputStream getFailingInputStream() { InputStream is = Mockito.mock(InputStream.class); try { Mockito.doThrow(new IOException()).when(is).read(); Mockito.doThrow(new IOException()).when(is).read(Mockito.any()); Mockito.doThrow(new IOException()) .when(is) .read(Mockito.any(), Mockito.anyInt(), Mockito.anyInt()); } catch (IOException e) { throw new RuntimeException( "Mockito should not throw anything here. Strange.", e); } return is; } /** * Turns a string into a sequence of bytes, possibly compressed. In any * case, the character encoding used for converting the string into bytes is * UTF8. * * @param string * @param compressionType * @return * @throws IOException */ public static byte[] getBytesFromString(String string, CompressionType compressionType) throws IOException { switch (compressionType) { case NONE: return string.getBytes(StandardCharsets.UTF_8); case BZ2: case GZIP: ByteArrayOutputStream out = new ByteArrayOutputStream(); OutputStreamWriter ow; if (compressionType == CompressionType.GZIP) { ow = new OutputStreamWriter( new GzipCompressorOutputStream(out), StandardCharsets.UTF_8); } else { ow = new OutputStreamWriter( new BZip2CompressorOutputStream(out), StandardCharsets.UTF_8); } ow.write(string); ow.close(); return out.toByteArray(); default: throw new RuntimeException("Unknown compression type " + compressionType); } } } MockWebResourceFetcher.java000066400000000000000000000121541444772566300342250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/wdtk/testingpackage org.wikidata.wdtk.testing; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.HashMap; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.util.WebResourceFetcher; /** * Mock implementation of {@link WebResourceFetcher}. * * @author Markus Kroetzsch * */ public class MockWebResourceFetcher implements WebResourceFetcher { public final HashMap<String, byte[]> webResources; boolean returnFailingReaders; /** * Constructor. * */ public MockWebResourceFetcher() { this.webResources = new HashMap<>(); } /** * When set to true, every operation that returns reader or stream objects * to access some resource will return objects that fail with exceptions * when trying to read data. This can be used to simulate problems like * failing network connections after opening an online resource. * * @param returnFailingReaders * whether read operations should fail */ public void setReturnFailingReaders(boolean returnFailingReaders) { this.returnFailingReaders = returnFailingReaders; } /** * Defines the contents of a new web resource. * * @param url * the URL string * @param contents * the string contents * @throws IOException */ public void setWebResourceContents(String url, String contents) throws IOException { setWebResourceContents(url, contents, CompressionType.NONE); } /** * Defines the contents of a new web resource. * * @param url * the URL string * @param contents * the string contents * @param compressionType * the compression to use on the mocked contents * @throws IOException */ public void setWebResourceContents(String url, String contents, CompressionType compressionType) throws IOException { this.webResources.put(url, MockStringContentFactory.getBytesFromString( contents, compressionType)); } /** * Defines the contents of a new web resource by taking the string from a * given (Java) resource. * * @param url * the URL string * @param resource * the Java resource name * @param resourceClass * the Class relative to which the resource should be resolved * (since resources are stored relative to a classpath); can * usually be obtained with getClass() from the calling object * @throws IOException * if the Java resource could not be loaded */ public void setWebResourceContentsFromResource(String url, String resource, Class<?> resourceClass) throws IOException { setWebResourceContentsFromResource(url, resource, resourceClass, CompressionType.NONE); } /** * Defines the contents of a new web resource by taking the string from a * given (Java) resource, possibly using additional compression. * * @param url * the URL string * @param resource * the Java resource name * @param resourceClass * the Class relative to which the resource should be resolved * (since resources are stored relative to a classpath); can * usually be obtained with getClass() from the calling object * @param compressionType * the compression to use on the mocked contents * @throws IOException * if the Java resource could not be loaded */ public void setWebResourceContentsFromResource(String url, String resource, Class<?> resourceClass, CompressionType compressionType) throws IOException { URL resourceUrl = resourceClass.getResource(resource); String contents = MockStringContentFactory .getStringFromUrl(resourceUrl); setWebResourceContents(url, contents, compressionType); } @Override public InputStream getInputStreamForUrl(String urlString) throws IOException { return getInputStreamForMockWebResource(urlString); } /** * Returns an input stream for the content mocked for the given URL. 
It is * checked that the URL is valid. * * @param urlString * @return input stream for resource * @throws IOException */ InputStream getInputStreamForMockWebResource(String urlString) throws IOException { if (!this.webResources.containsKey(urlString)) { throw new IOException("Inaccessible URL (not mocked): " + urlString); } if (this.returnFailingReaders) { return MockStringContentFactory.getFailingInputStream(); } else { return MockStringContentFactory .newMockInputStream(this.webResources.get(urlString)); } } } Wikidata-Toolkit-0.14.6/wdtk-testing/src/main/java/org/wikidata/wdtk/testing/package-info.java000066400000000000000000000015061444772566300322670ustar00rootroot00000000000000/** * Package for general helper code that is only used for testing purposes. * * @author Markus Kroetzsch * */ package org.wikidata.wdtk.testing; /* * #%L * Wikidata Toolkit utilities for testing * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/000077500000000000000000000000001444772566300211765ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/java/000077500000000000000000000000001444772566300221175ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/java/org/000077500000000000000000000000001444772566300227065ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/java/org/wikidata/000077500000000000000000000000001444772566300245035ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300254545ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/java/org/wikidata/wdtk/testing/000077500000000000000000000000001444772566300271315ustar00rootroot00000000000000MockDirectoryManagerTest.java000066400000000000000000000176231444772566300346370ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/java/org/wikidata/wdtk/testingpackage org.wikidata.wdtk.testing; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.BufferedWriter; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashSet; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.util.DirectoryManager; import static org.junit.Assert.*; public class MockDirectoryManagerTest { MockDirectoryManager mdm; Path basePath; @Before public void setUp() throws Exception { basePath = Paths.get(System.getProperty("user.dir")); mdm = new MockDirectoryManager(basePath, true, false); mdm.setDirectory(basePath.resolve("dir1").resolve("subdir")); mdm.setFileContents(basePath.resolve("dir2").resolve("test.txt"), "Test contents"); mdm.setFileContents( basePath.resolve("anotherdir").resolve("test.txt.bz2"), "Test BZ2 contents\nMore contents", CompressionType.BZ2); mdm.setFileContents( basePath.resolve("anotherdir").resolve("test.txt.gz"), "Test GZIP contents", CompressionType.GZIP); } @Test public void newSubdirectoryManager() throws IOException { mdm.getSubdirectoryManager("newdir"); assertTrue(mdm.hasSubdirectory("newdir")); } @Test(expected = IOException.class) public void subdirectoryManagerConflict() throws IOException { DirectoryManager submdm = mdm.getSubdirectoryManager("dir2"); submdm.getSubdirectoryManager("test.txt"); } @Test public void hasSubdirectory() throws IOException { DirectoryManager submdm = mdm.getSubdirectoryManager("dir1"); assertTrue(submdm.hasSubdirectory("subdir")); assertFalse(submdm.hasFile("subdir")); assertFalse(mdm.hasSubdirectory("dir")); } @Test public void hasFile() throws IOException { DirectoryManager submdm = mdm.getSubdirectoryManager("dir2"); assertTrue(submdm.hasFile("test.txt")); assertFalse(mdm.hasSubdirectory("test.txt")); } @Test public void getSubdirectories() { HashSet<String> mdmDirs = new HashSet<>( mdm.getSubdirectories("dir*")); HashSet<String> expectedDirs = new HashSet<>(); expectedDirs.add("dir1"); expectedDirs.add("dir2"); assertEquals(expectedDirs, mdmDirs); } @Test public void readFile() throws IOException { DirectoryManager submdm = mdm.getSubdirectoryManager("dir2"); String content = MockStringContentFactory .getStringFromInputStream(submdm.getInputStreamForFile( "test.txt", CompressionType.NONE)); assertEquals("Test contents", content); } @Test public void readBz2File() throws IOException { DirectoryManager submdm = mdm.getSubdirectoryManager("anotherdir"); String content = MockStringContentFactory .getStringFromInputStream(submdm.getInputStreamForFile( "test.txt.bz2", CompressionType.BZ2)); assertEquals("Test BZ2 contents\nMore contents", content); } @Test public void readGzipFile() throws IOException { DirectoryManager submdm = mdm.getSubdirectoryManager("anotherdir"); String content = MockStringContentFactory .getStringFromInputStream(submdm.getInputStreamForFile( "test.txt.gz", CompressionType.GZIP)); assertEquals("Test GZIP contents", content); } @Test public void createFileFromInputStream() throws IOException { InputStream inputStream = MockStringContentFactory .newMockInputStream("New stream contents\nMultiple lines"); mdm.createFile("newfile.txt", inputStream); String content = MockStringContentFactory.getStringFromInputStream(mdm .getInputStreamForFile("newfile.txt", CompressionType.NONE)); assertEquals("New stream contents\nMultiple lines", content); } 
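// Illustrative extra test (added in this edit as a hedged sketch; it is not
// part of the original test suite). It demonstrates that setFileContents and
// getInputStreamForFile are inverses for compressed content: the mock stores
// GZIP bytes produced by MockStringContentFactory.getBytesFromString, and the
// read path transparently decompresses them again. The method name
// roundTripGzipContents and the "dir3" path are invented for this sketch.
@Test
public void roundTripGzipContents() throws IOException {
	mdm.setFileContents(basePath.resolve("dir3").resolve("new.txt.gz"),
			"Round-trip contents", CompressionType.GZIP);
	DirectoryManager submdm = mdm.getSubdirectoryManager("dir3");
	String content = MockStringContentFactory
			.getStringFromInputStream(submdm.getInputStreamForFile(
					"new.txt.gz", CompressionType.GZIP));
	assertEquals("Round-trip contents", content);
}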
@Test public void createAtomicFileFromInputStream() throws IOException { InputStream inputStream = MockStringContentFactory .newMockInputStream("New stream contents\nMultiple lines"); mdm.createFileAtomic("newfile.txt", inputStream); String content = MockStringContentFactory.getStringFromInputStream(mdm .getInputStreamForFile("newfile.txt", CompressionType.NONE)); assertEquals("New stream contents\nMultiple lines", content); } @Test public void createFileFromString() throws IOException { mdm.createFile("newfile.txt", "New contents"); String content = MockStringContentFactory.getStringFromInputStream(mdm .getInputStreamForFile("newfile.txt", CompressionType.NONE)); assertArrayEquals(MockDirectoryManager .getMockedFileContents(mdm.directory.resolve("newfile.txt")), content.getBytes(StandardCharsets.UTF_8)); assertEquals("New contents", content); } @Test public void createFileUsingOutputstream() throws IOException { OutputStream out = mdm.getOutputStreamForFile("newfile.txt"); BufferedWriter ow = new BufferedWriter(new OutputStreamWriter(out)); ow.write("New contents"); ow.close(); String content = MockStringContentFactory.getStringFromInputStream(mdm .getInputStreamForFile("newfile.txt", CompressionType.NONE)); assertEquals("New contents", content); } @Test public void readFileFails() throws IOException { mdm.setReturnFailingReaders(true); DirectoryManager submdm = mdm.getSubdirectoryManager("dir2"); InputStream in = submdm.getInputStreamForFile("test.txt", CompressionType.NONE); // We do not use @Test(expected = IOException.class) in order to check // if the exception is really thrown at the right moment. boolean exception = false; try { MockStringContentFactory.getStringFromInputStream(in); } catch (IOException e) { exception = true; } assertTrue(exception); } @Test(expected = FileAlreadyExistsException.class) public void createFileConflict() throws IOException { DirectoryManager submdm = mdm.getSubdirectoryManager("dir2"); submdm.createFile("test.txt", "New contents"); } @Test(expected = FileNotFoundException.class) public void fileNotFound() throws IOException { mdm.getInputStreamForFile("test.txt", CompressionType.NONE); } @Test(expected = IOException.class) public void createFileFromStringReadOnly() throws IOException { DirectoryManager mdmReadOnly = new MockDirectoryManager(basePath, false, true); mdmReadOnly.createFile("newfile.txt", "New contents"); } @Test(expected = IOException.class) public void createFileFromInputStreamReadOnly() throws IOException { DirectoryManager mdmReadOnly = new MockDirectoryManager(basePath, false, true); mdmReadOnly.createFile("newfile.txt", MockStringContentFactory.newMockInputStream("content")); } @Test(expected = IOException.class) public void createFileFromInputStreamAtomicReadOnly() throws IOException { DirectoryManager mdmReadOnly = new MockDirectoryManager(basePath, false, true); mdmReadOnly.createFileAtomic("newfile.txt", MockStringContentFactory.newMockInputStream("content")); } @Test(expected = IOException.class) public void getOutputStreamReadOnly() throws IOException { DirectoryManager mdmReadOnly = new MockDirectoryManager(basePath, false, true); mdmReadOnly.getOutputStreamForFile("newfile.txt"); } @Test(expected = IOException.class) public void openInNonexistingDirectoryReadOnly() throws IOException { DirectoryManager mdmReadOnly = new MockDirectoryManager(basePath, false, true); mdmReadOnly.getSubdirectoryManager("doesNotExist"); } } 
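// Hedged usage sketch (illustration only; the class and method names below
// are invented placeholders, not Wikidata Toolkit API). It shows how the mock
// classes above can be combined with DirectoryManagerFactory from wdtk-util:
// once the mock class is installed, code that requests a DirectoryManager
// through the factory transparently operates on the in-memory mock file
// system, because the mocked files are held in a static map.
class MockDirectoryManagerUsageSketch {
	static String readThroughFactory() throws java.io.IOException {
		// Let the factory hand out mocks instead of the default implementation:
		org.wikidata.wdtk.util.DirectoryManagerFactory
				.setDirectoryManagerClass(MockDirectoryManager.class);
		// Prepare mocked content; parent directories are created implicitly:
		java.nio.file.Path base = java.nio.file.Paths.get("mocked-base");
		MockDirectoryManager mdm = new MockDirectoryManager(base, true, false);
		mdm.setFileContents(base.resolve("input.txt"), "mock data");
		// Code under test obtains its manager through the factory as usual
		// and reads the mocked file through the normal interface:
		org.wikidata.wdtk.util.DirectoryManager dm =
				org.wikidata.wdtk.util.DirectoryManagerFactory
						.createDirectoryManager(base, true);
		return MockStringContentFactory.getStringFromInputStream(
				dm.getInputStreamForFile("input.txt",
						org.wikidata.wdtk.util.CompressionType.NONE));
	}
}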
MockWebResourceFetcherTest.java000066400000000000000000000046241444772566300351230ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/java/org/wikidata/wdtk/testingpackage org.wikidata.wdtk.testing; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.io.InputStream; import org.junit.Before; import org.junit.Test; public class MockWebResourceFetcherTest { MockWebResourceFetcher mwrf; @Before public void setUp() throws Exception { mwrf = new MockWebResourceFetcher(); mwrf.setWebResourceContents("http://example.com/test.html", "Line1\nLine2"); } @Test public void inputStreamForHtml() throws IOException { String content = MockStringContentFactory.getStringFromInputStream(mwrf .getInputStreamForUrl("http://example.com/test.html")); assertEquals(content, "Line1\nLine2"); } @Test public void setContentsFromResource() throws IOException { mwrf.setWebResourceContentsFromResource( "http://example.com/resource.html", "/test.txt", this.getClass()); String content = MockStringContentFactory.getStringFromInputStream(mwrf .getInputStreamForUrl("http://example.com/resource.html")); assertEquals(content, "This file is here\nto test resource loading."); } @Test public void inputStreamForHtmlFails() throws IOException { mwrf.setReturnFailingReaders(true); InputStream in = mwrf .getInputStreamForUrl("http://example.com/test.html"); // We do not use @Test(expected = IOException.class) in order to check // if the exception is really thrown at the right moment. boolean exception = false; try { MockStringContentFactory.getStringFromInputStream(in); } catch (IOException e) { exception = true; } assertTrue(exception); } @Test(expected = IOException.class) public void readOnlyMockedUrls() throws IOException { mwrf.getInputStreamForUrl("http://not-mocked.com"); } } Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/resources/000077500000000000000000000000001444772566300232105ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-testing/src/test/resources/test.txt000066400000000000000000000000531444772566300247260ustar00rootroot00000000000000This file is here to test resource loading.Wikidata-Toolkit-0.14.6/wdtk-util/000077500000000000000000000000001444772566300167305ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/LICENSE.txt000066400000000000000000000261351444772566300205600ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
Wikidata-Toolkit-0.14.6/wdtk-util/pom.xml000066400000000000000000000013721444772566300202500ustar00rootroot00000000000000 4.0.0 org.wikidata.wdtk wdtk-parent 0.14.6 wdtk-util jar Wikidata Toolkit Utilities General-purpose utilities used by WDTK org.apache.commons commons-compress ${apacheCommonsCompressVersion} Wikidata-Toolkit-0.14.6/wdtk-util/src/000077500000000000000000000000001444772566300175175ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/main/000077500000000000000000000000001444772566300204435ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/000077500000000000000000000000001444772566300213645ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/000077500000000000000000000000001444772566300221535ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/000077500000000000000000000000001444772566300237505ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300247215ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/000077500000000000000000000000001444772566300256765ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/CompressionType.java000066400000000000000000000015771444772566300317160ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Utilities * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Enum for denoting several basic file types for which we provide transparent * decompression. * * @author Markus Kroetzsch * */ public enum CompressionType { NONE, GZIP, BZ2 } Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/DirectoryManager.java000066400000000000000000000131171444772566300320030ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.List; /** * Interface for classes that read and write files from one directory. Allows * for mock implementations to test functionality without actually writing * files. 
* * @author Markus Kroetzsch * */ public interface DirectoryManager { /** * Returns a new directory manager for the subdirectory of the given name. * If the subdirectory does not exist yet, it will be created. If this is * not desired, its existence can be checked with * {@link #hasSubdirectory(String)} first (ignoring the fact that there * might be race conditions when accessing the file system). * * @param subdirectoryName * the string name of the subdirectory * @throws IOException * if the directory could not be created * @return DirectoryManager for the subdirectory */ DirectoryManager getSubdirectoryManager(String subdirectoryName) throws IOException; /** * Checks if there is a subdirectory of the given name. * * @param subdirectoryName * the name of the subdirectory * @return true if the subdirectory exists */ boolean hasSubdirectory(String subdirectoryName); /** * Checks if there is a file of the given name. * * @param fileName * the name of the file * @return true if the file exists and is not a directory */ boolean hasFile(String fileName); /** * Creates a new file in the current directory, and fills it with the data * from the given input stream. If the stream encodes a string, then it * should generally be encoded in UTF-8, since access methods assume this. * * @param fileName * the name of the file * @param inputStream * the input stream from which to load the file * @return size of the new file in bytes * @throws IOException */ long createFile(String fileName, InputStream inputStream) throws IOException; /** * Creates a new file in the current directory, and fills it with the data * from the given input stream. This is done by first writing the data to a * temporary file that uses a suffix to the file name, and then moving the * completed file to the new location. This should be used when fetching * larger files through a slow stream (e.g., a download) to prevent * partially finished file downloads lying around if the program is * terminated during download. The temporary file will still be lying * around, but it will not be mistaken for the completed download by any * other parts of the program. *

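 * As a sketch (the variable names here are made up), a large download could
 * be stored via
 *
 *   long size = dm.createFileAtomic("dump.json.gz", downloadStream);
 *
 * so that an interrupted download leaves only "dump.json.gz.part" behind
 * (".part" is the suffix used by DirectoryManagerImpl below), never a
 * truncated "dump.json.gz".
 *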
* If the stream encodes a string, then it should generally be encoded in * UTF-8, since access methods assume this. * * @param fileName * the name of the file * @param inputStream * the input stream from which to load the file * @return size of the new file in bytes * @throws IOException */ long createFileAtomic(String fileName, InputStream inputStream) throws IOException; /** * Creates a new file in the current directory, and fills it with the given * data, encoded in UTF-8. Should only be used for short pieces of data. * * @param fileName * the name of the file * @param fileContents * the data to write into the file * @throws IOException */ void createFile(String fileName, String fileContents) throws IOException; /** * Opens and returns an output stream that can be used to write to the file * of the given name within the current directory. The stream is owned by * the caller and must be closed after use. If the file already exists, it * will be truncated by this operation. * * @param fileName * the name of the file * @return the stream to write to * @throws IOException */ OutputStream getOutputStreamForFile(String fileName) throws IOException; /** * Returns an input stream to access the file of the given name within the * current directory, possibly uncompressing it if required. *

* It is important to close the stream after using it to free memory. * * @param fileName * the name of the file * @param compressionType * for types other than {@link CompressionType#NONE}, the file * will be uncompressed appropriately and the returned input * stream will provide access to the uncompressed content * @return an InputStream to fetch data from the file * @throws IOException */ InputStream getInputStreamForFile(String fileName, CompressionType compressionType) throws IOException; /** * Returns a list of the names of all subdirectories of the base directory. * The glob pattern can be used to filter the names; "*" should be used if * no filtering is desired. * * @param glob * pattern to filter directory names * @return list of subdirectory names * @throws IOException */ List<String> getSubdirectories(String glob) throws IOException; } Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/DirectoryManagerFactory.java000066400000000000000000000063771444772566300333410ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Utilities * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.nio.file.Path; import java.nio.file.Paths; /** * Static helper class for creating {@link DirectoryManager} objects. * * @author Markus Kroetzsch * */ public class DirectoryManagerFactory { /** * The class that will be used for accessing directories. */ static Class<? extends DirectoryManager> dmClass = DirectoryManagerImpl.class; /** * Sets the class of {@link DirectoryManager} that should be used when * creating instances here. This class should provide constructors for * {@link Path} and {@link String} versions of the directory. * * @param clazz * the class to use */ public static void setDirectoryManagerClass( Class<? extends DirectoryManager> clazz) { dmClass = clazz; } /** * Creates a new {@link DirectoryManager} for the given directory path. * * @param path * the directory that the directory manager points to * @param readOnly * if false, the directory manager will attempt to create * directories when changing to a location that does not exist * @return the directory manager * @throws IOException * if there was an IO error constructing the directory manager */ public static DirectoryManager createDirectoryManager(Path path, boolean readOnly) throws IOException { try { return dmClass.getConstructor(Path.class, Boolean.class) .newInstance(path, readOnly); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | NoSuchMethodException | SecurityException e) { throw new RuntimeException(e.toString(), e); } catch (InvocationTargetException e) { if (e.getTargetException() instanceof IOException) { throw (IOException) e.getTargetException(); } else { throw new RuntimeException(e.getTargetException().toString(), e.getTargetException()); } } } /** * Creates a new {@link DirectoryManager} for the given directory.
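 *
 * For example (a sketch; MockDirectoryManager is a hypothetical class name
 * used only for illustration, and the directory name is made up), tests can
 * swap in a mock implementation before creating managers:
 *
 *   DirectoryManagerFactory.setDirectoryManagerClass(MockDirectoryManager.class);
 *   DirectoryManager dm = DirectoryManagerFactory
 *       .createDirectoryManager("dumpfiles", true);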
* * @param directory * the directory that the directory manager points to * @param readOnly * if false, the directory manager will attempt to create * directories when changing to a location that does not exist * @return the directory manager * @throws IOException * if there was an IO error constructing the directory manager */ public static DirectoryManager createDirectoryManager(String directory, boolean readOnly) throws IOException { return createDirectoryManager(Paths.get(directory), readOnly); } } Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/DirectoryManagerImpl.java000066400000000000000000000173571444772566300326370ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.List; import java.util.zip.GZIPInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; /** * Class to read and write files from one directory. It is guaranteed that the * directory always exists (it is created if needed). * * @author Markus Kroetzsch * */ public class DirectoryManagerImpl implements DirectoryManager { /** * The directory that this object is managing. */ final Path directory; /** * If false, the directory manager will attempt to create directories when * changing to a location that does not exist. 
*/ final boolean readOnly; /** * Constructor * * @param baseDirectory * the directory where the file manager should point initially; * will be created if not existing * @param readOnly * if false, the directory manager will attempt to create * directories when changing to a location that does not exist * @throws IOException * if there was a problem creating the directory */ public DirectoryManagerImpl(Path baseDirectory, Boolean readOnly) throws IOException { this.directory = baseDirectory; this.readOnly = readOnly; createDirectory(this.directory); } @Override public String toString() { return this.directory.toString(); } @Override public DirectoryManager getSubdirectoryManager(String subdirectoryName) throws IOException { return new DirectoryManagerImpl(directory.resolve(subdirectoryName), this.readOnly); } @Override public boolean hasSubdirectory(String subdirectoryName) { Path subdirectoryPath = this.directory.resolve(subdirectoryName); return Files.isDirectory(subdirectoryPath); } @Override public boolean hasFile(String fileName) { Path filePath = this.directory.resolve(fileName); return Files.isRegularFile(filePath) && !Files.isDirectory(filePath); } @Override public long createFile(String fileName, InputStream inputStream) throws IOException { long fileSize; Path filePath = this.directory.resolve(fileName); ensureWritePermission(filePath); try (ReadableByteChannel readableByteChannel = Channels .newChannel(inputStream); FileChannel fc = FileChannel .open(filePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)) { fileSize = fc.transferFrom(readableByteChannel, 0, Long.MAX_VALUE); } return fileSize; } @Override public long createFileAtomic(String fileName, InputStream inputStream) throws IOException { long fileSize; Path filePath = this.directory.resolve(fileName); ensureWritePermission(filePath); Path fileTempPath = this.directory.resolve(fileName + ".part"); try (ReadableByteChannel readableByteChannel = Channels .newChannel(inputStream); FileChannel fc = FileChannel.open(fileTempPath, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE)) { fileSize = fc.transferFrom(readableByteChannel, 0, Long.MAX_VALUE); } Files.move(fileTempPath, filePath); return fileSize; } @Override public void createFile(String fileName, String fileContents) throws IOException { Path filePath = this.directory.resolve(fileName); ensureWritePermission(filePath); try (BufferedWriter bufferedWriter = Files.newBufferedWriter(filePath, StandardCharsets.UTF_8, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)) { bufferedWriter.write(fileContents); } } @Override public OutputStream getOutputStreamForFile(String fileName) throws IOException { Path filePath = this.directory.resolve(fileName); ensureWritePermission(filePath); return Files.newOutputStream(filePath); } @Override public InputStream getInputStreamForFile(String fileName, CompressionType compressionType) throws IOException { Path filePath = this.directory.resolve(fileName); InputStream fileInputStream = Files.newInputStream(filePath, StandardOpenOption.READ); return getCompressorInputStream(fileInputStream, compressionType); } /** * Returns an input stream that applies the required decompression to the * given input stream. 
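 * For instance, as the switch below shows, {@link CompressionType#GZIP}
 * wraps the given stream in a java.util.zip.GZIPInputStream, and
 * {@link CompressionType#BZ2} in a buffered BZip2CompressorInputStream. A
 * sketch of a direct call (the stream variable is made up):
 *
 *   InputStream plain = getCompressorInputStream(rawStream, CompressionType.BZ2);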
* * @param inputStream * the input stream with the (possibly compressed) data * @param compressionType * the kind of compression * @return an input stream with decompressed data * @throws IOException * if there was a problem creating the decompression streams */ protected InputStream getCompressorInputStream(InputStream inputStream, CompressionType compressionType) throws IOException { switch (compressionType) { case NONE: return inputStream; case GZIP: return new GZIPInputStream(inputStream); case BZ2: return new BZip2CompressorInputStream(new BufferedInputStream( inputStream)); default: throw new IllegalArgumentException("Unsupported compression type: " + compressionType); } } @Override public List<String> getSubdirectories(String glob) throws IOException { List<String> result = new ArrayList<>(); try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream( this.directory, glob)) { for (Path entry : directoryStream) { if (Files.isDirectory(entry)) { result.add(entry.getFileName().toString()); } } } return result; } /** * Creates a directory at the given path if it does not exist yet and if the * directory manager was not configured for read-only access. * * @param path * @throws IOException * if it was not possible to create a directory at the given * path */ void createDirectory(Path path) throws IOException { if (Files.exists(path) && Files.isDirectory(path)) { return; } if (this.readOnly) { throw new FileNotFoundException( "The requested directory \"" + path.toString() + "\" does not exist and we are in read-only mode, so it cannot be created."); } Files.createDirectory(path); } /** * Throws an exception if the object is in read-only mode. The file path is * only needed for the error message. A detailed check for writability is * not performed (if there is a specific problem for this one path, e.g., * due to missing permissions, an exception will be created in due course * anyway). * * @param writeFilePath * the name of the file we would like to write to * @throws IOException * if in read-only mode */ void ensureWritePermission(Path writeFilePath) throws IOException { if (this.readOnly) { throw new IOException("Cannot write to \"" + writeFilePath.toString() + "\" since we are in read-only mode."); } } } Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/NestedIterator.java000066400000000000000000000042431444772566300315000ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Utilities * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Iterator; import java.util.NoSuchElementException; /** * Given an iterable of iterables of T, this class simulates an iterator of T. * For example, it can be used to iterate over every element in a list of lists * of T. *

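 * A short usage sketch (the values are made up):
 *
 *   List<List<String>> lists = Arrays.asList(
 *       Arrays.asList("a", "b"),
 *       Collections.<String>emptyList(),
 *       Arrays.asList("c"));
 *   Iterator<String> it = new NestedIterator<>(lists);
 *   // yields "a", "b", "c"; empty inner iterables are skipped
 *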
* This implementation does not support the removal of elements. * * @author Markus Kroetzsch * * @param <T> */ public class NestedIterator<T> implements Iterator<T> { Iterator<? extends Iterable<T>> outerIterator; Iterator<T> innerIterator; /** * Constructor. * * @param iterableOfIterables * the nested iterable to iterate over */ public NestedIterator(Iterable<? extends Iterable<T>> iterableOfIterables) { this.outerIterator = iterableOfIterables.iterator(); advanceOuterIterator(); } @Override public boolean hasNext() { return this.innerIterator != null; } @Override public T next() { if (this.innerIterator == null) { throw new NoSuchElementException(); } T result = this.innerIterator.next(); if (!this.innerIterator.hasNext()) { advanceOuterIterator(); } return result; } @Override public void remove() { throw new UnsupportedOperationException(); } private void advanceOuterIterator() { while ((this.innerIterator == null || !this.innerIterator.hasNext()) && this.outerIterator.hasNext()) { this.innerIterator = this.outerIterator.next().iterator(); } if (this.innerIterator != null && !this.innerIterator.hasNext()) { this.innerIterator = null; } } } Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/Timer.java000066400000000000000000000414501444772566300276250ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit utilities * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.lang.management.ManagementFactory; import java.lang.management.ThreadMXBean; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.builder.HashCodeBuilder; /** * Class for keeping CPU and system times. Timers measure wall clock and/or CPU * times (for specific threads). They can be started and stopped. Times between * these two methods will be recorded and (when starting and stopping more than * once) added up to total times. The number of start-stop measurements is * recorded, and one can also query the average times. Finally, a timer can be * reset. * * There are two main ways of accessing timers: by creating a Timer object * directly or by using a global registry of timers. Registered timers are * identified by their string name and thread id. The global registry is useful * since it makes it much easier to re-integrate measurements taken in many * threads. They also free the caller of the burden of keeping a reference to * the Timer. * * The code in this file was adapted from the ElkTimer class of the ELK reasoner, with contributions from Yevgeny * Kasakov and Pavel Klinov. * * @author Markus Kroetzsch * */ public class Timer { /** Flag for indicating that no times should be taken (just count runs). */ public static final int RECORD_NONE = 0x00000000; /** Flag for indicating that CPU time should be taken. */ public static final int RECORD_CPUTIME = 0x00000001; /** Flag for indicating that wall clock time should be taken.
*/ public static final int RECORD_WALLTIME = 0x00000002; /** Flag for indicating that all supported times should be taken. */ public static final int RECORD_ALL = RECORD_CPUTIME | RECORD_WALLTIME; /** Object to access CPU times for specific threads. */ static final ThreadMXBean tmxb = ManagementFactory.getThreadMXBean(); /** Registry of named timers. */ static final ConcurrentHashMap<Timer, Timer> registeredTimers = new ConcurrentHashMap<>(); final String name; final long threadId; final int todoFlags; long currentStartCpuTime = -1; long currentStartWallTime = -1; boolean isRunning = false; long totalCpuTime = 0; long totalWallTime = 0; int measurements = 0; int threadCount = 0; /** * Constructor. Every timer is identified by three things: a string name, an * integer for flagging its tasks (todos), and a thread id (long). * * Tasks can be flagged by a disjunction of constants like RECORD_CPUTIME * and RECORD_WALLTIME. Only times for which a corresponding flag is set will * be recorded. * * The thread id can be the actual id of the thread that is measured, or 0 * (invalid id) to not assign the timer to any thread. In this case, no CPU * time measurement is possible since Java does not allow us to measure the * total CPU time across all threads. * * @param name * a string that identifies the timer * @param todoFlags * flags to define what the timer will measure * @param threadId * the id of the thread for measuring CPU time or 0 if not * measuring */ public Timer(String name, int todoFlags, long threadId) { this.name = name; this.todoFlags = todoFlags; this.threadId = threadId; if (!tmxb.isThreadCpuTimeEnabled()) { tmxb.setThreadCpuTimeEnabled(true); } } /** * Constructor. Same as {@link #Timer(String, int, long)}, but using the * current thread instead of a freely specified thread. * * @param name * a string that identifies the timer * @param todoFlags * flags to define what the timer will measure */ public Timer(String name, int todoFlags) { this(name, todoFlags, Thread.currentThread().getId()); } /** * Get the string name of the timer. * * @return string name */ public String getName() { return name; } /** * Get the ID of the thread for which this timer was created. * * @return thread ID */ public long getThreadId() { return threadId; } /** * Return true if the timer is running. * * @return true if running */ public boolean isRunning() { return isRunning; } /** * Get the total recorded CPU time in nanoseconds. * * @return recorded CPU time in nanoseconds */ public long getTotalCpuTime() { return totalCpuTime; } /** * Return the average CPU time across all measurements. * * @return the average CPU time across all measurements */ public long getAvgCpuTime() { if (measurements > 0) { return totalCpuTime / measurements; } else { return 0; } } /** * Get the total recorded wall clock time in nanoseconds. * * @return recorded wall time in nanoseconds */ public long getTotalWallTime() { return totalWallTime; } /** * Return the average wall clock time across all measurements. * * @return the average wall clock time across all measurements */ public long getAvgWallTime() { if (measurements > 0) { return totalWallTime / measurements; } else { return 0; } } /** * Start the timer.
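 *
 * A typical measurement (sketch; the timer name is made up) brackets the
 * code of interest between start() and stop() and then reads the
 * accumulated totals:
 *
 *   Timer timer = new Timer("processing", Timer.RECORD_ALL);
 *   timer.start();
 *   // ... the work to be measured ...
 *   timer.stop();
 *   long cpuNanos = timer.getTotalCpuTime(); // in nanoseconds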
*/ public synchronized void start() { if ((todoFlags & RECORD_CPUTIME) != 0) { currentStartCpuTime = getThreadCpuTime(threadId); } else { currentStartCpuTime = -1; } if ((todoFlags & RECORD_WALLTIME) != 0) { currentStartWallTime = System.nanoTime(); } else { currentStartWallTime = -1; } isRunning = true; } /** * Stop the timer (if running) and reset all recorded values. */ public synchronized void reset() { currentStartCpuTime = -1; currentStartWallTime = -1; totalCpuTime = 0; totalWallTime = 0; measurements = 0; isRunning = false; threadCount = 0; } /** * Stop the timer and record the times that have passed since its start. The * times that have passed are added to the internal state and can be * retrieved with {@link #getTotalCpuTime()} etc. * * If CPU times are recorded, then the method returns the CPU time that has * passed since the timer was last started; otherwise -1 is returned. * * @return CPU time that the timer was running, or -1 if timer not running * or CPU time unavailable for other reasons */ public synchronized long stop() { long totalTime = -1; if ((todoFlags & RECORD_CPUTIME) != 0 && (currentStartCpuTime != -1)) { long cpuTime = getThreadCpuTime(threadId); if (cpuTime != -1) { // may fail if thread already dead totalTime = cpuTime - currentStartCpuTime; totalCpuTime += totalTime; } } if ((todoFlags & RECORD_WALLTIME) != 0 && (currentStartWallTime != -1)) { long wallTime = System.nanoTime(); totalWallTime += wallTime - currentStartWallTime; } if (isRunning) { measurements += 1; isRunning = false; } currentStartWallTime = -1; currentStartCpuTime = -1; return totalTime; } /** * The implementation of toString() generates a summary of the times * recorded so far. If the timer is still running, then it will not be * stopped to add the currently measured time to the output but a warning * will be added.
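 *
 * An example of the resulting format (the numbers are made up):
 *
 *   Time for processing (thread 1) for 3 run(s) CPU/Wall/CPU avg/Wall avg (ms): 120/130/40.0/43.333332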
* * @return string description of the timer results and state */ @Override public String toString() { String runningWarning; if (isRunning) { runningWarning = " [timer running!]"; } else { runningWarning = ""; } String timerLabel; if (threadId != 0) { timerLabel = name + " (thread " + threadId + ")"; } else if (threadCount > 1) { timerLabel = name + " (over " + threadCount + " threads)"; } else { timerLabel = name; } if (todoFlags == RECORD_NONE) { return "Timer " + timerLabel + " recorded " + measurements + " run(s); no times taken" + runningWarning; } StringBuilder labels = new StringBuilder(); StringBuilder values = new StringBuilder(); String separator; if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels.append("CPU"); values.append(totalCpuTime / 1000000); separator = "/"; } else { separator = ""; } if ((todoFlags & RECORD_WALLTIME) != 0) { labels.append(separator).append("Wall"); values.append(separator).append(totalWallTime / 1000000); } if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels.append("/CPU avg"); values.append("/").append( (float) (totalCpuTime) / measurements / 1000000); } if ((todoFlags & RECORD_WALLTIME) != 0) { labels.append("/Wall avg"); values.append("/").append( (float) (totalWallTime) / measurements / 1000000); } if (threadCount > 1) { if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels.append("/CPU per thread"); values.append("/").append( (float) (totalCpuTime) / threadCount / 1000000); } if ((todoFlags & RECORD_WALLTIME) != 0) { labels.append("/Wall per thread"); values.append("/").append( (float) (totalWallTime) / threadCount / 1000000); } } return "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values + runningWarning; } /** * Start a timer of the given string name for all todos and the current * thread. If no such timer exists yet, then it will be newly created. * * @param timerName * the name of the timer */ public static void startNamedTimer(String timerName) { getNamedTimer(timerName).start(); } /** * Start a timer of the given string name for the current thread. If no such * timer exists yet, then it will be newly created. * * @param timerName * the name of the timer * @param todoFlags */ public static void startNamedTimer(String timerName, int todoFlags) { getNamedTimer(timerName, todoFlags).start(); } /** * Start a timer of the given string name for the current thread. If no such * timer exists yet, then it will be newly created. * * @param timerName * the name of the timer * @param todoFlags * @param threadId * of the thread to track, or 0 if only system clock should be * tracked */ public static void startNamedTimer(String timerName, int todoFlags, long threadId) { getNamedTimer(timerName, todoFlags, threadId).start(); } /** * Stop a timer of the given string name for all todos and the current * thread. If no such timer exists, -1 will be returned. Otherwise the * return value is the CPU time that was measured. * * @param timerName * the name of the timer * @return CPU time if timer existed and was running, and -1 otherwise */ public static long stopNamedTimer(String timerName) { return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread() .getId()); } /** * Stop a timer of the given string name for the current thread. If no such * timer exists, -1 will be returned. Otherwise the return value is the CPU * time that was measured. 
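 *
 * Sketch of the typical pairing (the timer name is made up):
 *
 *   Timer.startNamedTimer("page-processing");
 *   // ... work ...
 *   long cpuNanos = Timer.stopNamedTimer("page-processing");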
* * @param timerName * the name of the timer * @param todoFlags * @return CPU time if timer existed and was running, and -1 otherwise */ public static long stopNamedTimer(String timerName, int todoFlags) { return stopNamedTimer(timerName, todoFlags, Thread.currentThread() .getId()); } /** * Stop a timer of the given string name for the given thread. If no such * timer exists, -1 will be returned. Otherwise the return value is the CPU * time that was measured. * * @param timerName * the name of the timer * @param todoFlags * @param threadId * of the thread to track, or 0 if only system clock should be * tracked * @return CPU time if timer existed and was running, and -1 otherwise */ public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { Timer key = new Timer(timerName, todoFlags, threadId); if (registeredTimers.containsKey(key)) { return registeredTimers.get(key).stop(); } else { return -1; } } /** * Reset a timer of the given string name for all todos and the current * thread. If no such timer exists yet, then it will be newly created. * * @param timerName * the name of the timer */ public static void resetNamedTimer(String timerName) { getNamedTimer(timerName).reset(); } /** * Reset a timer of the given string name for the current thread. If no such * timer exists yet, then it will be newly created. * * @param timerName * the name of the timer * @param todoFlags */ public static void resetNamedTimer(String timerName, int todoFlags) { getNamedTimer(timerName, todoFlags).reset(); } /** * Reset a timer of the given string name for the given thread. If no such * timer exists yet, then it will be newly created. * * @param timerName * the name of the timer * @param todoFlags * @param threadId * of the thread to track, or 0 if only system clock should be * tracked */ public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { getNamedTimer(timerName, todoFlags, threadId).reset(); } /** * Get a timer of the given string name that takes all possible times * (todos) for the current thread. If no such timer exists yet, then it will * be newly created. * * @param timerName * the name of the timer * @return timer */ public static Timer getNamedTimer(String timerName) { return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread() .getId()); } /** * Get a timer of the given string name and todos for the current thread. If * no such timer exists yet, then it will be newly created. * * @param timerName * the name of the timer * @param todoFlags * @return timer */ public static Timer getNamedTimer(String timerName, int todoFlags) { return getNamedTimer(timerName, todoFlags, Thread.currentThread() .getId()); } /** * Get a timer of the given string name for the given thread. If no such * timer exists yet, then it will be newly created. * * @param timerName * the name of the timer * @param todoFlags * @param threadId * of the thread to track, or 0 if only system clock should be * tracked * @return timer */ public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { Timer key = new Timer(timerName, todoFlags, threadId); registeredTimers.putIfAbsent(key, key); return registeredTimers.get(key); } /** * Collect the total times measured by all known named timers of the given * name. This is useful to add up times that were collected across separate * threads. 
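 *
 * Sketch: if several worker threads each measured work under the name
 * "decode" (a made-up name) via the static start/stop methods above, the
 * combined result can be read in one place:
 *
 *   Timer total = Timer.getNamedTotalTimer("decode");
 *   System.out.println(total); // reports times summed over all threads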
* * @param timerName * @return timer */ public static Timer getNamedTotalTimer(String timerName) { long totalCpuTime = 0; long totalSystemTime = 0; int measurements = 0; int timerCount = 0; int todoFlags = RECORD_NONE; Timer previousTimer = null; for (Map.Entry entry : registeredTimers.entrySet()) { if (entry.getValue().name.equals(timerName)) { previousTimer = entry.getValue(); timerCount += 1; totalCpuTime += previousTimer.totalCpuTime; totalSystemTime += previousTimer.totalWallTime; measurements += previousTimer.measurements; todoFlags |= previousTimer.todoFlags; } } if (timerCount == 1) { return previousTimer; } else { Timer result = new Timer(timerName, todoFlags, 0); result.totalCpuTime = totalCpuTime; result.totalWallTime = totalSystemTime; result.measurements = measurements; result.threadCount = timerCount; return result; } } @Override public int hashCode() { return new HashCodeBuilder(997, 1013).append(name).append(threadId) .append(todoFlags).toHashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof Timer)) { return false; } Timer other = (Timer) obj; return (threadId == other.threadId && todoFlags == other.todoFlags && name .equals(other.name)); } /** * Get the current CPU time of the given thread. * * @param threadId * id of the thread to get CPU time for * @return current CPU time in the given thread, or 0 if thread is 0 */ static long getThreadCpuTime(long threadId) { if (threadId == 0) { // generally invalid as a thread id return 0; } else { return tmxb.getThreadCpuTime(threadId); } } } Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/WebResourceFetcher.java000066400000000000000000000026241444772566300322730ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.io.InputStream; /** * Interface to access files on the Web. Mock implementations can be used for * testing without Web access. * * @author Markus Kroetzsch * */ public interface WebResourceFetcher { /** * Returns an InputStream for the document at the given URL. This can be * used for downloading. The stream should be closed after use. * * @param urlString * the URL of the document * @return InputStream for the requested document * @throws IOException * if the document at the URL could not be opened or the URL was * invalid */ InputStream getInputStreamForUrl(String urlString) throws IOException; }Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/WebResourceFetcherImpl.java000066400000000000000000000066501444772566300331200ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Dump File Handling * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.Proxy; import java.net.URL; import java.net.URLConnection; /** * Standard implementation of {@link WebResourceFetcher}. * * @author Markus Kroetzsch * */ public class WebResourceFetcherImpl implements WebResourceFetcher { protected static String userAgent = "Wikidata Toolkit; Java " + System.getProperty("java.version"); protected static Proxy proxy = null; /** * Returns the proxy that will be used for all requests made by Wikidata * Toolkit. * * @return the proxy, represented as a Java object */ public static Proxy getProxy() { return proxy; } /** * Sets the proxy that will be used for all requests made by Wikidata * Toolkit. This should be set in your own tools based on Wikidata Toolkit, * especially when making large numbers of requests. * * @param proxy * the proxy, represented as a Java object */ public static void setProxy(Proxy proxy) { WebResourceFetcherImpl.proxy = proxy; } /** * Checks whether a proxy is set. * * @return true if a proxy is set, false otherwise */ public static boolean hasProxy() { return (proxy != null); } /** * Returns the string that will be used to identify the user agent on all * requests made by Wikidata Toolkit. * * @return the user agent string */ public static String getUserAgent() { return userAgent; } /** * Sets the string that will be used to identify the user agent on all * requests made by Wikidata Toolkit. This should be set in your own tools * based on Wikidata Toolkit, especially when making large numbers of * requests. * * @param userAgent * the user agent string */ public static void setUserAgent(String userAgent) { WebResourceFetcherImpl.userAgent = userAgent; } /** * Opens a basic URL connection for the given URL and performs basic * configuration. In particular, it will set the User-Agent. The current * proxy settings are also respected. For http(s) URLs, the result is a * {@link HttpURLConnection}. * * @param url * the URL to open * @return the URL connection to access this URL * @throws IOException */ public static URLConnection getUrlConnection(URL url) throws IOException { URLConnection urlConnection; if (hasProxy()) { urlConnection = url.openConnection(proxy); } else { urlConnection = url.openConnection(); } urlConnection.setRequestProperty("User-Agent", userAgent); return urlConnection; } @Override public InputStream getInputStreamForUrl(String urlString) throws IOException { URL url = new URL(urlString); URLConnection urlConnection = getUrlConnection(url); return urlConnection.getInputStream(); } } Wikidata-Toolkit-0.14.6/wdtk-util/src/main/java/org/wikidata/wdtk/util/package-info.java000066400000000000000000000016711444772566300310720ustar00rootroot00000000000000/** * General utility package for Wikidata Toolkit. Utilities * include classes that are so generic that they do not belong * in any particular package, and that do not justify a package * of their own either.
* * @author Markus Kroetzsch * */ package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit utilities * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ Wikidata-Toolkit-0.14.6/wdtk-util/src/test/000077500000000000000000000000001444772566300204765ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/000077500000000000000000000000001444772566300214175ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/000077500000000000000000000000001444772566300222065ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/000077500000000000000000000000001444772566300240035ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300247545ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/wdtk/util/000077500000000000000000000000001444772566300257315ustar00rootroot00000000000000DirectoryManagerFactoryTest.java000066400000000000000000000066001444772566300341460ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/wdtk/utilpackage org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Utilities * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import org.junit.Before; import org.junit.Test; public class DirectoryManagerFactoryTest { public static class TestDirectoryManager implements DirectoryManager { @Override public DirectoryManager getSubdirectoryManager(String subdirectoryName) { return null; } @Override public boolean hasSubdirectory(String subdirectoryName) { return false; } @Override public boolean hasFile(String fileName) { return false; } @Override public long createFile(String fileName, InputStream inputStream) { return 0; } @Override public long createFileAtomic(String fileName, InputStream inputStream) { return 0; } @Override public void createFile(String fileName, String fileContents) { } @Override public OutputStream getOutputStreamForFile(String fileName) { return null; } @Override public InputStream getInputStreamForFile(String fileName, CompressionType compressionType) { return null; } @Override public List<String> getSubdirectories(String glob) { return null; } } @Before public void setup() throws IOException { DirectoryManagerFactory .setDirectoryManagerClass(DirectoryManagerImpl.class); } @Test public void createDirectoryManagerString() throws IOException { Path path = Paths.get(System.getProperty("user.dir")); DirectoryManager dm = DirectoryManagerFactory.createDirectoryManager( System.getProperty("user.dir"), true); assertTrue(dm instanceof DirectoryManagerImpl); DirectoryManagerImpl dmi = (DirectoryManagerImpl) dm; assertTrue(dmi.readOnly); assertEquals(path, dmi.directory); } @Test public void createDefaultDirectoryManagerPath() throws IOException { Path path = Paths.get(System.getProperty("user.dir")); DirectoryManager dm = DirectoryManagerFactory.createDirectoryManager( path, true); assertTrue(dm instanceof DirectoryManagerImpl); DirectoryManagerImpl dmi = (DirectoryManagerImpl) dm; assertTrue(dmi.readOnly); assertEquals(path, dmi.directory); } @Test(expected = RuntimeException.class) public void createDirectoryManagerNoConstructor() throws IOException { DirectoryManagerFactory .setDirectoryManagerClass(TestDirectoryManager.class); DirectoryManagerFactory.createDirectoryManager("/", true); } @Test(expected = IOException.class) public void createDirectoryManagerIoException() throws IOException { DirectoryManagerFactory.createDirectoryManager( "/nonexisting-directory/123456789/hopefully", true); } } Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/wdtk/util/DirectoryManagerTest.java000066400000000000000000000101351444772566300326730ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit Utilities * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ import static org.junit.Assert.assertEquals; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.junit.Before; import org.junit.Test; /** * Test for directory manager implementation. We can only test the read-only * operation of this non-mock component, and only to some degree. * * @author Markus Kroetzsch * */ public class DirectoryManagerTest { DirectoryManagerImpl dm; @Before public void setUp() throws Exception { Path path = Paths.get(System.getProperty("user.dir")); dm = new DirectoryManagerImpl(path, true); } @Test public void testToString() { assertEquals(Paths.get(System.getProperty("user.dir")).toString(), dm.toString()); } @Test(expected = IOException.class) public void MissingSubdirectoryReadOnly() throws IOException { dm.getSubdirectoryManager("1 2 3 not a subdirectory that exists in the test system, hopefully"); } @Test(expected = IOException.class) public void OutputStreamReadOnly() throws IOException { dm.getOutputStreamForFile("file.txt"); } @Test(expected = IOException.class) public void NoCreateFileStringReadOnly() throws IOException { dm.createFile("new-test-file.txt", "new contents"); } @Test(expected = IOException.class) public void NoCreateFileInputStreamReadOnly() throws IOException { ByteArrayInputStream in = new ByteArrayInputStream( "new contents".getBytes(StandardCharsets.UTF_8)); dm.createFile("new-test-file.txt", in); } @Test(expected = IOException.class) public void NoCreateFileAtomicInputStreamReadOnly() throws IOException { ByteArrayInputStream in = new ByteArrayInputStream( "new contents".getBytes(StandardCharsets.UTF_8)); dm.createFileAtomic("new-test-file.txt", in); } @Test public void getCompressionInputStreamNone() throws IOException { ByteArrayInputStream in = new ByteArrayInputStream( "new contents".getBytes(StandardCharsets.UTF_8)); assertEquals(in, dm.getCompressorInputStream(in, CompressionType.NONE)); } @Test public void getCompressionInputStreamGzip() throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); OutputStreamWriter ow = new OutputStreamWriter( new GzipCompressorOutputStream(out), StandardCharsets.UTF_8); ow.write("Test data"); ow.close(); ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); InputStream cin = dm.getCompressorInputStream(in, CompressionType.GZIP); assertEquals("Test data", new BufferedReader(new InputStreamReader(cin)).readLine()); } @Test public void getCompressionInputStreamBz2() throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); OutputStreamWriter ow = new OutputStreamWriter( new BZip2CompressorOutputStream(out), StandardCharsets.UTF_8); ow.write("Test data"); ow.close(); ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); InputStream cin = dm.getCompressorInputStream(in, CompressionType.BZ2); assertEquals("Test data", new BufferedReader(new InputStreamReader(cin)).readLine()); } } Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/wdtk/util/NestedIteratorTest.java000066400000000000000000000047401444772566300323750ustar00rootroot00000000000000package org.wikidata.wdtk.util; 
/* * #%L * Wikidata Toolkit Utilities * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.NoSuchElementException; import org.junit.Test; import static org.junit.Assert.*; public class NestedIteratorTest { @Test public void testIteration() { List<String> list1 = new ArrayList<>(); list1.add("1"); list1.add("2"); List<String> list2 = new ArrayList<>(); list2.add("3"); List<String> list3 = new ArrayList<>(); List<String> list4 = new ArrayList<>(); list4.add("4"); List<List<String>> listOfLists = new ArrayList<>(); listOfLists.add(list1); listOfLists.add(list2); listOfLists.add(list3); listOfLists.add(list4); NestedIterator<String> nestedIterator = new NestedIterator<>( listOfLists); assertTrue(nestedIterator.hasNext()); assertEquals("1", nestedIterator.next()); assertTrue(nestedIterator.hasNext()); assertEquals("2", nestedIterator.next()); assertTrue(nestedIterator.hasNext()); assertEquals("3", nestedIterator.next()); assertTrue(nestedIterator.hasNext()); assertEquals("4", nestedIterator.next()); assertFalse(nestedIterator.hasNext()); } @Test(expected = UnsupportedOperationException.class) public void removeNotSupported() { NestedIterator<String> nestedIterator = new NestedIterator<>( Collections.singletonList(Collections.singletonList("Test"))); nestedIterator.remove(); } @Test(expected = NoSuchElementException.class) public void iterateBeyondInnerList() { NestedIterator<String> nestedIterator = new NestedIterator<>( Collections.singletonList(Collections.emptyList())); nestedIterator.next(); } @Test(expected = NoSuchElementException.class) public void iterateBeyondOuterList() { NestedIterator<String> nestedIterator = new NestedIterator<>( Collections.emptyList()); nestedIterator.next(); } } Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/wdtk/util/TimerTest.java000066400000000000000000000236131444772566300305210ustar00rootroot00000000000000package org.wikidata.wdtk.util; /* * #%L * Wikidata Toolkit utilities * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.*; import java.lang.management.ManagementFactory; import java.lang.management.ThreadMXBean; import java.util.Random; import org.junit.Test; public class TimerTest { /** * Base value for the time in microseconds that we allow between our * measured times and what the timer returns. In theory, there is not really * any such time but in practice a sufficiently high value should work.
*/ static final int TIME_TOLERANCE = 200000; /** * Spend some time computing to be able to measure something. */ void doDummyComputation() { long dummyValue = 0; // We use a random number and a subsequent check to avoid smart // compilers Random rand = new Random(); int seed = rand.nextInt(10) + 1; for (int i = 0; i < 10000000; i++) { dummyValue = 10 + ((31 * (dummyValue + seed)) % 1234567); } if (dummyValue < 10) { throw new RuntimeException( "This never happens, but let's pretend the value matters to avoid this being compiled away."); } try { Thread.sleep(50); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } @Test public void basicTimerOperation() { Timer timer = new Timer("Test timer", Timer.RECORD_ALL); assertEquals(timer.getName(), "Test timer"); long threadId = timer.getThreadId(); assertEquals(timer.getAvgCpuTime(), 0); assertEquals(timer.getAvgWallTime(), 0); ThreadMXBean tmxb = ManagementFactory.getThreadMXBean(); if (!tmxb.isThreadCpuTimeEnabled()) { tmxb.setThreadCpuTimeEnabled(true); } long cpuTime1 = tmxb.getThreadCpuTime(threadId); long wallTime1 = System.nanoTime(); timer.start(); doDummyComputation(); assertTrue("Timer should be running", timer.isRunning()); timer.stop(); cpuTime1 = tmxb.getThreadCpuTime(threadId) - cpuTime1; wallTime1 = System.nanoTime() - wallTime1; assertTrue( "Unrealistic CPU time: " + timer.getTotalCpuTime() + " should be closer to " + cpuTime1, (cpuTime1 - TimerTest.TIME_TOLERANCE) <= timer .getTotalCpuTime() && timer.getTotalCpuTime() <= cpuTime1); assertTrue( "Unrealistic wall time: " + timer.getTotalWallTime() + " should be closer to " + wallTime1, (wallTime1 - 2 * TimerTest.TIME_TOLERANCE) <= timer .getTotalWallTime() && timer.getTotalWallTime() <= wallTime1); long cpuTime2 = tmxb.getThreadCpuTime(threadId); long wallTime2 = System.nanoTime(); timer.start(); doDummyComputation(); timer.stop(); cpuTime1 += tmxb.getThreadCpuTime(threadId) - cpuTime2; wallTime1 += System.nanoTime() - wallTime2; assertTrue( "Unrealistic total CPU time: " + timer.getTotalCpuTime() + " should be closer to " + cpuTime1, (cpuTime1 - 2 * TimerTest.TIME_TOLERANCE) <= timer .getTotalCpuTime() && timer.getTotalCpuTime() <= cpuTime1); assertTrue( "Unrealistic total wall time: " + timer.getTotalWallTime() + " should be closer to " + wallTime1, (wallTime1 - 4 * TimerTest.TIME_TOLERANCE) <= timer .getTotalWallTime() && timer.getTotalWallTime() <= wallTime1); assertEquals(timer.getTotalCpuTime() / 2, timer.getAvgCpuTime()); assertEquals(timer.getTotalWallTime() / 2, timer.getAvgWallTime()); timer.reset(); assertEquals(timer.getTotalCpuTime(), 0); assertEquals(timer.getTotalWallTime(), 0); assertFalse("Timer should not be running", timer.isRunning()); } @Test public void namedTimers() { Timer timerA1 = Timer.getNamedTimer("test timer"); Timer timerA2 = Timer.getNamedTimer("test timer"); Timer timerA3 = Timer.getNamedTimer("test timer", Timer.RECORD_ALL); Timer timerA4 = Timer.getNamedTimer("test timer", Timer.RECORD_ALL, timerA1.getThreadId()); Timer timerCpu = Timer .getNamedTimer("test timer", Timer.RECORD_CPUTIME); Timer timerWall = Timer.getNamedTimer("test timer", Timer.RECORD_WALLTIME); Timer timerNoThread = Timer.getNamedTimer("test timer", Timer.RECORD_ALL, 0); Timer timerNone = Timer.getNamedTimer("test timer none", Timer.RECORD_NONE); Timer timerB = Timer.getNamedTimer("test timer 2"); // Testing Timer equality: assertEquals(timerA1, timerA2); assertEquals(timerA1, timerA3); assertEquals(timerA1, timerA4);
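		// Clarifying note (added; not in the original test): Timer.equals()
		// compares name, todoFlags and threadId, so timerA1..timerA4 all
		// denote the same registered timer, while the timers below differ in
		// flags or thread id and are therefore distinct.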
assertNotEquals(timerA1, timerCpu); assertNotEquals(timerA1, timerWall); assertNotEquals(timerA1, timerNoThread); assertNotEquals(timerA1, timerB); assertNotEquals(timerA1, this); // Testing start/stop operation: Timer.startNamedTimer("test timer"); Timer.startNamedTimer("test timer", Timer.RECORD_CPUTIME); Timer.startNamedTimer("test timer", Timer.RECORD_WALLTIME); Timer.startNamedTimer("test timer", Timer.RECORD_ALL, 0); doDummyComputation(); Timer.stopNamedTimer("test timer"); Timer.stopNamedTimer("test timer", Timer.RECORD_CPUTIME); Timer.stopNamedTimer("test timer", Timer.RECORD_WALLTIME); Timer.stopNamedTimer("test timer", Timer.RECORD_ALL, 0); assertTrue("Named timer should have measured a non-zero CPU time.", timerA1.getTotalCpuTime() > 0); assertTrue("Named timer should have measured a non-zero wall time.", timerA1.getTotalWallTime() > 0); assertTrue( "Timer for CPU time should have measured a non-zero CPU time.", timerCpu.getTotalCpuTime() > 0); assertEquals("Timer for CPU time should not have measured a wall time.", 0, timerCpu.getTotalWallTime()); assertEquals("Timer for wall time should not have measured a CPU time.", 0, timerWall.getTotalCpuTime()); assertTrue( "Timer for wall time should have measured a non-zero wall time.", timerWall.getTotalWallTime() > 0); assertEquals("Timer without threadId should not have measured a CPU time.", 0, timerNoThread.getTotalCpuTime()); assertTrue( "Timer without threadId should have measured a non-zero wall time.", timerNoThread.getTotalWallTime() > 0); // Testing total timer creation: Timer totalTimer1 = Timer.getNamedTotalTimer("test timer"); // There should be four *distinct* timers of that name assertEquals(totalTimer1.getTotalCpuTime(), timerA1.getTotalCpuTime() + timerCpu.getTotalCpuTime() + timerWall.getTotalCpuTime() + timerNoThread.getTotalCpuTime()); assertEquals(totalTimer1.getTotalWallTime(), timerA1.getTotalWallTime() + timerCpu.getTotalWallTime() + timerWall.getTotalWallTime() + timerNoThread.getTotalWallTime()); Timer totalTimer2 = Timer.getNamedTotalTimer("test timer 2"); // There should be just one timer of that name assertEquals(totalTimer2, timerB); // Testing toString operation assertTrue(timerA1.toString().startsWith( "Time for test timer (thread " + timerA1.getThreadId() + ") for 1 run(s) CPU/Wall/CPU avg/Wall avg (ms):")); assertTrue(timerCpu.toString().startsWith( "Time for test timer (thread " + timerCpu.getThreadId() + ") for 1 run(s) CPU/CPU avg (ms):")); assertTrue(timerWall.toString().startsWith( "Time for test timer (thread " + timerWall.getThreadId() + ") for 1 run(s) Wall/Wall avg (ms):")); assertTrue(totalTimer1.toString().startsWith( "Time for test timer (over 4 threads)")); assertTrue(timerNoThread.toString().startsWith( "Time for test timer for 1 run(s)")); assertEquals(timerNone.toString(), "Timer test timer none (thread " + timerNone.getThreadId() + ") recorded 0 run(s); no times taken"); timerA1.start(); assertTrue(timerA1.toString().endsWith("[timer running!]")); // Testing reset operation: Timer.resetNamedTimer("test timer"); Timer.resetNamedTimer("test timer", Timer.RECORD_CPUTIME); Timer.resetNamedTimer("test timer", Timer.RECORD_WALLTIME); Timer.resetNamedTimer("test timer", Timer.RECORD_ALL, 0); assertEquals("Named timer should have reset CPU time.", 0, timerA1.getTotalCpuTime()); assertEquals("Named timer should have reset wall time.", 0, timerA1.getTotalWallTime()); assertEquals("Timer for CPU time should have reset CPU time.", 0, timerCpu.getTotalCpuTime()); assertEquals("Timer for CPU time 
should have reset wall time.", 0, timerCpu.getTotalWallTime());
		assertEquals("Timer for wall time should have reset CPU time.", 0,
				timerWall.getTotalCpuTime());
		assertEquals("Timer for wall time should have reset wall time.", 0,
				timerWall.getTotalWallTime());
		assertEquals("Timer without threadId should have reset CPU time.", 0,
				timerNoThread.getTotalCpuTime());
		assertEquals("Timer without threadId should have reset wall time.", 0,
				timerNoThread.getTotalWallTime());

		// Testing unregistered timer stop (does not create one):
		assertEquals(Timer.stopNamedTimer("unknown name"), -1);
	}

	@Test
	public void timerStopReturnValues() {
		Timer timer1 = new Timer("stop test timer", Timer.RECORD_ALL);
		Timer timer2 = new Timer("stop test timer wall", Timer.RECORD_WALLTIME);
		timer1.start();
		timer2.start();
		doDummyComputation();
		long cpuTime1 = timer1.stop();
		long cpuTime2 = timer2.stop();
		assertEquals(cpuTime1, timer1.getTotalCpuTime());
		assertEquals(cpuTime2, -1);
		long cpuTime3 = timer1.stop();
		assertEquals(cpuTime3, -1);
	}

	@Test
	public void enableCpuTimeTaking() {
		ThreadMXBean tmxb = ManagementFactory.getThreadMXBean();
		tmxb.setThreadCpuTimeEnabled(false);
		Timer timer = new Timer("Test timer", Timer.RECORD_ALL);
		timer.start();
		doDummyComputation();
		timer.stop();
		assertTrue("Timer should have measured a CPU time.",
				timer.getTotalCpuTime() > 0);
	}
}
Wikidata-Toolkit-0.14.6/wdtk-util/src/test/java/org/wikidata/wdtk/util/WebResourceFetcherTest.java000066400000000000000000000025441444772566300331670ustar00rootroot00000000000000package org.wikidata.wdtk.util;

/*
 * #%L
 * Wikidata Toolkit Utilities
 * %%
 * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.net.InetSocketAddress;
import java.net.Proxy;

import org.junit.Test;

public class WebResourceFetcherTest {

	@Test
	public void testSetUserAgent() {
		WebResourceFetcherImpl.setUserAgent("My user agent");
		assertEquals("My user agent", WebResourceFetcherImpl.getUserAgent());
	}

	@Test
	public void testSetProxy() {
		Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(
				"test.address", 8080));
		WebResourceFetcherImpl.setProxy(proxy);
		assertTrue(WebResourceFetcherImpl.hasProxy());
		assertEquals(proxy, WebResourceFetcherImpl.getProxy());
	}
}
Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/000077500000000000000000000000001444772566300202435ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/LICENSE.txt000066400000000000000000000261351444772566300220730ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/
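The two test classes above exercise the measurement and web-access utilities of the wdtk-util module. For orientation, here is a minimal sketch of the named-timer API as exercised by TimerTest; the timer name and the workload are illustrative only, and expensiveWork merely stands in for whatever code is to be measured:

import org.wikidata.wdtk.util.Timer;

public class TimerUsageSketch {
	public static void main(String[] args) {
		// Named timers are registered globally, so they can be started and
		// stopped by name from anywhere in the code:
		Timer.startNamedTimer("example work");
		expensiveWork();
		Timer.stopNamedTimer("example work");

		// toString() reports the run count and the CPU/wall times measured
		// so far, in milliseconds:
		System.out.println(Timer.getNamedTimer("example work"));
	}

	private static void expensiveWork() {
		// Busy loop whose result is checked afterwards, so that the compiler
		// cannot optimize the work away (mirroring doDummyComputation above):
		long v = 0;
		for (int i = 0; i < 10000000; i++) {
			v = 10 + ((31 * (v + 1)) % 1234567);
		}
		if (v < 10) {
			throw new IllegalStateException("unreachable");
		}
	}
}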
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/pom.xml000066400000000000000000000033271444772566300215650ustar00rootroot00000000000000<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>

	<parent>
		<groupId>org.wikidata.wdtk</groupId>
		<artifactId>wdtk-parent</artifactId>
		<version>0.14.6</version>
	</parent>

	<artifactId>wdtk-wikibaseapi</artifactId>
	<packaging>jar</packaging>

	<name>Wikidata Toolkit Wikibase API</name>
	<description>Java library to access the Wikibase Web API</description>

	<dependencies>
		<dependency>
			<groupId>${project.groupId}</groupId>
			<artifactId>wdtk-datamodel</artifactId>
			<version>${project.version}</version>
		</dependency>
		<dependency>
			<groupId>${project.groupId}</groupId>
			<artifactId>wdtk-util</artifactId>
			<version>${project.version}</version>
		</dependency>
		<dependency>
			<groupId>${project.groupId}</groupId>
			<artifactId>wdtk-testing</artifactId>
			<version>${project.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>com.squareup.okhttp3</groupId>
			<artifactId>okhttp</artifactId>
			<version>${okhttpVersion}</version>
		</dependency>
		<dependency>
			<groupId>com.squareup.okhttp3</groupId>
			<artifactId>okhttp-urlconnection</artifactId>
			<version>${okhttpVersion}</version>
		</dependency>
		<dependency>
			<groupId>com.squareup.okhttp3</groupId>
			<artifactId>mockwebserver</artifactId>
			<version>${okhttpVersion}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>se.akerfeldt</groupId>
			<artifactId>okhttp-signpost</artifactId>
			<version>${okhttpSignpostVersion}</version>
		</dependency>
	</dependencies>
</project>
Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/000077500000000000000000000000001444772566300210325ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/000077500000000000000000000000001444772566300217565ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/000077500000000000000000000000001444772566300226775ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/000077500000000000000000000000001444772566300234665ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/000077500000000000000000000000001444772566300252635ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300262345ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/000077500000000000000000000000001444772566300305245ustar00rootroot00000000000000ApiConnection.java000066400000000000000000000457611444772566300340520ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ package org.wikidata.wdtk.wikibaseapi; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.tuple.ImmutablePair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.wikibaseapi.apierrors.AssertUserFailedException; import org.wikidata.wdtk.wikibaseapi.apierrors.MaxlagErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorHandler; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import okhttp3.MediaType; import okhttp3.MultipartBody; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; /** * Class to build up and hold a connection to a Wikibase API. * * @author Michael Guenther * @author Antonin Delpeuch * @author Lu Liu */ @JsonIgnoreProperties(ignoreUnknown = true) public abstract class ApiConnection { static final Logger logger = LoggerFactory.getLogger(ApiConnection.class); /** * URL of the API of wikidata.org. */ public final static String URL_WIKIDATA_API = "https://www.wikidata.org/w/api.php"; /** * URL of the API of test.wikidata.org. */ public final static String URL_TEST_WIKIDATA_API = "https://test.wikidata.org/w/api.php"; /** * URL of the API of commons.wikimedia.org. */ public final static String URL_WIKIMEDIA_COMMONS_API = "https://commons.wikimedia.org/w/api.php"; /** * Name of the HTTP parameter to submit an action to the API. */ protected final static String PARAM_ACTION = "action"; /** * Name of the HTTP parameter to submit the requested result format to the * API. */ protected final static String PARAM_FORMAT = "format"; /** * MediaWiki assert parameter to ensure we are editing while logged in. */ protected static final String ASSERT_PARAMETER = "assert"; protected static final MediaType URLENCODED_MEDIA_TYPE = MediaType.parse("application/x-www-form-urlencoded"); /** * URL to access the Wikibase API. */ protected final String apiBaseUrl; /** * True after successful login. */ protected boolean loggedIn = false; /** * User name used to log in. */ protected String username = ""; /** * Map of requested tokens. */ protected final Map tokens; /** * Maximum time to wait for when establishing a connection, in milliseconds. * For negative values, no timeout is set. */ protected int connectTimeout = -1; /** * Maximum time to wait for a server response once the connection was established. * For negative values, no timeout is set. */ protected int readTimeout = -1; /** * Http client used for making requests. */ private OkHttpClient client; /** * Mapper object used for deserializing JSON data. */ private final ObjectMapper mapper = new ObjectMapper(); /** * Creates an object to manage a connection to the Web API of a Wikibase * site. 
* * @param apiBaseUrl * base URI to the API, e.g., * "https://www.wikidata.org/w/api.php/" */ public ApiConnection(String apiBaseUrl) { this(apiBaseUrl, null); } /** * Creates an object to manage a connection to the Web API of a Wikibase * site. * * @param apiBaseUrl * base URI to the API, e.g., * "https://www.wikidata.org/w/api.php/" * @param tokens * CSRF tokens already acquired by the connection */ public ApiConnection(String apiBaseUrl, Map tokens) { this.apiBaseUrl = apiBaseUrl; this.tokens = tokens != null ? tokens : new HashMap<>(); } /** * Subclasses can customize their own {@link OkHttpClient.Builder} instances. * * An example: *

	 * <pre>
	 *     return new OkHttpClient.Builder()
	 *             .connectTimeout(5, TimeUnit.MILLISECONDS)
	 *             .readTimeout(5, TimeUnit.MILLISECONDS)
	 *             .cookieJar(...);
	 * </pre>
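	 * <p>
	 * Note that timeouts configured on the returned builder are overridden
	 * if {@link #setConnectTimeout(int)} or {@link #setReadTimeout(int)}
	 * have been given a non-negative value on this connection.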
    */ protected abstract OkHttpClient.Builder getClientBuilder(); /** * Getter for the apiBaseUrl. */ @JsonProperty("baseUrl") public String getApiBaseUrl() { return apiBaseUrl; } /** * Returns true if a user is logged in. This does not perform * any request to the server: it just returns our own internal state. * To check if our authentication credentials are still considered * valid by the remote server, use {@link ApiConnection#checkCredentials()}. * * @return true if the connection is in a logged in state */ @JsonProperty("loggedIn") public boolean isLoggedIn() { return loggedIn; } /** * Checks that the credentials are still valid for the * user currently logged in. This can fail if (for instance) * the cookies expired, or were invalidated by a logout from * a different client. * * This method queries the API and throws {@link AssertUserFailedException} * if the check failed. This does not update the state of the connection * object. * @throws MediaWikiApiErrorException * @throws IOException */ public void checkCredentials() throws IOException, MediaWikiApiErrorException { Map parameters = new HashMap<>(); parameters.put("action", "query"); sendJsonRequest("POST", parameters); } /** * Returns the username of the user who is currently logged in. If there is * no user logged in the result is an empty string. * * @return name of the logged in user */ @JsonProperty("username") public String getCurrentUser() { return username; } /** * Returns the map of tokens (such as csrf token and login token) currently used in this connection. */ @JsonProperty("tokens") public Map getTokens() { return Collections.unmodifiableMap(tokens); } /** * Sets the maximum time to wait for when establishing a connection, in milliseconds. * For negative values, no timeout is set. * * @see HttpURLConnection#setConnectTimeout */ public void setConnectTimeout(int timeout) { connectTimeout = timeout; client = null; } /** * Sets the maximum time to wait for a server response once the connection was established, in milliseconds. * For negative values, no timeout is set. * * @see HttpURLConnection#setReadTimeout */ public void setReadTimeout(int timeout) { readTimeout = timeout; client = null; } /** * Maximum time to wait for when establishing a connection, in milliseconds. * For negative values, no timeout is set, which is the default behaviour (for * backwards compatibility). * * @see HttpURLConnection#getConnectTimeout */ @JsonProperty("connectTimeout") public int getConnectTimeout() { return connectTimeout; } /** * Maximum time to wait for a server response once the connection was established. * For negative values, no timeout is set, which is the default behaviour (for backwards * compatibility). * * @see HttpURLConnection#getReadTimeout */ @JsonProperty("readTimeout") public int getReadTimeout() { return readTimeout; } /** * Logs the current user out. * * @throws IOException * @throws MediaWikiApiErrorException */ public abstract void logout() throws IOException, MediaWikiApiErrorException; /** * Return a token of given type. 
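	 * A cached token is returned if one is available; otherwise a fresh
	 * token is fetched from the API and cached for subsequent calls. A
	 * cached value can be discarded with {@link #clearToken(String)}.
	 *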
* @param tokenType The kind of token to retrieve like "csrf" or "login"
	 * @return a token
	 * @throws MediaWikiApiErrorException
	 *             if MediaWiki returned an error
	 * @throws IOException
	 *             if a network error occurred
	 */
	String getOrFetchToken(String tokenType)
			throws IOException, MediaWikiApiErrorException {
		if (tokens.containsKey(tokenType)) {
			return tokens.get(tokenType);
		}
		String value = fetchToken(tokenType);
		tokens.put(tokenType, value);
		// TODO if fetchToken raises an exception, we could try to recover here:
		// (1) Check if we are still logged in; maybe log in again
		// (2) If there is another error, maybe just run the operation again
		return value;
	}

	/**
	 * Removes the cached value of the given token type.
	 */
	void clearToken(String tokenType) {
		tokens.remove(tokenType);
	}

	/**
	 * Executes an API query action to get a new token. The method only
	 * executes the action, without doing any checks first. If errors occur,
	 * a suitable exception is thrown.
	 *
	 * @param tokenType The kind of token to retrieve like "csrf" or "login"
	 * @return newly retrieved token
	 * @throws IOException
	 *             if a network error occurred
	 * @throws MediaWikiApiErrorException
	 *             if MediaWiki returned an error when fetching the token
	 */
	private String fetchToken(String tokenType)
			throws IOException, MediaWikiApiErrorException {
		Map<String, String> params = new HashMap<>();
		params.put(ApiConnection.PARAM_ACTION, "query");
		params.put("meta", "tokens");
		params.put("type", tokenType);

		JsonNode root = this.sendJsonRequest("POST", params);
		return root.path("query").path("tokens").path(tokenType + "token").textValue();
	}

	/**
	 * Sends a request to the API with the given parameters and the given
	 * request method and returns the result JSON tree. It automatically fills
	 * the cookie map with cookies in the result header after the request. It
	 * logs the request warnings and makes sure that the "format": "json"
	 * parameter is set.
	 *
	 * @param requestMethod
	 *            either POST or GET
	 * @param parameters
	 *            Maps parameter keys to values. Out of this map the function
	 *            will create a query string for the request.
	 * @return API result
	 * @throws IOException
	 * @throws MediaWikiApiErrorException if the API returns an error
	 */
	public JsonNode sendJsonRequest(String requestMethod,
			Map<String, String> parameters)
			throws IOException, MediaWikiApiErrorException {
		return sendJsonRequest(requestMethod, parameters, null);
	}

	/**
	 * Sends a request to the API with the given parameters and the given
	 * request method and returns the result JSON tree. It automatically fills
	 * the cookie map with cookies in the result header after the request. It
	 * logs the request warnings and makes sure that the "format": "json"
	 * parameter is set.
	 *
	 * @param requestMethod
	 *            either POST or GET
	 * @param parameters
	 *            Maps parameter keys to values. Out of this map the function
	 *            will create a query string for the request.
	 * @param files
	 *            If GET, this should be null. If POST, this can contain a
	 *            list of files to upload, indexed by the parameter to pass
	 *            them with. The first component of the pair is the filename
	 *            exposed to the server, and the second component is the local
	 *            file to upload. Set to null or empty map to avoid uploading
	 *            any file.
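	 *            For illustration, an entry could look like
	 *            {@code files.put("file", new ImmutablePair<>("example.png", new File("/tmp/example.png")))},
	 *            where the parameter name and paths are hypothetical.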
* @return API result * @throws IOException * @throws MediaWikiApiErrorException if the API returns an error */ public JsonNode sendJsonRequest(String requestMethod, Map parameters, Map> files) throws IOException, MediaWikiApiErrorException { parameters.put(ApiConnection.PARAM_FORMAT, "json"); if (loggedIn) { parameters.put(ApiConnection.ASSERT_PARAMETER, "user"); } try (InputStream response = sendRequest(requestMethod, parameters, files)) { JsonNode root = this.mapper.readTree(response); this.checkErrors(root); this.logWarnings(root); return root; } } /** * Sends a request to the API with the given parameters and the given * request method and returns the result string. It automatically fills the * cookie map with cookies in the result header after the request. * * Warning: You probably want to use ApiConnection.sendJsonRequest * that execute the request using JSON content format, * throws the errors and logs the warnings. * * @param requestMethod * either POST or GET * @param parameters * Maps parameter keys to values. Out of this map the function * will create a query string for the request. * @param files * If GET, this should be null. If POST, this can contain * a list of files to upload, indexed by the parameter to pass them with. * The first component of the pair is the filename exposed to the server, * and the second component is the path to the local file to upload. * Set to null or empty map to avoid uploading any file. * @return API result * @throws IOException */ public InputStream sendRequest(String requestMethod, Map parameters, Map> files) throws IOException { Request request; String queryString = getQueryString(parameters); if ("GET".equalsIgnoreCase(requestMethod)) { request = new Request.Builder().url(apiBaseUrl + "?" + queryString).build(); } else if ("POST".equalsIgnoreCase(requestMethod)) { RequestBody body; if (files != null && !files.isEmpty()) { MediaType formDataMediaType = MediaType.parse("multipart/form-data"); MultipartBody.Builder builder = new MultipartBody.Builder(); builder.setType(formDataMediaType); parameters.entrySet().stream() .forEach(entry -> builder.addFormDataPart(entry.getKey(), entry.getValue())); files.entrySet().stream() .forEach(entry -> builder.addFormDataPart(entry.getKey(), entry.getValue().getLeft(), RequestBody.create(formDataMediaType,entry.getValue().getRight()))); body = builder.build(); } else { body = RequestBody.create(queryString, URLENCODED_MEDIA_TYPE); } request = new Request.Builder().url(apiBaseUrl).post(body).build(); } else { throw new IllegalArgumentException("Expected the requestMethod to be either GET or POST, but got " + requestMethod); } if (client == null) { buildClient(); } Response response = client.newCall(request).execute(); return Objects.requireNonNull(response.body()).byteStream(); } private void buildClient() { OkHttpClient.Builder builder = getClientBuilder(); if (connectTimeout >= 0) { builder.connectTimeout(connectTimeout, TimeUnit.MILLISECONDS); } if (readTimeout >= 0) { builder.readTimeout(readTimeout, TimeUnit.MILLISECONDS); } client = builder.build(); } /** * Checks if an API response contains an error and throws a suitable * exception in this case. 
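	 * Maxlag errors receive special treatment: the lag value reported by
	 * the server is extracted and passed along in the thrown
	 * {@link MaxlagErrorException}.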
* * @param root * root node of the JSON result * @throws MediaWikiApiErrorException */ protected void checkErrors(JsonNode root) throws MediaWikiApiErrorException { if (root.has("error")) { JsonNode errorNode = root.path("error"); String code = errorNode.path("code").asText("UNKNOWN"); String info = errorNode.path("info").asText("No details provided"); // Special case for the maxlag error since we also want to return // the lag value in the exception thrown if (errorNode.has("lag") && MediaWikiApiErrorHandler.ERROR_MAXLAG.equals(code)) { double lag = errorNode.path("lag").asDouble(); throw new MaxlagErrorException(info, lag); } else { MediaWikiApiErrorHandler.throwMediaWikiApiErrorException(code, info); } } } /** * Extracts and logs any warnings that are returned in an API response. * * @param root * root node of the JSON result */ protected void logWarnings(JsonNode root) { for (String warning : getWarnings(root)) { logger.warn("API warning " + warning); } } /** * Extracts warnings that are returned in an API response. * * @param root * root node of the JSON result */ List getWarnings(JsonNode root) { ArrayList warnings = new ArrayList<>(); if (root.has("warnings")) { JsonNode warningNode = root.path("warnings"); Iterator> moduleIterator = warningNode .fields(); while (moduleIterator.hasNext()) { Map.Entry moduleNode = moduleIterator.next(); Iterator moduleOutputIterator = moduleNode.getValue() .elements(); while (moduleOutputIterator.hasNext()) { JsonNode moduleOutputNode = moduleOutputIterator.next(); if (moduleOutputNode.isTextual()) { warnings.add("[" + moduleNode.getKey() + "]: " + moduleOutputNode.textValue()); } else if (moduleOutputNode.isArray()) { Iterator messageIterator = moduleOutputNode .elements(); while (messageIterator.hasNext()) { JsonNode messageNode = messageIterator.next(); warnings.add("[" + moduleNode.getKey() + "]: " + messageNode.path("html").path("*") .asText(messageNode.toString())); } } else { warnings.add("[" + moduleNode.getKey() + "]: " + "Warning was not understood. Please report this to Wikidata Toolkit. JSON source: " + moduleOutputNode.toString()); } } } } return warnings; } /** * Returns the query string of a URL from a parameter list. * * @param params * Map with parameters * @return query string */ String getQueryString(Map params) { StringBuilder builder = new StringBuilder(); try { boolean first = true; for (Map.Entry entry : params.entrySet()) { if (first) { first = false; } else { builder.append("&"); } builder.append(URLEncoder.encode(entry.getKey(), "UTF-8")); builder.append("="); builder.append(URLEncoder.encode(entry.getValue(), "UTF-8")); } } catch (UnsupportedEncodingException e) { throw new RuntimeException( "Your Java version does not support UTF-8 encoding."); } return builder.toString(); } /** * Builds a string that serializes a list of objects separated by the pipe * character. The toString methods are used to turn objects into strings. * This operation is commonly used to build parameter lists for API * requests. 
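	 * For example, {@code implodeObjects(Arrays.asList("Q42", "P31"))}
	 * returns the string {@code "Q42|P31"}.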
* * @param objects * the objects to implode * @return string of imploded objects */ public static String implodeObjects(Iterable objects) { StringBuilder builder = new StringBuilder(); boolean first = true; for (Object o : objects) { if (first) { first = false; } else { builder.append("|"); } builder.append(o.toString()); } return builder.toString(); } } BasicApiConnection.java000066400000000000000000000301331444772566300350030ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.net.*; import java.util.HashMap; import java.util.List; import java.util.Map; import okhttp3.*; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.JsonNode; import org.wikidata.wdtk.wikibaseapi.apierrors.TokenErrorException; import static org.wikidata.wdtk.wikibaseapi.LoginValue.*; /** * A connection to the MediaWiki API established via * standard login with username and password. * * @author Antonin Delpeuch * */ public class BasicApiConnection extends ApiConnection { /** * Password used to log in. */ @JsonIgnore String password = ""; /** * Used for managing and serializing/deserializing cookies. */ private final CookieManager cookieManager; /** * Creates an object to manage a connection to the Web API of a Wikibase * site. * * @param apiBaseUrl * base URI to the API, e.g., * "https://www.wikidata.org/w/api.php/" */ public BasicApiConnection(String apiBaseUrl) { super(apiBaseUrl); cookieManager = new CookieManager(); cookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ALL); } /** * Deserializes an existing BasicApiConnection from JSON. * * @param apiBaseUrl base URL of the API to use, e.g. "https://www.wikidata.org/w/api.php/" * @param cookies map of cookies used for this session * @param username name of the current user * @param loggedIn true if login succeeded. 
* @param tokens map of tokens used for this session * @param connectTimeout the maximum time to wait for when establishing a connection, in milliseconds * @param readTimeout the maximum time to wait for a server response once the connection was established, in milliseconds */ @JsonCreator protected BasicApiConnection( @JsonProperty("baseUrl") String apiBaseUrl, @JsonProperty("cookies") List cookies, @JsonProperty("username") String username, @JsonProperty("loggedIn") boolean loggedIn, @JsonProperty("tokens") Map tokens, @JsonProperty("connectTimeout") int connectTimeout, @JsonProperty("readTimeout") int readTimeout) { super(apiBaseUrl, tokens); this.username = username; this.loggedIn = loggedIn; this.connectTimeout = connectTimeout; this.readTimeout = readTimeout; cookieManager = new CookieManager(); cookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ALL); CookieStore cookieStore = cookieManager.getCookieStore(); // We only deal with apiBaseUrl here. URI uri = URI.create(apiBaseUrl); cookies.stream().map(HttpCookieWrapper::toHttpCookie) .forEach(cookie -> cookieStore.add(uri, cookie)); } @Override protected OkHttpClient.Builder getClientBuilder() { return new OkHttpClient.Builder() .cookieJar(new JavaNetCookieJar(cookieManager)); } /** * Creates an API connection to test.wikidata.org. * * @return {@link BasicApiConnection} */ public static BasicApiConnection getTestWikidataApiConnection() { return new BasicApiConnection(ApiConnection.URL_TEST_WIKIDATA_API); } /** * Creates an API connection to wikidata.org. * * @return {@link BasicApiConnection} */ public static BasicApiConnection getWikidataApiConnection() { return new BasicApiConnection(ApiConnection.URL_WIKIDATA_API); } /** * Creates an API connection to commons.wikimedia.org. * * @return {@link BasicApiConnection} */ public static BasicApiConnection getWikimediaCommonsApiConnection() { return new BasicApiConnection(ApiConnection.URL_WIKIMEDIA_COMMONS_API); } /** * Logs in using the specified user credentials. After successful login, the * API connection remains in a logged in state, and future actions will be * run as a logged in user. * * @param username * the name of the user to log in * @param password * the password of the user * @throws LoginFailedException * if the login failed for some reason */ public void login(String username, String password) throws LoginFailedException { login(username, password, this::confirmLogin); } /** * Logs in using the main user credentials. After successful login, the * API connection remains in a logged in state, and future actions will be * run as a logged in user. 
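	 * <p>
	 * A minimal usage sketch (the credentials shown are placeholders):
	 * <pre>
	 * BasicApiConnection connection = BasicApiConnection.getTestWikidataApiConnection();
	 * connection.clientLogin("ExampleUser", "examplePassword");
	 * </pre>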
* * @param username the name of the main user to log in * @param password the password of the main user * @throws LoginFailedException if the login failed for some reason */ public void clientLogin(String username, String password) throws LoginFailedException { login(username, password, this::confirmClientLogin); } /*** * Login function that contains token logic and a function as parameter * * @param username the name of the user to log in * @param password the password of the user * @param loginFunction the functional interface to log in with * @throws LoginFailedException if the login failed for some reason */ protected void login(String username, String password, ILogin loginFunction) throws LoginFailedException { try { String token = getOrFetchToken("login"); try { loginFunction.login(token, username, password); } catch (NeedLoginTokenException | TokenErrorException e) { // try once more clearToken("login"); token = getOrFetchToken("login"); loginFunction.login(token, username, password); } } catch (IOException | MediaWikiApiErrorException e1) { throw new LoginFailedException(e1.getMessage(), e1); } } /** * Issues a Web API query to confirm that the previous login attempt was * successful, and sets the internal state of the API connection accordingly * in this case. * * @param token * the login token string * @param username * the name of the user that was logged in * @param password * the password used to log in * @throws IOException * @throws LoginFailedException */ protected void confirmLogin(String token, String username, String password) throws IOException, LoginFailedException, MediaWikiApiErrorException { Map params = new HashMap<>(); params.put(PARAM_ACTION, "login"); params.put(PARAM_LOGIN_USERNAME.getLoginText(), username); params.put(PARAM_LOGIN_PASSWORD.getLoginText(), password); params.put(PARAM_LOGIN_TOKEN.getLoginText(), token); JsonNode root = sendJsonRequest("POST", params); String result = root.path("login").path("result").textValue(); if (LOGIN_RESULT_SUCCESS.getLoginText().equals(result)) { this.loggedIn = true; this.username = username; this.password = password; } else { String message = null; if (FAILED.getLoginText().equals(result)) { message = root.path("login").path("reason").textValue(); } if (message == null) { // Not 'FAILED' or no 'reason' node message = LoginValue.of(result).getMessage(result); } logger.warn(message); if (LOGIN_WRONG_TOKEN.getLoginText().equals(result)) { throw new NeedLoginTokenException(message); } else { throw new LoginFailedException(message); } } } /** * Issues a Web API query to confirm that the previous client login attempt was * successful, and sets the internal state of the API connection accordingly * in this case. 
* * @param token * the login token string * @param username * the name of the main user that was logged in * @param password * the password used to log in * @throws IOException * @throws LoginFailedException */ protected void confirmClientLogin(String token, String username, String password) throws IOException, LoginFailedException, MediaWikiApiErrorException { Map params = new HashMap<>(); params.put(PARAM_ACTION, "clientlogin"); params.put(PARAM_LOGIN_USERNAME.getClientLoginText(), username); params.put(PARAM_LOGIN_PASSWORD.getClientLoginText(), password); params.put(PARAM_LOGIN_TOKEN.getClientLoginText(), token); params.put("loginreturnurl", apiBaseUrl); // isn't really used in this case, but the api requires either this or logincontinue JsonNode root = sendJsonRequest("POST", params); String result = root.path("clientlogin").path("status").textValue(); if ("PASS".equals(result)) { this.loggedIn = true; this.username = username; this.password = password; } else { String messagecode; if ("FAIL".equals(result)) { messagecode = root.path("clientlogin").path("messagecode").textValue(); } else { messagecode = root.path("error").path("code").textValue(); } String message = LoginValue.of(messagecode).getMessage(messagecode); logger.warn(message); if (LOGIN_WRONG_TOKEN.getClientLoginText().equals(messagecode)) { throw new NeedLoginTokenException(message); } else { throw new LoginFailedException(message); } } } /** * Returns the map of cookies currently used in this connection. */ @JsonProperty("cookies") public List getCookies() { return cookieManager.getCookieStore().getCookies(); } /** * Clears the set of cookies. This will cause a logout. * * @throws IOException */ public void clearCookies() throws IOException, MediaWikiApiErrorException { logout(); cookieManager.getCookieStore().removeAll(); } /** * Logs the current user out. * * @throws IOException */ public void logout() throws IOException, MediaWikiApiErrorException { if (this.loggedIn) { Map params = new HashMap<>(); params.put("action", "logout"); params.put("token", getOrFetchToken("csrf")); params.put("format", "json"); // reduce the output sendJsonRequest("POST", params); this.loggedIn = false; this.username = ""; this.password = ""; } } /** * Wrapper for {@link HttpCookie}. * * Used for json deserialization. * * Since {@link HttpCookie} is final, we can't extend it here. 
*/ protected static class HttpCookieWrapper { private HttpCookie httpCookie; @JsonCreator public HttpCookieWrapper(@JsonProperty("name") String name, @JsonProperty("value") String value, @JsonProperty("comment") String comment, @JsonProperty("commentURL") String commentURL, @JsonProperty("domain") String domain, @JsonProperty("maxAge") int maxAge, @JsonProperty("path") String path, @JsonProperty("portlist") String portlist, @JsonProperty("secure") boolean secure, @JsonProperty("httpOnly") boolean httpOnly, @JsonProperty("version") int version, @JsonProperty("discard") boolean discard) { httpCookie = new HttpCookie(name, value); httpCookie.setComment(comment); httpCookie.setCommentURL(commentURL); httpCookie.setDomain(domain); httpCookie.setMaxAge(maxAge); httpCookie.setPath(path); httpCookie.setPortlist(portlist); httpCookie.setSecure(secure); httpCookie.setHttpOnly(httpOnly); httpCookie.setVersion(version); httpCookie.setDiscard(discard); } public HttpCookie toHttpCookie() { return httpCookie; } } /*** * Functional interface for logging in */ private interface ILogin { void login(String token, String username, String password) throws IOException, LoginFailedException, MediaWikiApiErrorException; } } EditingResult.java000066400000000000000000000043431444772566300340760ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /*- * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2023 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.Objects; import java.util.OptionalLong; /** * Holds information about a successful edit made via {@link WikibaseDataEditor}. * The state of the entity after edit is not provided here because it is not possible * for WDTK to determine it reliably from the response of the server. Indeed, it is * possible that the data in the entity after the edit differs from the data in the * entity before the edit plus the changes of the edit itself, because it can be that * another edit touched independent parts of the entity. This can happen even if the base * revision id is provided. */ public class EditingResult { private final long revisionId; public EditingResult(long revisionId) { super(); this.revisionId = revisionId; } /** * The identifier of the revision of the last edit made by the editing action, * if any edit was made. */ public OptionalLong getLastRevisionId() { return revisionId == 0 ? 
OptionalLong.empty() : OptionalLong.of(revisionId); } @Override public int hashCode() { return Objects.hash(revisionId); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; EditingResult other = (EditingResult) obj; return revisionId == other.revisionId; } @Override public String toString() { return "EditingResult [revisionId=" + revisionId + "]"; } } GuidGenerator.java000066400000000000000000000023061444772566300340500ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ public interface GuidGenerator { String STATEMENT_GUID_SEPARATOR = "$"; /** * Generates a fresh statement id. This consists of a first part * with the entity id of the item the statement belongs to, the separator $, plus * a random hash of the form * /^\{?[A-Z\d]{8}-[A-Z\d]{4}-[A-Z\d]{4}-[A-Z\d]{4}-[A-Z\d]{12}\}?\z/ * @param entityId * the entity the statement belongs to * @return a fresh UUID in the required format. */ String freshStatementId(String entityId); } JacksonWbSearchEntitiesResult.java000066400000000000000000000215251444772566300372300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2016 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.List; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; /** * Jackson implementation of {@link WbSearchEntitiesResult} * * @author Sören Brunk * */ @JsonIgnoreProperties(ignoreUnknown = true) class JacksonWbSearchEntitiesResult implements WbSearchEntitiesResult { /** * Jackson implementation of {@link Match} */ static class JacksonMatch implements Match { @JsonCreator JacksonMatch( @JsonProperty("type") String type, @JsonProperty("language") String language, @JsonProperty("text") String text ) { this.type = type; this.language = language; this.text = text; } /** * The type (field) of the matching term * e.g "entityId", "label" or "alias". 
*/ @JsonProperty("type") private String type; /** * Language of the matching term field. */ @JsonProperty("language") private String language; /** * Text of the matching term. */ @JsonProperty("text") private String text; public String getType() { return type; } public String getLanguage() { return language; } public String getText() { return text; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((language == null) ? 0 : language.hashCode()); result = prime * result + ((text == null) ? 0 : text.hashCode()); result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if(!(obj instanceof JacksonMatch)) { return false; } JacksonMatch other = (JacksonMatch) obj; return Objects.equals(language, other.language) && Objects.equals(text, other.text) && Objects.equals(type, other.type); } } /** * Constructor. Creates an empty object that can be populated during JSON * deserialization. Should only be used by Jackson for this very purpose. */ JacksonWbSearchEntitiesResult() {} /** * The id of the entity that the document refers to. */ @JsonProperty("id") private String entityId; /** * The full concept URI (the site IRI with entity ID). */ @JsonProperty("concepturi") private String conceptUri; /** * The URL of the wiki site that shows the concept. */ @JsonProperty("url") private String url; /** * Title of the entity (currently is the same as the entity ID). */ @JsonProperty("title") private String title; /** * The internal Mediawiki pageid of the entity. */ @JsonProperty("pageid") private long pageId; /** * Label of the entity * * The language of the returned label depends on the HTTP * * Accept-Language header or the uselang URL parameter. */ @JsonProperty("label") private String label; /** * Description of the entity * * The language of the returned description depends on the HTTP * * Accept-Language header or the uselang URL parameter. 
*/ @JsonProperty("description") private String description; /** * Detailed information about how a document matched the query */ @JsonProperty("match") private JacksonMatch match; /** * A list of alias labels (returned only when an alias matched the query) */ @JsonProperty("aliases") private List aliases; public void setEntityId(String id) { this.entityId = id; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getEntityId() */ @Override public String getEntityId() { return this.entityId; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getConceptUri() */ @Override public String getConceptUri() { return conceptUri; } public void setConceptUri(String conceptUri) { this.conceptUri = conceptUri; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getUrl() */ @Override public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getTitle() */ @Override public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getPageId() */ @Override public long getPageId() { return pageId; } public void setPageId(long pageId) { this.pageId = pageId; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getLabel() */ @Override public String getLabel() { return label; } public void setLabel(String label) { this.label = label; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getDescription() */ @Override public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getMatch() */ @Override public Match getMatch() { return match; } public void setMatch(JacksonMatch match) { this.match = match; } /* (non-Javadoc) * @see org.wikidata.wdtk.wikibaseapi.IWbSearchInterfaceResult#getAliases() */ @Override public List getAliases() { return aliases; } public void setAliases(List aliases) { this.aliases = aliases; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((aliases == null) ? 0 : aliases.hashCode()); result = prime * result + ((conceptUri == null) ? 0 : conceptUri.hashCode()); result = prime * result + ((description == null) ? 0 : description.hashCode()); result = prime * result + ((entityId == null) ? 0 : entityId.hashCode()); result = prime * result + ((label == null) ? 0 : label.hashCode()); result = prime * result + ((match == null) ? 0 : match.hashCode()); result = prime * result + (int) (pageId ^ (pageId >>> 32)); result = prime * result + ((title == null) ? 0 : title.hashCode()); result = prime * result + ((url == null) ? 
0 : url.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (!(obj instanceof JacksonWbSearchEntitiesResult)) {
			return false;
		}
		JacksonWbSearchEntitiesResult other = (JacksonWbSearchEntitiesResult) obj;
		return Objects.equals(aliases, other.aliases)
				&& Objects.equals(conceptUri, other.conceptUri)
				&& Objects.equals(description, other.description)
				&& Objects.equals(entityId, other.entityId)
				&& Objects.equals(label, other.label)
				&& Objects.equals(match, other.match)
				&& pageId == other.pageId
				&& Objects.equals(title, other.title)
				&& Objects.equals(url, other.url);
	}
}
LoginFailedException.java000066400000000000000000000021311444772566300353410ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi;

/*
 * #%L
 * Wikidata Toolkit Wikibase API
 * %%
 * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/**
 * This exception is thrown when a login was not successful.
 *
 * @author Michael Guenther
 */
public class LoginFailedException extends Exception {

	public LoginFailedException(String message) {
		super(message);
	}

	public LoginFailedException(String message, Throwable cause) {
		super(message, cause);
	}

	private static final long serialVersionUID = -211632440125669875L;
}
Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/LoginValue.java000066400000000000000000000120511444772566300334330ustar00rootroot00000000000000package org.wikidata.wdtk.wikibaseapi;

/*-
 * #%L
 * Wikidata Toolkit Wikibase API
 * %%
 * Copyright (C) 2014 - 2022 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.Arrays;
import java.util.Optional;

public enum LoginValue {

	/**
	 * Name of the HTTP parameter to submit a user name to the API.
	 */
	PARAM_LOGIN_USERNAME("lgname", "username", ""),

	/**
	 * Name of the HTTP parameter to submit a password to the API.
	 */
	PARAM_LOGIN_PASSWORD("lgpassword", "password", ""),

	/**
	 * Name of the HTTP parameter to submit a login token to the API.
	 */
	PARAM_LOGIN_TOKEN("lgtoken", "logintoken", ""),

	/**
	 * String value in the result field of the JSON response if the login was
	 * successful.
	 */
	LOGIN_RESULT_SUCCESS("Success", "PASS", ""),

	/**
	 * String value in the result field of the JSON response if the password was
	 * wrong.
*/ LOGIN_WRONG_PASS("WrongPass", "wrongpassword", "Wrong Password."), /** * String value in the result field of the JSON response if the password was * wrong. */ FAILED("Failed", "failed", "Something went wrong"), /** * String value in the result field of the JSON response if the password was * rejected by an authentication plugin. */ LOGIN_WRONG_PLUGIN_PASS("WrongPluginPass", "wrongpluginpass", "Wrong Password. An authentication plugin rejected the password."), // not sure about this one /** * String value in the result field of the JSON response if no username was * given. */ LOGIN_NO_NAME("NoName", "authmanager-authn-no-primary", "No user name given."), /** * String value in the result field of the JSON response if given username * does not exist. */ LOGIN_NOT_EXISTS("NotExists", "wrongpassword", "Username does not exist."), // no distinction for clientLogin /** * String value in the result field of the JSON response if the username is * illegal. */ LOGIN_ILLEGAL("Illegal", "wrongpassword", "Username is illegal."), // no distinction for clientLogin /** * String value in the result field of the JSON response if there were too * many logins in a short time. */ LOGIN_THROTTLED("Throttled", "throttled", "Too many login attempts in a short time."), // not sure about this one /** * String value in the result field of the JSON response if password is * empty. */ LOGIN_EMPTY_PASS("EmptyPass", "authmanager-authn-no-primary", "Password is empty."), /** * String value in the result field of the JSON response if the wiki tried * to automatically create a new account for you, but your IP address has * been blocked from account creation. */ LOGIN_CREATE_BLOCKED("CreateBlocked", "createblocked", "The wiki tried to automatically create a new account for you, " + "but your IP address has been blocked from account creation."), // not sure about this one /** * String value in the result field of the JSON response if the user is * blocked. */ LOGIN_BLOCKED("Blocked", "blocked", "User is blocked."), // not sure about this one /** * String value in the result field of the JSON response if token or session * ID is missing. */ LOGIN_NEEDTOKEN("NeedToken", "missingparam", "Token or session ID is missing."), /** * String value in the result field of the JSON response if token is wrong. 
*/ LOGIN_WRONG_TOKEN("WrongToken", "badtoken", "Token is wrong."), /** * Value for unknown response text */ UNKNOWN("unknown", "unknown", "Error text not recognized"); private final String loginText; private final String clientLoginText; private final String message; LoginValue(String loginText, String clientLoginText, String message) { this.loginText = loginText; this.clientLoginText = clientLoginText; this.message = message; } public static LoginValue of(String text) { Optional optionalLoginValue = Arrays.stream(LoginValue.values()) .filter(loginValue -> text != null && (text.equals(loginValue.loginText) || text.equals(loginValue.clientLoginText))).findFirst(); return optionalLoginValue.orElse(UNKNOWN); } public String getLoginText() { return loginText; } public String getClientLoginText() { return clientLoginText; } public String getMessage(String loginType) { return loginType + ": " + message; } } MalformedResponseException.java000066400000000000000000000034371444772566300366230ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /*- * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2021 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.core.JsonProcessingException; /** * Exception thrown when response from Wikibase API cannot be parsed. * This exception may be thrown in addition to other {@link JsonProcessingException} exceptions. */ public class MalformedResponseException extends JsonProcessingException { private static final long serialVersionUID = -4697019897395095678L; /** * Constructs {@code MalformedResponseException} with the specified detail * message. * * @param message the detail message, which can be later retrieved via * {@link #getMessage()} */ public MalformedResponseException(String message) { super(message); } /** * Constructs {@code MalformedResponseException} with the specified detail * message and cause. * * @param message the detail message, which can be later retrieved via * {@link #getMessage()} * @param cause the cause, which can be later retrieved via * {@link #getCause()} */ public MalformedResponseException(String message, Throwable cause) { super(message, cause); } } MediaInfoIdQueryAction.java000066400000000000000000000103101444772566300355770ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /*- * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2020 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import java.io.IOException; import java.util.*; /** * Action for MediaInfoId retrieval. * * @author Lu Liu */ public class MediaInfoIdQueryAction { private final ApiConnection connection; private final String siteIri; public MediaInfoIdQueryAction(ApiConnection connection, String siteIri) { this.connection = connection; this.siteIri = siteIri; } /** * Fetches the MediaInfoIds of files with the given names. *
<p>
    * This method only works with file names (e.g. "File:Albert Einstein Head.jpg"). * The "File:" prefix can be omitted; in that case, it will be added automatically during processing. * For example, "Albert Einstein Head.jpg" will be processed as "File:Albert Einstein Head.jpg". *
<p>
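    * For illustration, a minimal usage sketch; an existing {@code connection}
    * to the Wikimedia Commons API is assumed, and the site IRI shown here is
    * an assumption as well:
    * <pre>{@code
    * MediaInfoIdQueryAction action = new MediaInfoIdQueryAction(
    *         connection, "http://commons.wikimedia.org/entity/");
    * Map<String, MediaInfoIdValue> mids = action.getMediaInfoIds(
    *         Collections.singletonList("Albert Einstein Head.jpg"));
    * MediaInfoIdValue mid = mids.get("Albert Einstein Head.jpg");
    * }</pre>
    * <p>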
    * Notice that pages other than file pages will also be fitted with the "File:" prefix. * For example, "Main Page" will be processed as "File:Main Page", which doesn't exist. * So always make sure you are dealing with file name. * * @param fileNames list of file names of the requested MediaInfoIds * @return map from file names for which data could be found to the MediaInfoIds * that were retrieved */ public Map getMediaInfoIds(List fileNames) throws IOException, MediaWikiApiErrorException { // file name => file name with prefix List fileNamesWithPrefix = new ArrayList<>(); for (String fileName : fileNames) { fileName = fileName.startsWith("File:") ? fileName : "File:" + fileName; fileNamesWithPrefix.add(fileName); } Map parameters = new HashMap<>(); parameters.put(ApiConnection.PARAM_ACTION, "query"); parameters.put("titles", ApiConnection.implodeObjects(fileNamesWithPrefix)); Map result = new HashMap<>(); JsonNode root = connection.sendJsonRequest("POST", parameters); if (!root.has("query")) return result; // empty query JsonNode query = root.get("query"); // file name with prefix => normalized file name Map normalizedMap = new HashMap<>(); if (query.has("normalized")) { ArrayNode normalized = (ArrayNode) query.get("normalized"); Iterator iterator = normalized.elements(); while (iterator.hasNext()) { JsonNode next = iterator.next(); String from = next.get("from").asText(); String to = next.get("to").asText(); normalizedMap.put(from, to); } } // normalized file name => Mid Map midMap = new HashMap<>(); JsonNode pages = query.get("pages"); Iterator> iterator = pages.fields(); while (iterator.hasNext()) { Map.Entry page = iterator.next(); String pageId = page.getKey(); String title = page.getValue().get("title").textValue(); if (!pageId.startsWith("-")) { // negative keys such as "-1", "-2", ... mean not found midMap.put(title, Datamodel.makeMediaInfoIdValue("M" + pageId, siteIri)); } } for (String fileName : fileNames) { String fileNameWithPrefix = fileName.startsWith("File:") ? fileName : "File:" + fileName; String normalizedFileName = normalizedMap.getOrDefault(fileNameWithPrefix, fileNameWithPrefix); result.put(fileName, midMap.get(normalizedFileName)); } return result; } } NeedLoginTokenException.java000066400000000000000000000021051444772566300360320ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * This exception could be caused by a login action if there are problems with * the login token or the session id. 
* * @author Michael Guenther * */ public class NeedLoginTokenException extends LoginFailedException { public NeedLoginTokenException(String message) { super(message); } private static final long serialVersionUID = 4379408974690967477L; } OAuthApiConnection.java000066400000000000000000000174151444772566300350120ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2020 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.JsonNode; import okhttp3.OkHttpClient; import org.wikidata.wdtk.wikibaseapi.apierrors.AssertUserFailedException; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import se.akerfeldt.okhttp.signpost.OkHttpOAuthConsumer; import se.akerfeldt.okhttp.signpost.SigningInterceptor; import java.io.IOException; import java.util.HashMap; import java.util.Map; /** * A connection to the MediaWiki/Wikibase API which uses OAuth * for authentication. * * @author Antonin Delpeuch * @author Lu Liu */ public class OAuthApiConnection extends ApiConnection { private String consumerKey; private String consumerSecret; private String accessToken; private String accessSecret; /** * Constructs an OAuth connection to the given MediaWiki API endpoint. *
<p>
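    * A minimal construction sketch; the consumer and access credential
    * variables are placeholders obtained from a prior OAuth 1.0a handshake:
    * <pre>{@code
    * ApiConnection connection = new OAuthApiConnection(
    *         "https://www.wikidata.org/w/api.php",
    *         consumerKey, consumerSecret,
    *         accessToken, accessSecret);
    * }</pre>
    * <p>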
    * {@link ApiConnection#isLoggedIn()} will return true * until {@link ApiConnection#logout()} is called. *
<p>
    * NOTICE: The constructor doesn't check if the OAuth credentials * (i.e., the consumer key/secret and the access token/secret) are valid. * Even if the credentials are valid when calling this constructor, * they can be revoked by the user at any time. *
<p>
    * The validity of the credentials is automatically checked if you use * {@link ApiConnection#sendJsonRequest}. * * @param apiBaseUrl the MediaWiki API endpoint, such as "https://www.wikidata.org/w/api.php" * @param consumerKey the OAuth 1.0a consumer key * @param consumerSecret the OAuth 1.0a consumer secret * @param accessToken the access token obtained via the OAuth process * @param accessSecret the secret key obtained via the OAuth process */ public OAuthApiConnection(String apiBaseUrl, String consumerKey, String consumerSecret, String accessToken, String accessSecret) { super(apiBaseUrl, null); this.consumerKey = consumerKey; this.consumerSecret = consumerSecret; this.accessToken = accessToken; this.accessSecret = accessSecret; loggedIn = true; } /** * Deserializes an existing OAuthApiConnection from JSON. * * @param apiBaseUrl the MediaWiki API endpoint, such as "https://www.wikidata.org/w/api.php" * @param consumerKey the OAuth 1.0a consumer key * @param consumerSecret the OAuth 1.0a consumer secret * @param accessToken the access token obtained via the OAuth process * @param accessSecret the secret key obtained via the OAuth process * @param username name of the current user * @param loggedIn true if login succeeded. * @param tokens map of tokens used for this session * @param connectTimeout the maximum time to wait for when establishing a connection, in milliseconds * @param readTimeout the maximum time to wait for a server response once the connection was established, in milliseconds */ @JsonCreator protected OAuthApiConnection( @JsonProperty("baseUrl") String apiBaseUrl, @JsonProperty("consumerKey") String consumerKey, @JsonProperty("consumerSecret") String consumerSecret, @JsonProperty("accessToken") String accessToken, @JsonProperty("accessSecret") String accessSecret, @JsonProperty("username") String username, @JsonProperty("loggedIn") boolean loggedIn, @JsonProperty("tokens") Map tokens, @JsonProperty("connectTimeout") int connectTimeout, @JsonProperty("readTimeout") int readTimeout) { super(apiBaseUrl, tokens); this.consumerKey = consumerKey; this.consumerSecret = consumerSecret; this.accessToken = accessToken; this.accessSecret = accessSecret; this.username = username; this.loggedIn = loggedIn; this.connectTimeout = connectTimeout; this.readTimeout = readTimeout; } @Override protected OkHttpClient.Builder getClientBuilder() { OkHttpOAuthConsumer consumer = new OkHttpOAuthConsumer(consumerKey, consumerSecret); consumer.setTokenWithSecret(accessToken, accessSecret); return new OkHttpClient.Builder() .addInterceptor(new SigningInterceptor(consumer)); } /** * Forgets the OAuth credentials locally. * No requests will be made. */ @Override public void logout() { consumerKey = null; consumerSecret = null; accessToken = null; accessSecret = null; username = ""; loggedIn = false; } /** * Checks if the OAuth credentials (i.e., consumer key/secret and access token/secret) are still valid. *
<p>
    * The OAuth credentials can become invalid if the user revokes them. *
<p>
    * We simply call {@link ApiConnection#checkCredentials()} here, because for OAuth * the query "action=query&assert=user" returns success if and only if the * credentials are still valid. This behaviour is the same as when using a * username and password to log in. *
<p>
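    * For illustration, a caller could validate stored credentials as follows:
    * <pre>{@code
    * try {
    *     connection.checkCredentials();
    * } catch (IOException | MediaWikiApiErrorException e) {
    *     // the credentials were revoked or are otherwise no longer valid
    * }
    * }</pre>
    * <p>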
    * This method throws {@link AssertUserFailedException} if the check failed. * This does not update the state of the connection object. * * @throws MediaWikiApiErrorException if the check failed * @throws IOException */ @Override public void checkCredentials() throws IOException, MediaWikiApiErrorException { super.checkCredentials(); } @Override @JsonProperty("username") public String getCurrentUser() { if (!loggedIn) return ""; if (username != null && !username.equals("")) return username; try { Map params = new HashMap<>(); params.put(PARAM_ACTION, "query"); params.put("meta", "userinfo"); JsonNode root = sendJsonRequest("POST", params); JsonNode nameNode = root.path("query").path("userinfo").path("name"); if (nameNode.isMissingNode()) { throw new AssertUserFailedException("The path \"query/userinfo/name\" doesn't exist in the json response"); } username = nameNode.textValue(); } catch (IOException | MediaWikiApiErrorException e) { logger.warn("An error occurred when retrieving the username with OAuth credentials, the username is set to \"\" automatically: " + e.getMessage()); username = ""; } return username; } @JsonProperty("consumerKey") public String getConsumerKey() { return consumerKey; } @JsonProperty("consumerSecret") public String getConsumerSecret() { return consumerSecret; } @JsonProperty("accessToken") public String getAccessToken() { return accessToken; } @JsonProperty("accessSecret") public String getAccessSecret() { return accessSecret; } } RandomGuidGenerator.java000066400000000000000000000027451444772566300352200ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.UUID; /** * Generates fresh hashes for new statements, snaks or references, * based on Java's own random generator (java.util.UUID). * * @author antonin * */ public class RandomGuidGenerator implements GuidGenerator { /** * Generates a fresh statement id. This consists of a first part * with the entity id of the item the statement belongs to, plus * a random hash of the form * /^\{?[A-Z\d]{8}-[A-Z\d]{4}-[A-Z\d]{4}-[A-Z\d]{4}-[A-Z\d]{12}\}?\z/ * @param entityId * the entity the statement belongs to * @return a fresh UUID in the required format. */ @Override public String freshStatementId(String entityId) { String uuid = UUID.randomUUID().toString().toUpperCase(); return entityId + STATEMENT_GUID_SEPARATOR + uuid; } } StatementUpdate.java000066400000000000000000000527261444772566300344330ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.wikibaseapi; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.helpers.JsonSerializer; import org.wikidata.wdtk.datamodel.implementation.StatementImpl; import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.SnakGroup; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.Value; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; /** * @deprecated Use {@link WikibaseDataEditor#editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Class to plan a statement update operation. * * @author Markus Kroetzsch * */ @Deprecated public class StatementUpdate { static final Logger logger = LoggerFactory.getLogger(StatementUpdate.class); /** * Helper class to store a statement together with the information of * whether or not it is new (modified, not in current data) and therefore * needs to be written. * * @author Markus Kroetzsch * */ static class StatementWithUpdate { public final Statement statement; public final boolean write; public StatementWithUpdate(Statement statement, boolean write) { this.statement = statement; this.write = write; } } /** * Helper class to ease serialization of deleted statements. Jackson will * serialize this class according to the format required by the API. 
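* For example, an instance carrying the statement id {@code "Q42$guid"} (an
* illustrative id) serializes as {@code {"id": "Q42$guid", "remove": ""}}.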
* * @author antonin */ static class DeletedStatement implements Statement { private String id; public DeletedStatement(String id) { this.id = id; } @Override @JsonIgnore public Claim getClaim() { return null; } @Override @JsonIgnore public EntityIdValue getSubject() { return null; } @Override @JsonIgnore public Snak getMainSnak() { return null; } @Override @JsonIgnore public List getQualifiers() { return null; } @Override @JsonIgnore public Iterator getAllQualifiers() { return null; } @Override @JsonIgnore public StatementRank getRank() { return null; } @Override @JsonIgnore public List getReferences() { return null; } @Override @JsonProperty("id") public String getStatementId() { return id; } @Override @JsonIgnore public Value getValue() { return null; } @JsonProperty("remove") public String getRemoveCommand() { return ""; } @Override public Statement withStatementId(String id) { return null; } } private GuidGenerator guidGenerator = new RandomGuidGenerator(); private final ObjectMapper mapper; @JsonIgnore final HashMap> toKeep; @JsonIgnore final List toDelete; @JsonIgnore StatementDocument currentDocument; /** * Constructor. Marks the given lists of statements for being added to or * deleted from the given document, respectively. The current content of the * document is compared with the requested changes to avoid duplicates * (merging references of duplicate statements), and to avoid deletions of * statements that have changed or ceased to exist. * * @param currentDocument * the document with the current statements * @param addStatements * the list of new statements to be added * @param deleteStatements * the list of statements to be deleted */ public StatementUpdate(StatementDocument currentDocument, List addStatements, List deleteStatements) { this.currentDocument = currentDocument; this.toKeep = new HashMap<>(); this.toDelete = new ArrayList<>(); markStatementsForUpdate(currentDocument, addStatements, deleteStatements); this.mapper = new DatamodelMapper(currentDocument.getEntityId().getSiteIri()); } /** * Returns a JSON serialization of the marked insertions and deletions of * statements, in the format required by the Wikibase "wbeditentity" action. * * @return JSON serialization of updates */ @JsonIgnore public String getJsonUpdateString() { try { return mapper.writeValueAsString(this); } catch (JsonProcessingException e) { return ("Failed to serialize statement update to JSON: " + e.toString()); } } /** * Performs the update, selecting the appropriate API action depending on * the nature of the change. * * @param action * the endpoint to which the change should be pushed * @param editAsBot * if true, the edit will be flagged as a "bot edit" provided that * the logged in user is in the bot group; for regular users, the * flag will just be ignored * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * Ignored if null or empty. 
* @return the new document after update with the API * @throws MediaWikiApiErrorException * @throws IOException */ public StatementDocument performEdit(WbEditingAction action, boolean editAsBot, String summary, List tags) throws IOException, MediaWikiApiErrorException { if (isEmptyEdit()) { return currentDocument; } else if (toDelete.isEmpty() && getUpdatedStatements().size() == 1) { // we can use "wbsetclaim" because we only have one statement to change List statements = getUpdatedStatements(); Statement statement = statements.get(0); if (statement.getStatementId() == null || statement.getStatementId().isEmpty()) { statement = statement.withStatementId(guidGenerator.freshStatementId(currentDocument.getEntityId().getId())); } JsonNode response = action.wbSetClaim( JsonSerializer.getJsonString(statement), editAsBot, currentDocument.getRevisionId(), summary, tags); StatementImpl.PreStatement preStatement = getDatamodelObjectFromResponse(response, Collections.singletonList("claim"), StatementImpl.PreStatement.class); Statement returnedStatement = preStatement.withSubject(statement.getClaim().getSubject()); long revisionId = getRevisionIdFromResponse(response); return currentDocument.withStatement(returnedStatement).withRevisionId(revisionId); } else if (!toDelete.isEmpty() && getUpdatedStatements().size() == toDelete.size() && toDelete.size() <= 50) { // we can use "wbremoveclaims" because we are only removing statements JsonNode response = action.wbRemoveClaims(toDelete, editAsBot, currentDocument.getRevisionId(), summary, tags); long revisionId = getRevisionIdFromResponse(response); return currentDocument.withoutStatementIds(new HashSet<>(toDelete)).withRevisionId(revisionId); } else { return (StatementDocument) action.wbEditEntity(currentDocument .getEntityId().getId(), null, null, null, getJsonUpdateString(), false, editAsBot, currentDocument .getRevisionId(), summary, tags); } } @JsonProperty("claims") @JsonInclude(Include.NON_EMPTY) public List getUpdatedStatements() { List updatedStatements = new ArrayList<>(); for (List swus : toKeep.values()) { for (StatementWithUpdate swu : swus) { if (!swu.write) { continue; } updatedStatements.add(swu.statement); } } for (String id : toDelete) { updatedStatements.add(new DeletedStatement(id)); } return updatedStatements; } /** * Returns true when the edit is not going to change anything on the item. * In this case, the change can be safely skipped, except if the side effects * of a null edit are desired. */ @JsonIgnore public boolean isEmptyEdit() { return getUpdatedStatements().isEmpty(); } /** * Marks the given lists of statements for being added to or deleted from * the given document, respectively. The current content of the document is * compared with the requested changes to avoid duplicates (merging * references of duplicate statements), and to avoid deletions of statements * that have changed or ceased to exist. * * @param currentDocument * the document with the current statements * @param addStatements * the list of new statements to be added * @param deleteStatements * the list of statements to be deleted */ protected void markStatementsForUpdate(StatementDocument currentDocument, List addStatements, List deleteStatements) { markStatementsForDeletion(currentDocument, deleteStatements); markStatementsForInsertion(currentDocument, addStatements); } /** * Marks the given list of statements for deletion. It is verified that the * current document actually contains the statements before doing so. 
This * check is based on exact statement equality, including qualifier order and * statement id. * * @param currentDocument * the document with the current statements * @param deleteStatements * the list of statements to be deleted */ protected void markStatementsForDeletion(StatementDocument currentDocument, List deleteStatements) { for (Statement statement : deleteStatements) { boolean found = false; for (StatementGroup sg : currentDocument.getStatementGroups()) { if (!sg.getProperty().equals(statement.getMainSnak().getPropertyId())) { continue; } Statement changedStatement = null; for (Statement existingStatement : sg) { if (existingStatement.equals(statement)) { found = true; toDelete.add(statement.getStatementId()); } else if (existingStatement.getStatementId().equals( statement.getStatementId())) { // (we assume all existing statement ids to be nonempty // here) changedStatement = existingStatement; break; } } if (!found) { StringBuilder warning = new StringBuilder(); warning.append("Cannot delete statement (id ") .append(statement.getStatementId()) .append(") since it is not present in data. Statement was:\n") .append(statement); if (changedStatement != null) { warning.append( "\nThe data contains another statement with the same id: maybe it has been edited? Other statement was:\n") .append(changedStatement); } logger.warn(warning.toString()); } } } } /** * Marks a given list of statements for insertion into the current document. * Inserted statements can have an id if they should update an existing * statement, or use an empty string as id if they should be added. The * method removes duplicates and avoids unnecessary modifications by * checking the current content of the given document before marking * statements for being written. * * @param currentDocument * the document with the current statements * @param addStatements * the list of new statements to be added */ protected void markStatementsForInsertion( StatementDocument currentDocument, List addStatements) { for (Statement statement : addStatements) { addStatement(statement, true); } for (StatementGroup sg : currentDocument.getStatementGroups()) { if (this.toKeep.containsKey(sg.getProperty())) { for (Statement statement : sg) { if (!this.toDelete.contains(statement.getStatementId())) { addStatement(statement, false); } } } } } /** * Adds one statement to the list of statements to be kept, possibly merging * it with other statements to be kept if possible. When two existing * statements are merged, one of them will be updated and the other will be * marked for deletion. 
* * @param statement * statement to add * @param isNew * if true, the statement should be marked for writing; if false, * the statement already exists in the current data and is only * added to remove duplicates and avoid unnecessary writes */ protected void addStatement(Statement statement, boolean isNew) { PropertyIdValue pid = statement.getMainSnak().getPropertyId(); // This code maintains the following properties: // (1) the toKeep structure does not contain two statements with the // same statement id // (2) the toKeep structure does not contain two statements that can // be merged if (this.toKeep.containsKey(pid)) { List statements = this.toKeep.get(pid); for (int i = 0; i < statements.size(); i++) { Statement currentStatement = statements.get(i).statement; boolean currentIsNew = statements.get(i).write; if (!"".equals(currentStatement.getStatementId()) && currentStatement.getStatementId().equals( statement.getStatementId())) { // Same, non-empty id: ignore existing statement as if // deleted return; } Statement newStatement = mergeStatements(statement, currentStatement); if (newStatement != null) { boolean writeNewStatement = (isNew || !newStatement .equals(statement)) && (currentIsNew || !newStatement .equals(currentStatement)); // noWrite: (newS == statement && !isNew) // || (newS == cur && !curIsNew) // Write: (newS != statement || isNew ) // && (newS != cur || curIsNew) statements.set(i, new StatementWithUpdate(newStatement, writeNewStatement)); // Impossible with default merge code: // Kept here for future extensions that may choose to not // reuse this id. if (!"".equals(statement.getStatementId()) && !newStatement.getStatementId().equals( statement.getStatementId())) { this.toDelete.add(statement.getStatementId()); } if (!"".equals(currentStatement.getStatementId()) && !newStatement.getStatementId().equals( currentStatement.getStatementId())) { this.toDelete.add(currentStatement.getStatementId()); } return; } } statements.add(new StatementWithUpdate(statement, isNew)); } else { List statements = new ArrayList<>(); statements.add(new StatementWithUpdate(statement, isNew)); this.toKeep.put(pid, statements); } } /** * Returns a statement obtained by merging two given statements, if * possible, or null if the statements cannot be merged. Statements are * merged if they contain the same claim, but possibly with qualifiers in a * different order. The statements may have different ids, ranks, and * references. References will be merged. Different ranks are supported if * one of the statement uses {@link StatementRank#NORMAL}, and the rank of * the other (non-normal) statement is used in this case; otherwise the * statements will not merge. The first statement takes precedence for * determining inessential details of the merger, such as the order of * qualifiers. 
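* For example, merging a NORMAL-rank statement with an otherwise equivalent
* PREFERRED-rank statement yields a PREFERRED-rank statement whose reference
* list is the duplicate-free union of the two reference lists.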
* * @param statement1 * first statement * @param statement2 * second statement * @return merged statement or null if merging is not possible */ private Statement mergeStatements(Statement statement1, Statement statement2) { if (!equivalentClaims(statement1.getClaim(), statement2.getClaim())) { return null; } StatementRank newRank = statement1.getRank(); if (newRank == StatementRank.NORMAL) { newRank = statement2.getRank(); } else if (statement2.getRank() != StatementRank.NORMAL && newRank != statement2.getRank()) { return null; } String newStatementId = statement1.getStatementId(); if ("".equals(newStatementId)) { newStatementId = statement2.getStatementId(); } List newReferences = mergeReferences( statement1.getReferences(), statement2.getReferences()); return Datamodel.makeStatement(statement1.getClaim(), newReferences, newRank, newStatementId); } /** * Merges two lists of references, eliminating duplicates in the process. * * @param references1 * @param references2 * @return merged list */ protected List mergeReferences( List references1, List references2) { List result = new ArrayList<>(); for (Reference reference : references1) { addBestReferenceToList(reference, result); } for (Reference reference : references2) { addBestReferenceToList(reference, result); } return result; } protected void addBestReferenceToList(Reference reference, List referenceList) { for (Reference existingReference : referenceList) { if (isSameSnakSet(existingReference.getAllSnaks(), reference.getAllSnaks())) { return; } } referenceList.add(reference); } /** * Checks if two claims are equivalent in the sense that they have the same * main snak and the same qualifiers, but possibly in a different order. * * @param claim1 * @param claim2 * @return true if claims are equivalent */ protected boolean equivalentClaims(Claim claim1, Claim claim2) { return claim1.getMainSnak().equals(claim2.getMainSnak()) && isSameSnakSet(claim1.getAllQualifiers(), claim2.getAllQualifiers()); } /** * Compares two sets of snaks, given by iterators. The method is optimised * for short lists of snaks, as they are typically found in claims and * references. * * @param snaks1 * @param snaks2 * @return true if the lists are equal */ protected boolean isSameSnakSet(Iterator snaks1, Iterator snaks2) { ArrayList snakList1 = new ArrayList<>(5); while (snaks1.hasNext()) { snakList1.add(snaks1.next()); } int snakCount2 = 0; while (snaks2.hasNext()) { snakCount2++; Snak snak2 = snaks2.next(); boolean found = false; for (int i = 0; i < snakList1.size(); i++) { if (snak2.equals(snakList1.get(i))) { snakList1.set(i, null); found = true; break; } } if (!found) { return false; } } return snakCount2 == snakList1.size(); } /** * Sets the GUID generator for this statement update. 
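* For example, the default behaviour can be restored with
* {@code setGuidGenerator(new RandomGuidGenerator())}.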
*/ public void setGuidGenerator(GuidGenerator generator) { guidGenerator = generator; } /** * Extracts the last revision id from the JSON response returned * by the API after an edit * * @param response * the response as returned by Mediawiki * @return * the new revision id of the edited entity * @throws JsonProcessingException */ protected long getRevisionIdFromResponse(JsonNode response) throws JsonProcessingException { if(response == null) { throw new MalformedResponseException("API response is null"); } JsonNode entity = null; if(response.has("entity")) { entity = response.path("entity"); } else if(response.has("pageinfo")) { entity = response.path("pageinfo"); } if(entity != null && entity.has("lastrevid")) { return entity.path("lastrevid").asLong(); } throw new MalformedResponseException("The last revision id could not be found in API response"); } /** * Extracts a particular data model instance from a JSON response * returned by MediaWiki. The location is described by a list of successive * fields to use, from the root to the target object. * * @param response * the API response as returned by MediaWiki * @param path * a list of fields from the root to the target object * @return * the parsed POJO object * @throws JsonProcessingException */ protected T getDatamodelObjectFromResponse(JsonNode response, List path, Class targetClass) throws JsonProcessingException { if(response == null) { throw new MalformedResponseException("The API response is null"); } JsonNode currentNode = response; for(String field : path) { if (!currentNode.has(field)) { throw new MalformedResponseException("Field '"+field+"' not found in API response."); } currentNode = currentNode.path(field); } return mapper.treeToValue(currentNode, targetClass); } } TermStatementUpdate.java000066400000000000000000000451671444772566300352640ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.wikibaseapi; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.implementation.TermImpl; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocument; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.JsonNode; /** * @deprecated Use {@link WikibaseDataEditor#editEntityDocument(EntityUpdate, boolean, String, List)} instead. * This class extends StatementUpdate to support update to terms (labels, * descriptions and aliases). * * Various safeguards are implemented in this interface: * - aliases are added and deleted independently * - duplicate aliases cannot be added * - adding an alias in a language that does not have a label sets the label instead * * @author antonin */ @Deprecated public class TermStatementUpdate extends StatementUpdate { static final Logger logger = LoggerFactory.getLogger(TermStatementUpdate.class); /** * Just adding a "write" field to keep track of whether * we have changed this value. That helps keep the edit cleaner. * * @author antonin */ private static class NameWithUpdate { public MonolingualTextValue value; public boolean write; public NameWithUpdate(MonolingualTextValue value, boolean write) { this.value = value; this.write = write; } } /** * Keeps track of the current state of aliases after updates. * * @author antonin */ private static class AliasesWithUpdate { public List aliases; public List added; public List deleted; public boolean write; public AliasesWithUpdate(List aliases, boolean write) { this.aliases = aliases; this.write = write; this.added = new ArrayList<>(); this.deleted = new ArrayList<>(); } } final protected TermedStatementDocument currentDocument; @JsonIgnore final Map newLabels; @JsonIgnore final Map newDescriptions; @JsonIgnore final Map newAliases; /** * Constructor. Plans an update on the statements and terms of a document. * Statements are merged according to StatementUpdate's logic. Labels and * descriptions will overwrite any existing values. The first aliases added * on a language where no label is available yet will be treated as a label * instead. Duplicate aliases are ignored. * * @param currentDocument * the current state of the entity * @param addStatements * the statements to be added to the entity. 
* @param deleteStatements * the statements to be removed from the entity * @param addLabels * the labels to be added to the entity * @param addDescriptions * the descriptions to be added to the entity * @param addAliases * the aliases to be added to the entity * @param deleteAliases * the aliases to be removed from the entity */ public TermStatementUpdate(TermedStatementDocument currentDocument, List addStatements, List deleteStatements, List addLabels, List addDescriptions, List addAliases, List deleteAliases) { super(currentDocument, addStatements, deleteStatements); this.currentDocument = currentDocument; // Fill the terms with their current values newLabels = initUpdatesFromCurrentValues(currentDocument.getLabels().values()); newDescriptions = initUpdatesFromCurrentValues(currentDocument.getDescriptions().values()); newAliases = new HashMap<>(); for(Map.Entry> entry : currentDocument.getAliases().entrySet()) { newAliases.put(entry.getKey(), new AliasesWithUpdate( new ArrayList<>(entry.getValue()), false)); } // Add changes processLabels(addLabels); processDescriptions(addDescriptions); processAliases(addAliases, deleteAliases); } /** * Initializes the list of current values for a type of terms (label or description). * * @param currentValues * current values for the type of terms * @return a map of updates (where all terms are marked as not for write) */ protected Map initUpdatesFromCurrentValues(Collection currentValues) { Map updates = new HashMap<>(); for(MonolingualTextValue label: currentValues) { updates.put(label.getLanguageCode(), new NameWithUpdate(label, false)); } return updates; } /** * Processes changes on aliases, updating the planned state of the item. * * @param addAliases * aliases that should be added to the document * @param deleteAliases * aliases that should be removed from the document */ protected void processAliases(List addAliases, List deleteAliases) { for(MonolingualTextValue val : addAliases) { addAlias(val); } for(MonolingualTextValue val : deleteAliases) { deleteAlias(val); } } /** * Deletes an individual alias * * @param alias * the alias to delete */ protected void deleteAlias(MonolingualTextValue alias) { String lang = alias.getLanguageCode(); AliasesWithUpdate currentAliases = newAliases.get(lang); if (currentAliases != null) { currentAliases.aliases.remove(alias); currentAliases.deleted.add(alias); currentAliases.write = true; } } /** * Adds an individual alias. It will be merged with the current * list of aliases, or added as a label if there is no label for * this item in this language yet. * * @param alias * the alias to add */ protected void addAlias(MonolingualTextValue alias) { String lang = alias.getLanguageCode(); AliasesWithUpdate currentAliasesUpdate = newAliases.get(lang); NameWithUpdate currentLabel = newLabels.get(lang); // If there isn't any label for that language, put the alias there if (currentLabel == null) { newLabels.put(lang, new NameWithUpdate(alias, true)); // If the new alias is equal to the current label, skip it } else if (!currentLabel.value.equals(alias)) { if (currentAliasesUpdate == null) { currentAliasesUpdate = new AliasesWithUpdate(new ArrayList<>(), true); } List currentAliases = currentAliasesUpdate.aliases; if(!currentAliases.contains(alias)) { currentAliases.add(alias); currentAliasesUpdate.added.add(alias); currentAliasesUpdate.write = true; } newAliases.put(lang, currentAliasesUpdate); } } /** * Adds descriptions to the item. 
* * @param descriptions * the descriptions to add */ protected void processDescriptions(List descriptions) { for(MonolingualTextValue description : descriptions) { NameWithUpdate currentValue = newDescriptions.get(description.getLanguageCode()); // only mark the description as added if the value we are writing is different from the current one if (currentValue == null || !currentValue.value.equals(description)) { newDescriptions.put(description.getLanguageCode(), new NameWithUpdate(description, true)); } } } /** * Adds labels to the item * * @param labels * the labels to add */ protected void processLabels(List labels) { for(MonolingualTextValue label : labels) { String lang = label.getLanguageCode(); NameWithUpdate currentValue = newLabels.get(lang); if (currentValue == null || !currentValue.value.equals(label)) { newLabels.put(lang, new NameWithUpdate(label, true)); // Delete any alias that matches the new label AliasesWithUpdate currentAliases = newAliases.get(lang); if (currentAliases != null && currentAliases.aliases.contains(label)) { deleteAlias(label); } } } } /** * Label accessor provided for JSON serialization only. */ @JsonProperty("labels") @JsonInclude(Include.NON_EMPTY) public Map getLabelUpdates() { return getMonolingualUpdatedValues(newLabels); } /** * Description accessor provided for JSON serialization only. */ @JsonProperty("descriptions") @JsonInclude(Include.NON_EMPTY) public Map getDescriptionUpdates() { return getMonolingualUpdatedValues(newDescriptions); } /** * Alias accessor provided for JSON serialization only */ @JsonProperty("aliases") @JsonInclude(Include.NON_EMPTY) public Map> getAliasUpdates() { Map> updatedValues = new HashMap<>(); for(Map.Entry entry : newAliases.entrySet()) { AliasesWithUpdate update = entry.getValue(); if (!update.write) { continue; } List convertedAliases = new ArrayList<>(); for(MonolingualTextValue alias : update.aliases) { convertedAliases.add(monolingualToJackson(alias)); } updatedValues.put(entry.getKey(), convertedAliases); } return updatedValues; } /** * Is this change null? (Which means that nothing at all * will be changed on the item.) */ @Override @JsonIgnore public boolean isEmptyEdit() { return (super.isEmptyEdit() && getLabelUpdates().isEmpty() && getDescriptionUpdates().isEmpty() && getAliasUpdates().isEmpty()); } /** * Retrieves the list of aliases that will be added in a * given language, after all the optimizations have been done * (replacing empty labels by new aliases in the same language, * for instance). * * @param language the language code of the added aliases * @return the list of added aliases */ public List getAddedAliases(String language) { AliasesWithUpdate update = newAliases.get(language); if (update == null) { return Collections.emptyList(); } return update.added; } /** * Retrieves the list of aliases that will be removed in a * given language, after all the optimizations have been done * (replacing empty labels by new aliases in the same language, * for instance). * * @param language: the language code of the removed aliases * @return the list of removed aliases */ public List getRemovedAliases(String language) { AliasesWithUpdate update = newAliases.get(language); if (update == null) { return Collections.emptyList(); } return update.deleted; } /** * Performs the update, selecting the appropriate API action depending on * the nature of the change. 
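* For instance, if the statement part of the edit is empty and only a single
* label changes, the lighter "wbsetlabel" action is used; analogous cases map
* to "wbsetdescription" and "wbsetaliases", and any other combination falls
* back to a full "wbeditentity" call. A usage sketch, assuming the document,
* action and term lists already exist:
* <pre>{@code
* TermStatementUpdate update = new TermStatementUpdate(currentDocument,
*         addStatements, deleteStatements,
*         addLabels, addDescriptions, addAliases, deleteAliases);
* TermedStatementDocument newDocument =
*         update.performEdit(action, false, "update terms", null);
* }</pre>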
* * @param action * the endpoint to which the change should be pushed * @param editAsBot * if true, the edit will be flagged as a "bot edit" provided that * the logged in user is in the bot group; for regular users, the * flag will just be ignored * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * Ignored if null or empty. * @return the new document after update with the API * @throws MediaWikiApiErrorException * @throws IOException */ @Override public TermedStatementDocument performEdit( WbEditingAction action, boolean editAsBot, String summary, List tags) throws IOException, MediaWikiApiErrorException { Map labelUpdates = getLabelUpdates(); Map descriptionUpdates = getDescriptionUpdates(); Map> aliasUpdates = getAliasUpdates(); if (labelUpdates.isEmpty() && descriptionUpdates.isEmpty() && aliasUpdates.isEmpty()) { return (TermedStatementDocument) super.performEdit(action, editAsBot, summary, tags); } else { if (super.isEmptyEdit()) { if(labelUpdates.size() == 1 && descriptionUpdates.isEmpty() && aliasUpdates.isEmpty()) { // we only have a label in one language to update, so we use "wbsetlabel" String language = labelUpdates.keySet().iterator().next(); MonolingualTextValue value = labelUpdates.get(language); JsonNode response = action.wbSetLabel( currentDocument.getEntityId().getId(), null, null, null, language, value.getText(), editAsBot, currentDocument.getRevisionId(), summary, tags); MonolingualTextValue respondedLabel = getDatamodelObjectFromResponse(response, Arrays.asList("entity","labels",language), TermImpl.class); long revisionId = getRevisionIdFromResponse(response); return this.currentDocument.withRevisionId(revisionId).withLabel(respondedLabel); } else if (labelUpdates.isEmpty() && descriptionUpdates.size() == 1 && aliasUpdates.isEmpty()) { // we only have a label in one language to update, so we use "wbsetlabel" String language = descriptionUpdates.keySet().iterator().next(); MonolingualTextValue value = descriptionUpdates.get(language); JsonNode response = action.wbSetDescription( currentDocument.getEntityId().getId(), null, null, null, language, value.getText(), editAsBot, currentDocument.getRevisionId(), summary, tags); MonolingualTextValue respondedDescription = getDatamodelObjectFromResponse(response, Arrays.asList("entity","descriptions",language), TermImpl.class); long revisionId = getRevisionIdFromResponse(response); return currentDocument.withRevisionId(revisionId).withDescription(respondedDescription); } else if (labelUpdates.isEmpty() && descriptionUpdates.isEmpty() && aliasUpdates.size() == 1) { // we only have aliases in one language to update, so we use "wbsetaliases" String language = aliasUpdates.keySet().iterator().next(); List addedValues = getAddedAliases(language); List removedValues = getRemovedAliases(language); List addedStrings = new ArrayList<>(addedValues.size()); for(MonolingualTextValue v : addedValues) { addedStrings.add(v.getText()); } List removedStrings = new ArrayList<>(removedValues.size()); for(MonolingualTextValue v : removedValues) { removedStrings.add(v.getText()); } JsonNode response = action.wbSetAliases( currentDocument.getEntityId().getId(), null, null, null, language, addedStrings, removedStrings, null, editAsBot, currentDocument.getRevisionId(), summary, tags); long revisionId = 
getRevisionIdFromResponse(response); TermImpl[] respondedAliases = getDatamodelObjectFromResponse(response, Arrays.asList("entity","aliases",language), TermImpl[].class); List newAliases = Arrays.asList(respondedAliases); return currentDocument.withRevisionId(revisionId).withAliases(language, newAliases); } } // All other cases: we do a full-blown "wbeditentity" EntityDocument response = action.wbEditEntity(currentDocument .getEntityId().getId(), null, null, null, getJsonUpdateString(), false, editAsBot, currentDocument .getRevisionId(), summary, tags); return (TermedStatementDocument) response; } } /** * Helper to format term updates as expected by the Wikibase API * @param updates * planned updates for the type of term * @return map ready to be serialized as JSON by Jackson */ protected Map getMonolingualUpdatedValues(Map updates) { Map updatedValues = new HashMap<>(); for(NameWithUpdate update : updates.values()) { if (!update.write) { continue; } updatedValues.put(update.value.getLanguageCode(), monolingualToJackson(update.value)); } return updatedValues; } /** * Creates a monolingual value that is suitable for JSON serialization. * @param monolingualTextValue * target monolingual value for serialization * @return Jackson implementation that is serialized appropriately */ protected TermImpl monolingualToJackson(MonolingualTextValue monolingualTextValue) { return new TermImpl(monolingualTextValue.getLanguageCode(), monolingualTextValue.getText()); } } WbEditingAction.java000066400000000000000000001045241444772566300343300ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.wikibaseapi; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.implementation.EntityDocumentImpl; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.wikibaseapi.apierrors.MaxlagErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.TokenErrorException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; /** * Java implementation for the wbeditentity API action. * * @author Michael Guenther * @author Markus Kroetzsch * @author Antonin Delpeuch */ public class WbEditingAction { static final Logger logger = LoggerFactory .getLogger(WbEditingAction.class); /** * Connection to an Wikibase API. */ final ApiConnection connection; /** * The IRI that identifies the site that the data is from. 
*/ final String siteIri; /** * Mapper object used for deserializing JSON data. */ final ObjectMapper mapper; /** * Value in seconds of MediaWiki's maxlag parameter. Shorter is nicer, * longer is more aggressive. */ int maxLag = 5; /** * Number of times we should retry if an editing action fails because * the lag is too high. */ int maxLagMaxRetries = 14; /** * Initial wait time in milliseconds, when an edit fails for the first * time because of a high lag. This wait time is going to be multiplied * by maxLagBackOffFactor for the subsequent waits. */ int maxLagFirstWaitTime = 1000; /** * Factor by which the wait time between two maxlag retries should be * multiplied at each attempt. */ double maxLagBackOffFactor = 1.5; /** * Number of recent editing times to monitor in order to avoid editing too * fast. Wikidata.org seems to block fast editors after 9 edits, so this * size seems to make sense. */ final static int editTimeWindow = 9; /** * Average time to wait after each edit. Individual edits can be faster than * this, but it is ensured that this time will be taken per edit in the long * run. */ int averageMsecsPerEdit = 2000; /** * Times of the last {@link #editTimeWindow} edits. Used in a loop. Most * recent edit time is at {@link #curEditTimeSlot}. */ final long[] recentEditTimes = new long[editTimeWindow]; /** * @see #recentEditTimes */ int curEditTimeSlot = 0; /** * Number of edits that will be performed before the object enters * simulation mode, or -1 if there is no limit on the number of edits. */ int remainingEdits = -1; /** * Creates an object to modify data on a Wikibase site. The API is used to * request the changes. The site URI is necessary since it is not contained * in the data retrieved from the API. * * @param connection * {@link ApiConnection} Object to send the requests * @param siteIri * the URI identifying the site that is accessed (usually the * prefix of entity URIs), e.g., * "http://www.wikidata.org/entity/" */ public WbEditingAction(ApiConnection connection, String siteIri) { this.connection = connection; this.siteIri = siteIri; this.mapper = new DatamodelMapper(siteIri); } /** * Returns the current value of the maxlag parameter. It specifies the * number of seconds. To save actions causing any more site replication lag, * this parameter can make the client wait until the replication lag is less * than the specified value. In case of excessive lag, error code "maxlag" * is returned upon API requests. * * @return current setting of the maxlag parameter */ public int getMaxLag() { return this.maxLag; } /** * Set the value of the maxlag parameter. If unsure, keep the default. See * {@link #getMaxLag()} for details. * * @param maxLag * the new value in seconds */ public void setMaxLag(int maxLag) { this.maxLag = maxLag; } /** * Returns the number of edits that will be performed before entering * simulation mode, or -1 if there is no limit on the number of edits * (default). * * @return number of remaining edits */ public int getRemainingEdits() { return this.remainingEdits; } /** * Sets the number of edits that this object can still perform. Thereafter, * edits will only be prepared but not actually performed in the Web API. * This function is useful to do a defined number of test edits. If this * number is set to 0, then no edits will be performed. If it is set to -1 * (or any other negative number), then there is no limit on the edits * performed. 
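* For example, {@code setRemainingEdits(0)} switches the object into pure
* simulation mode, which is useful for dry runs.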
* * @param remainingEdits * number of edits that can still be performed, or -1 to disable * this limit (default setting) */ public void setRemainingEdits(int remainingEdits) { this.remainingEdits = remainingEdits; } /** * Returns the average time in milliseconds that one edit will take. This * time is enforced to avoid overloading the site with too many edits, and * also to throttle the rate of editing (which is useful to stop a bot in * case of errors). Individual edits can be faster than this, but if several * consecutive edits are above this rate, the program will pause until the * expected speed is reached again. The delay is based on real system time. * This means that it will only wait as long as necessary. If your program * takes time between edits for other reasons, there will be no additional * delay caused by this feature. * * @return average time per edit in milliseconds */ public int getAverageTimePerEdit() { return this.averageMsecsPerEdit; } /** * Sets the average time that a single edit should take, measured in * milliseconds. See {@link #getAverageTimePerEdit()} for details. * * @param milliseconds * the new value in milliseconds */ public void setAverageTimePerEdit(int milliseconds) { this.averageMsecsPerEdit = milliseconds; } /** * Executes the API action "wbeditentity" for the given parameters. Created * or modified items are returned as a result. In particular, this is * relevant to find out about the id assigned to a newly created entity. *
	 * <p>
    * Unless the parameter clear is true, data of existing entities will be * modified or added, but not deleted. For labels, descriptions, and * aliases, this happens by language. In particular, if an item has English * and German aliases, and an edit action writes a new English alias, then * this new alias will replace all previously existing English aliases, * while the German aliases will remain untouched. In contrast, adding * statements for a certain property will not delete existing statements of * this property. In fact, it is even possible to create many copies of the * exact same statement. A special JSON syntax exists for deleting specific * statements. *
	 * <p>
    * See the online API documentation for further information. *
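	 * <p>
	 * A rough usage sketch (the connection, item id, and JSON payload shown
	 * here are illustrative placeholders, not values from this codebase):
	 * <pre>{@code
	 * WbEditingAction action = new WbEditingAction(connection,
	 *         "http://www.wikidata.org/entity/");
	 * String data = "{\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"An example\"}}}";
	 * EntityDocument result = action.wbEditEntity("Q4115189", null, null, null,
	 *         data, false, false, 0, "setting an English label", null);
	 * }</pre>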
	 * <p>
    * TODO: There is currently no way to delete the label, description, or * aliases for a particular language without clearing all data. Empty * strings are not accepted. One might achieve this by adapting the JSON * serialization to produce null values for such strings, and for alias * lists that contain only such strings. * * @param id * the id of the entity to be edited; if used, the site and title * parameters must be null * @param site * when selecting an entity by title, the site key for the title, * e.g., "enwiki"; if used, title must also be given but id must * be null * @param title * string used to select an entity by title; if used, site must * also be given but id must be null * @param newEntity * used for creating a new entity of a given type; the value * indicates the intended entity type; possible values include * "item" and "property"; if used, the parameters id, site, and * title must be null * @param data * JSON representation of the data that is to be written; this is * a mandatory parameter * @param clear * if true, existing data will be cleared (deleted) before * writing the new data * @param bot * if true, edits will be flagged as "bot edits" provided that * the logged in user is in the bot group; for regular users, the * flag will just be ignored * @param baserevid * the revision of the data that the edit refers to or 0 if this * should not be submitted; when used, the site will ensure that * no edit has happened since this revision to detect edit * conflicts; it is recommended to use this whenever in all * operations where the outcome depends on the state of the * online data * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. Ignored if null. * @return the JSON response as returned by the API * @throws IOException * if there was an IO problem. such as missing network * connection * @throws MediaWikiApiErrorException * if the API returns an error */ public EntityDocument wbEditEntity(String id, String site, String title, String newEntity, String data, boolean clear, boolean bot, long baserevid, String summary, List tags) throws IOException, MediaWikiApiErrorException { Validate.notNull(data, "Data parameter cannot be null when editing entity data"); Map parameters = new HashMap<>(); parameters.put("data", data); if (clear) { parameters.put("clear", ""); } JsonNode response = performAPIAction("wbeditentity", id, site, title, newEntity, parameters, summary, tags, baserevid, bot); return getEntityDocumentFromResponse(response); } /** * Executes the API action "wbsetlabel" for the given parameters. * @param id * the id of the entity to be edited; if used, the site and title * parameters must be null * @param site * when selecting an entity by title, the site key for the title, * e.g., "enwiki"; if used, title must also be given but id must * be null * @param title * string used to select an entity by title; if used, site must * also be given but id must be null * @param newEntity * used for creating a new entity of a given type; the value * indicates the intended entity type; possible values include * "item" and "property"; if used, the parameters id, site, and * title must be null * @param language * the language code for the label * @param value * the value of the label to set. Set it to null to remove the label. 
	 * @param bot
	 *            if true, edits will be flagged as "bot edits" provided that
	 *            the logged in user is in the bot group; for regular users, the
	 *            flag will just be ignored
	 * @param baserevid
	 *            the revision of the data that the edit refers to or 0 if this
	 *            should not be submitted; when used, the site will ensure that
	 *            no edit has happened since this revision to detect edit
	 *            conflicts; it is recommended to use this in all operations
	 *            where the outcome depends on the state of the online data
	 * @param summary
	 *            summary for the edit; will be prepended by an automatically
	 *            generated comment; the length limit of the autocomment
	 *            together with the summary is 260 characters: everything above
	 *            that limit will be cut off
	 * @param tags
	 *            string identifiers of the tags to apply to the edit. Ignored
	 *            if null.
	 * @return the JSON response from the API
	 * @throws IOException
	 *             if there was an IO problem, such as a missing network
	 *             connection
	 * @throws MediaWikiApiErrorException
	 *             if the API returns an error
	 */
	public JsonNode wbSetLabel(String id, String site, String title,
			String newEntity, String language, String value, boolean bot,
			long baserevid, String summary, List<String> tags)
			throws IOException, MediaWikiApiErrorException {
		Validate.notNull(language,
				"Language parameter cannot be null when setting a label");
		Map<String, String> parameters = new HashMap<>();
		parameters.put("language", language);
		if (value != null) {
			parameters.put("value", value);
		}

		return performAPIAction("wbsetlabel", id, site, title, newEntity,
				parameters, summary, tags, baserevid, bot);
	}

	/**
	 * Executes the API action "wbsetdescription" for the given parameters.
	 *
	 * @param id
	 *            the id of the entity to be edited; if used, the site and title
	 *            parameters must be null
	 * @param site
	 *            when selecting an entity by title, the site key for the title,
	 *            e.g., "enwiki"; if used, title must also be given but id must
	 *            be null
	 * @param title
	 *            string used to select an entity by title; if used, site must
	 *            also be given but id must be null
	 * @param newEntity
	 *            used for creating a new entity of a given type; the value
	 *            indicates the intended entity type; possible values include
	 *            "item" and "property"; if used, the parameters id, site, and
	 *            title must be null
	 * @param language
	 *            the language code for the description
	 * @param value
	 *            the value of the description to set. Set it to null to remove
	 *            the description.
	 * @param bot
	 *            if true, edits will be flagged as "bot edits" provided that
	 *            the logged in user is in the bot group; for regular users, the
	 *            flag will just be ignored
	 * @param baserevid
	 *            the revision of the data that the edit refers to or 0 if this
	 *            should not be submitted; when used, the site will ensure that
	 *            no edit has happened since this revision to detect edit
	 *            conflicts; it is recommended to use this in all operations
	 *            where the outcome depends on the state of the online data
	 * @param summary
	 *            summary for the edit; will be prepended by an automatically
	 *            generated comment; the length limit of the autocomment
	 *            together with the summary is 260 characters: everything above
	 *            that limit will be cut off
	 * @param tags
	 *            string identifiers of the tags to apply to the edit. Ignored
	 *            if null.
	 * @return the JSON response from the API
	 * @throws IOException
	 *             if there was an IO problem,
	 *             such as a missing network connection
	 * @throws MediaWikiApiErrorException
	 *             if the API returns an error
	 */
	public JsonNode wbSetDescription(String id, String site, String title,
			String newEntity, String language, String value, boolean bot,
			long baserevid, String summary, List<String> tags)
			throws IOException, MediaWikiApiErrorException {
		Validate.notNull(language,
				"Language parameter cannot be null when setting a description");
		Map<String, String> parameters = new HashMap<>();
		parameters.put("language", language);
		if (value != null) {
			parameters.put("value", value);
		}

		return performAPIAction("wbsetdescription", id, site, title, newEntity,
				parameters, summary, tags, baserevid, bot);
	}

	/**
	 * Executes the API action "wbsetaliases" for the given parameters.
	 *
	 * @param id
	 *            the id of the entity to be edited; if used, the site and title
	 *            parameters must be null
	 * @param site
	 *            when selecting an entity by title, the site key for the title,
	 *            e.g., "enwiki"; if used, title must also be given but id must
	 *            be null
	 * @param title
	 *            string used to select an entity by title; if used, site must
	 *            also be given but id must be null
	 * @param newEntity
	 *            used for creating a new entity of a given type; the value
	 *            indicates the intended entity type; possible values include
	 *            "item" and "property"; if used, the parameters id, site, and
	 *            title must be null
	 * @param language
	 *            the language code for the aliases
	 * @param add
	 *            the values of the aliases to add. They will be merged with the
	 *            existing aliases. This parameter cannot be used in conjunction
	 *            with "set".
	 * @param remove
	 *            the values of the aliases to remove. Other aliases will be
	 *            retained. This parameter cannot be used in conjunction with
	 *            "set".
	 * @param set
	 *            the values of the aliases to set. This will erase any existing
	 *            aliases in this language and replace them by the given list.
	 * @param bot
	 *            if true, edits will be flagged as "bot edits" provided that
	 *            the logged in user is in the bot group; for regular users, the
	 *            flag will just be ignored
	 * @param baserevid
	 *            the revision of the data that the edit refers to or 0 if this
	 *            should not be submitted; when used, the site will ensure that
	 *            no edit has happened since this revision to detect edit
	 *            conflicts; it is recommended to use this in all operations
	 *            where the outcome depends on the state of the online data
	 * @param summary
	 *            summary for the edit; will be prepended by an automatically
	 *            generated comment; the length limit of the autocomment
	 *            together with the summary is 260 characters: everything above
	 *            that limit will be cut off
	 * @param tags
	 *            string identifiers of the tags to apply to the edit. Ignored
	 *            if null.
	 * @return the JSON response from the API
	 * @throws IOException
	 *             if there was an IO problem,
	 *             such as a missing network connection
	 * @throws MediaWikiApiErrorException
	 *             if the API returns an error
	 */
	public JsonNode wbSetAliases(String id, String site, String title,
			String newEntity, String language, List<String> add,
			List<String> remove, List<String> set, boolean bot,
			long baserevid, String summary, List<String> tags)
			throws IOException, MediaWikiApiErrorException {
		Validate.notNull(language,
				"Language parameter cannot be null when setting aliases");
		Map<String, String> parameters = new HashMap<>();
		parameters.put("language", language);
		if (set != null) {
			if (add != null || remove != null) {
				throw new IllegalArgumentException(
						"Cannot use parameters \"add\" or \"remove\" when using \"set\" to edit aliases");
			}
			parameters.put("set", ApiConnection.implodeObjects(set));
		}
		if (add != null) {
			parameters.put("add", ApiConnection.implodeObjects(add));
		}
		if (remove != null) {
			parameters.put("remove", ApiConnection.implodeObjects(remove));
		}

		return performAPIAction("wbsetaliases", id, site, title, newEntity,
				parameters, summary, tags, baserevid, bot);
	}

	/**
	 * Executes the API action "wbsetclaim" for the given parameters.
	 *
	 * @param statement
	 *            the JSON serialization of the claim to add or delete
	 * @param bot
	 *            if true, edits will be flagged as "bot edits" provided that
	 *            the logged in user is in the bot group; for regular users, the
	 *            flag will just be ignored
	 * @param baserevid
	 *            the revision of the data that the edit refers to or 0 if this
	 *            should not be submitted; when used, the site will ensure that
	 *            no edit has happened since this revision to detect edit
	 *            conflicts; it is recommended to use this in all operations
	 *            where the outcome depends on the state of the online data
	 * @param summary
	 *            summary for the edit; will be prepended by an automatically
	 *            generated comment; the length limit of the autocomment
	 *            together with the summary is 260 characters: everything above
	 *            that limit will be cut off
	 * @param tags
	 *            string identifiers of the tags to apply to the edit. Ignored
	 *            if null.
	 * @return the JSON response from the API
	 * @throws IOException
	 *             if there was an IO problem, such as a missing network
	 *             connection
	 * @throws MediaWikiApiErrorException
	 *             if the API returns an error
	 */
	public JsonNode wbSetClaim(String statement, boolean bot, long baserevid,
			String summary, List<String> tags)
			throws IOException, MediaWikiApiErrorException {
		Validate.notNull(statement,
				"Statement parameter cannot be null when adding or changing a statement");
		Map<String, String> parameters = new HashMap<>();
		parameters.put("claim", statement);

		return performAPIAction("wbsetclaim", null, null, null, null,
				parameters, summary, tags, baserevid, bot);
	}

	/**
	 * Executes the API action "wbremoveclaims" for the given parameters.
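	 * <p>
	 * Illustrative call (the statement id shown is made up):
	 * <pre>{@code
	 * action.wbRemoveClaims(
	 *         Arrays.asList("Q4115189$5D4B98F8-0000-4000-8000-000000000000"),
	 *         false, 0, "removing an obsolete claim", null);
	 * }</pre>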
	 *
	 * @param statementIds
	 *            the statement ids to delete
	 * @param bot
	 *            if true, edits will be flagged as "bot edits" provided that
	 *            the logged in user is in the bot group; for regular users, the
	 *            flag will just be ignored
	 * @param baserevid
	 *            the revision of the data that the edit refers to or 0 if this
	 *            should not be submitted; when used, the site will ensure that
	 *            no edit has happened since this revision to detect edit
	 *            conflicts; it is recommended to use this in all operations
	 *            where the outcome depends on the state of the online data
	 * @param summary
	 *            summary for the edit; will be prepended by an automatically
	 *            generated comment; the length limit of the autocomment
	 *            together with the summary is 260 characters: everything above
	 *            that limit will be cut off
	 * @param tags
	 *            string identifiers of the tags to apply to the edit. Ignored
	 *            if null.
	 * @return the JSON response from the API
	 * @throws IOException
	 *             if there was an IO problem, such as a missing network
	 *             connection
	 * @throws MediaWikiApiErrorException
	 *             if the API returns an error
	 */
	public JsonNode wbRemoveClaims(List<String> statementIds, boolean bot,
			long baserevid, String summary, List<String> tags)
			throws IOException, MediaWikiApiErrorException {
		Validate.notNull(statementIds,
				"statementIds parameter cannot be null when deleting statements");
		Validate.notEmpty(statementIds,
				"statement ids to delete must be non-empty when deleting statements");
		Validate.isTrue(statementIds.size() <= 50,
				"At most 50 statements can be deleted at once");
		Map<String, String> parameters = new HashMap<>();
		parameters.put("claim", String.join("|", statementIds));

		return performAPIAction("wbremoveclaims", null, null, null, null,
				parameters, summary, tags, baserevid, bot);
	}

	/**
	 * Executes an editing API action for the given parameters. The JSON
	 * response returned by Wikibase is given back as the result.
	 *
	 * <p>
    * See the online API documentation for further information. * * @param id * the id of the entity to be edited; if used, the site and title * parameters must be null * @param site * when selecting an entity by title, the site key for the title, * e.g., "enwiki"; if used, title must also be given but id must * be null * @param title * string used to select an entity by title; if used, site must * also be given but id must be null * @param newEntity * used for creating a new entity of a given type; the value * indicates the intended entity type; possible values include * "item" and "property"; if used, the parameters id, site, and * title must be null * @param parameters * the other parameters which are specific to the particular * action being carried out * @param bot * if true, edits will be flagged as "bot edits" provided that * the logged in user is in the bot group; for regular users, the * flag will just be ignored * @param baserevid * the revision of the data that the edit refers to or 0 if this * should not be submitted; when used, the site will ensure that * no edit has happened since this revision to detect edit * conflicts; it is recommended to use this whenever in all * operations where the outcome depends on the state of the * online data * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. Ignored if null. * @return the JSON response from the API * @throws IOException * if there was an IO problem. such as missing network * connection * @throws MediaWikiApiErrorException * if the API returns an error */ private JsonNode performAPIAction( String action, String id, String site, String title, String newEntity, Map parameters, String summary, List tags, long baserevid, boolean bot) throws IOException, MediaWikiApiErrorException { parameters.put(ApiConnection.PARAM_ACTION, action); if (newEntity != null) { parameters.put("new", newEntity); if (title != null || site != null || id != null) { throw new IllegalArgumentException( "Cannot use parameters \"id\", \"site\", or \"title\" when creating a new entity."); } } else if (id != null) { parameters.put("id", id); if (title != null || site != null) { throw new IllegalArgumentException( "Cannot use parameters \"site\" or \"title\" when using id to edit entity data"); } } else if (title != null) { if (site == null) { throw new IllegalArgumentException( "Site parameter is required when using title parameter to edit entity data."); } parameters.put("site", site); parameters.put("title", title); } else if (!"wbsetclaim".equals(action) && !"wbremoveclaims".equals(action)) { throw new IllegalArgumentException( "This action must create a new item, or specify an id, or specify a site and title."); } if (bot) { parameters.put("bot", ""); } if (baserevid != 0) { parameters.put("baserevid", Long.toString(baserevid)); } if (summary != null) { parameters.put("summary", summary); } if (tags != null && !tags.isEmpty()) { parameters.put("tags", String.join("|", tags)); } parameters.put("maxlag", Integer.toString(this.maxLag)); parameters.put("token", connection.getOrFetchToken("csrf")); if (this.remainingEdits > 0) { this.remainingEdits--; } else if (this.remainingEdits == 0) { logger.info("Not editing entity (simulation mode). 
Request parameters were: " + parameters.toString()); return null; } checkEditSpeed(); JsonNode result = null; int retry = getMaxLagMaxRetries(); int maxLagSleepTime = getMaxLagFirstWaitTime(); MediaWikiApiErrorException lastException = null; while (retry > 0) { try { result = this.connection.sendJsonRequest("POST", parameters); break; } catch (TokenErrorException e) { // try again with a fresh token lastException = e; connection.clearToken("csrf"); parameters.put("token", connection.getOrFetchToken("csrf")); } catch (MaxlagErrorException e) { // wait for 5 seconds lastException = e; logger.warn(e.getMessage() + String.format(" -- pausing for %d milliseconds.", maxLagSleepTime)); try { Thread.sleep(maxLagSleepTime); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } maxLagSleepTime *= getMaxLagBackOffFactor(); } retry--; } if (retry == 0 && lastException != null) { logger.error("Gave up after several retries. Last error was: " + lastException.toString()); throw lastException; } return result; } /** * TODO: TO BE REFACTORED * @param root * @return * @throws IOException */ protected EntityDocument getEntityDocumentFromResponse(JsonNode root) throws IOException { if (root == null) { return null; } if (root.has("item")) { return parseJsonResponse(root.path("item")); } else if (root.has("property")) { // TODO: not tested because of missing // permissions return parseJsonResponse(root.path("property")); } else if (root.has("entity")) { return parseJsonResponse(root.path("entity")); } else { throw new MalformedResponseException( "No entity document found in API response."); } } /** * Parse a JSON response to extract an entity document. *
	 * <p>
    * TODO This method currently contains code to work around Wikibase issue * https://phabricator.wikimedia.org/T73349. This should be removed once the * issue is fixed. * * @param entityNode * the JSON node that should contain the entity document data * @return the entity document, or null if there were unrecoverable errors * @throws IOException */ private EntityDocument parseJsonResponse(JsonNode entityNode) throws IOException { return mapper.readerFor(EntityDocumentImpl.class) .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .readValue(entityNode); } /** * Makes sure that we are not editing too fast. The method stores the last * {@link WbEditingAction#editTimeWindow} time points when an edit was * made. If the time since the oldest edit in this window is shorter than * {@link #averageMsecsPerEdit} milliseconds, then the method will pause the * thread for the remaining time. */ private void checkEditSpeed() { long currentTime = System.nanoTime(); int nextIndex = (this.curEditTimeSlot + 1) % editTimeWindow; if (this.recentEditTimes[nextIndex] != 0 && (currentTime - this.recentEditTimes[nextIndex]) / 1000000 < this.averageMsecsPerEdit * editTimeWindow) { long sleepTime = this.averageMsecsPerEdit * editTimeWindow - (currentTime - this.recentEditTimes[nextIndex]) / 1000000; logger.info("We are editing too fast. Pausing for " + sleepTime + " milliseconds."); try { Thread.sleep(sleepTime); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } currentTime = System.nanoTime(); } this.recentEditTimes[nextIndex] = currentTime; this.curEditTimeSlot = nextIndex; } /** * Number of times we should retry if an editing action fails because * the lag is too high. */ public int getMaxLagMaxRetries() { return maxLagMaxRetries; } /** * Number of times we should retry if an editing action fails because * the lag is too high. */ public void setMaxLagMaxRetries(int retries) { maxLagMaxRetries = retries; } /** * Initial wait time in milliseconds, when an edit fails for the first * time because of a high lag. This wait time is going to be multiplied * by maxLagBackOffFactor for the subsequent waits. */ public int getMaxLagFirstWaitTime() { return maxLagFirstWaitTime; } /** * Initial wait time in milliseconds, when an edit fails for the first * time because of a high lag. This wait time is going to be multiplied * by maxLagBackOffFactor for the subsequent waits. */ public void setMaxLagFirstWaitTime(int time) { maxLagFirstWaitTime = time; } /** * Factor by which the wait time between two maxlag retries should be * multiplied at each attempt. */ public double getMaxLagBackOffFactor() { return maxLagBackOffFactor; } /** * Factor by which the wait time between two maxlag retries should be * multiplied at each attempt. */ public void setMaxLagBackOffFactor(double value) { maxLagBackOffFactor = value; } /** * Retrieves the current lag from the target site, by making an API call. * * @throws MediaWikiApiErrorException * when an unexpected MediaWiki API error happened (not the spurious * one normally returned by MediaWiki when retrieving lag). * @throws IOException * when communication with the server failed. 
* */ public double getCurrentLag() throws IOException, MediaWikiApiErrorException { Map parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("maxlag", "-1"); try { this.connection.sendJsonRequest("POST", parameters); } catch (MaxlagErrorException e) { return e.getLag(); } throw new IllegalStateException("MediaWiki did not return any maxlag value"); } } WbGetEntitiesAction.java000066400000000000000000000233761444772566300351760ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.wikibaseapi; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper; import org.wikidata.wdtk.datamodel.implementation.EntityDocumentImpl; import org.wikidata.wdtk.datamodel.implementation.EntityIdValueImpl; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.SiteLink; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; /** * Java implementation of the wbgetentities action. * * @author Michael Guenther * */ public class WbGetEntitiesAction { /** * Connection to an Wikibase API. */ final ApiConnection connection; /** * The IRI that identifies the site that the data is from. */ final String siteIri; /** * Mapper object used for deserializing JSON data. */ final ObjectMapper mapper; /** * Creates an object to fetch data from the given ApiConnection. The site * URI is necessary since it is not contained in the data retrieved from the * API. 
* * @param connection * {@link ApiConnection} Object to send the requests * @param siteIri * the URI identifying the site that is accessed (usually the * prefix of entity URIs), e.g., * "http://www.wikidata.org/entity/" */ public WbGetEntitiesAction(ApiConnection connection, String siteIri) { this.connection = connection; this.siteIri = siteIri; this.mapper = new DatamodelMapper(siteIri); } /** * Creates a map of identifiers or page titles to documents retrieved via * the API URL * * @param properties * parameter setting for wbgetentities * @return map of document identifiers or titles to documents retrieved via * the API URL * @throws MediaWikiApiErrorException * if the API returns an error * @throws IOException * if we encounter network issues or HTTP 500 errors from Wikibase * @throws MalformedResponseException * if one of the returned entities cannot be parsed */ public Map wbGetEntities( WbGetEntitiesActionData properties) throws MediaWikiApiErrorException, IOException { return wbGetEntities(properties.ids, properties.sites, properties.titles, properties.props, properties.languages, properties.sitefilter); } /** * Creates a map of identifiers or page titles to documents retrieved via * the API. All parameters that accept lists expect the pipe character | to * be used as a separator, as created by * {@link ApiConnection#implodeObjects(Iterable)}. There is a limit on how * many entities can be retrieved in one request, usually 50 by default and * 500 for bots. This limit may also apply to the number of language codes * and sites used for filtering. *
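	 * <p>
	 * For example, fetching English and German terms for two items could look
	 * like this (sketch; {@code connection} is a placeholder):
	 * <pre>{@code
	 * WbGetEntitiesAction action = new WbGetEntitiesAction(connection,
	 *         "http://www.wikidata.org/entity/");
	 * Map<String, EntityDocument> documents = action.wbGetEntities(
	 *         "Q42|Q80", null, null, "labels|descriptions", "en|de", null);
	 * }</pre>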
	 * <p>
    * If an error occurs (e.g., exceptions trying to access the Web API), * the exception will be propagated to the caller. * * @param ids * list of ids of entities for which data should be retrieved * @param sites * site key (e.g. "enwiki"); used together with parameters * "titles"; the API supports the use of many site keys with a * single title, but this implementation does not support this * (the resulting map will use title strings for keys) * @param titles * list of titles of the page corresponding to the requested * entities on the given site; use together with 'sites', but * only give one site for several titles or several sites for one * title * @param props * list of strings that specifies what kind of data should be * retrieved for each entity; possible values include "info", * "sitelinks", "sitelinks/urls", "aliases", "labels", * "descriptions", "claims" (statements), "datatype"; additional * filters may apply; defaults to * "info|sitelinks|aliases|labels|descriptions|claims|datatype" * @param languages * list of language codes to return labels, aliases or * descriptions for; if omitted, data for all languages is * returned * @param sitefilter * list of site keys to return sitelinks for; if omitted, data * for all languages is returned * * @return map of document identifiers or titles to documents retrieved via * the API URL * @throws MediaWikiApiErrorException * if the API returns an error * @throws IOException * if we encounter network errors, or HTTP 500 errors on Wikibase's side * @throws IllegalArgumentException * if the given combination of parameters does not make sense * @throws MalformedResponseException * if one of the returned entities cannot be parsed */ public Map wbGetEntities(String ids, String sites, String titles, String props, String languages, String sitefilter) throws MediaWikiApiErrorException, IOException { Map parameters = new HashMap<>(); parameters.put(ApiConnection.PARAM_ACTION, "wbgetentities"); List titlesList = titles == null ? Collections.emptyList() : Arrays.asList(titles.split("\\|")); if (ids != null) { parameters.put("ids", ids); if (titles != null || sites != null) { throw new IllegalArgumentException( "Cannot use parameters \"sites\" or \"titles\" when using ids to get entity data"); } } else if (titles != null) { parameters.put("titles", titles); if (sites == null) { throw new IllegalArgumentException( "Sites parameter is required when using titles parameter to get entity data."); } parameters.put("sites", sites); } else { throw new IllegalArgumentException( "Either ids, or titles and site must be specified for this action."); } if (props != null) { parameters.put("props", props); } if (languages != null) { parameters.put("languages", languages); } if (sitefilter != null) { parameters.put("sitefilter", sitefilter); } Map result = new HashMap<>(); JsonNode root = this.connection.sendJsonRequest("POST", parameters); JsonNode entities = root.path("entities"); Iterator> entitiesIterator = entities.fields(); int i = 0; while(entitiesIterator.hasNext()) { Entry entry = entitiesIterator.next(); JsonNode entityNode = entry.getValue(); if(!entityNode.has("missing")) { try { EntityDocument ed = mapper.reader() .with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT) .treeToValue(entityNode, EntityDocumentImpl.class); if (titles == null) { // We use the JSON key rather than the id of the value // so that retrieving redirected entities works. 
result.put(entry.getKey(), ed); } else { if (ed instanceof ItemDocument) { SiteLink siteLink = ((ItemDocument) ed).getSiteLinks().get(sites); if(siteLink != null) { result.put(siteLink.getPageTitle(), ed); } } else if(ed instanceof MediaInfoDocument) { result.put(entityNode.get("title").textValue(), ed); } } } catch (JsonProcessingException e) { throw new MalformedResponseException( "Error when reading JSON for entity " + entityNode.path("id").asText("UNKNOWN"), e); } } else if(entityNode.has("id")) { try { EntityIdValue entityIdValue = EntityIdValueImpl.fromId(entityNode.get("id").asText(), siteIri); if(entityIdValue instanceof MediaInfoIdValue) { //TODO: bad hack, it would be much nicer if the API would return the page title MediaInfoDocument emptyDocument = Datamodel.makeMediaInfoDocument((MediaInfoIdValue) entityIdValue); String key = null; if (titles != null) { key = titlesList.get(i); } else { key = entityIdValue.getId(); } result.put(key, emptyDocument); } } catch (IllegalArgumentException e) { throw new MalformedResponseException( "Invalid entity id returned: " + entityNode.get("id").asText(), e); } } i++; } return result; } } WbGetEntitiesActionData.java000066400000000000000000000044351444772566300357630ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * This class is a simple record that holds the properties of an wbgetentities * action request. It is used internally by {@link WikibaseDataFetcher} to * gather parameters for the request. * * @author Michael Guenther * */ public class WbGetEntitiesActionData { /** * List of ids for entities. Use | as a separator. See * {@link WbGetEntitiesAction#wbGetEntities(String, String, String, String, String, String)} * for details. */ public String ids = null; /** * List of site keys. Use | as a separator. See * {@link WbGetEntitiesAction#wbGetEntities(String, String, String, String, String, String)} * for details. */ public String sites = null; /** * List of page titles. Use | as a separator. See * {@link WbGetEntitiesAction#wbGetEntities(String, String, String, String, String, String)} * for details. */ public String titles = null; /** * List of strings that define which data should be returned. Use | as a * separator. See * {@link WbGetEntitiesAction#wbGetEntities(String, String, String, String, String, String)} * for details. */ public String props = null; /** * List of language codes for restricting language-specific data. Use | as a * separator. See * {@link WbGetEntitiesAction#wbGetEntities(String, String, String, String, String, String)} * for details. */ public String languages = null; /** * List of site keys for restricting site links. Use | as a separator. See * {@link WbGetEntitiesAction#wbGetEntities(String, String, String, String, String, String)} * for details. 
*/ public String sitefilter = null; } WbGetEntitiesSearchData.java000066400000000000000000000050211444772566300357430ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * This class is a simple record that holds the properties of an wbgetentities * action request. It is used internally by {@link WikibaseDataFetcher} to * gather parameters for the request. * * @author Michael Guenther * */ public class WbGetEntitiesSearchData { /** * search for this text. See * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} * for details. */ public String search = null; /** * search in this language. See * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} * for details. */ public String language = null; /** * whether to disable language fallback. See * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} * for details. */ public Boolean strictlanguage = null; /** * search for this type of entity * One of the following values: item, property. See * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} * for details. */ public String type = null; /** * maximal number of results * no more than 50 (500 for bots) allowed. See * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} * for details. */ public Long limit = null; /** * offset where to continue a search * this parameter is called "continue" in the API (which is a Java keyword). See * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} * for details. */ public Long offset = null; /** * get response in this language. See * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} * for details. */ public String uselang = null; } WbSearchEntitiesAction.java000066400000000000000000000140351444772566300356540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.wikibaseapi; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; /** * Java implementation of the wbsearchentities action. * * @author Sören Brunk * */ public class WbSearchEntitiesAction { /** * Connection to a Wikibase API. */ private final ApiConnection connection; /** * Mapper object used for deserializing JSON data. */ private final ObjectMapper mapper = new ObjectMapper(); /** * Creates an object to fetch data from the given ApiConnection. The site * URI is necessary since it is not contained in the data retrieved from the * API. * * @param connection * {@link ApiConnection} Object to send the requests * @param siteUri * the URI identifying the site that is accessed (usually the * prefix of entity URIs), e.g., * "http://www.wikidata.org/entity/" */ public WbSearchEntitiesAction(ApiConnection connection, String siteUri) { this.connection = connection; } public List wbSearchEntities(WbGetEntitiesSearchData properties) throws MediaWikiApiErrorException, IOException { return wbSearchEntities(properties.search, properties.language, properties.strictlanguage, properties.type, properties.limit, properties.offset, properties.uselang); } /** * Keeping this for backwards compatibility, the real action happens in * {@link WbSearchEntitiesAction#wbSearchEntities(String, String, Boolean, String, Long, Long, String)} */ public List wbSearchEntities(String search, String language, Boolean strictLanguage, String type, Long limit, Long offset) throws MediaWikiApiErrorException, IOException { return wbSearchEntities(search, language, strictLanguage, type, limit, offset, null); } /** * Executes the API action "wbsearchentity" for the given parameters. * Searches for entities using labels and aliases. Returns a label and * description for the entity in the user language if possible. Returns * details of the matched term. The matched term text is also present in the * aliases key if different from the display label. * *
	 * <p>
    * See the online API documentation for further information. *
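	 * <p>
	 * A minimal search sketch (assuming {@code action} was constructed with a
	 * suitable {@link ApiConnection}):
	 * <pre>{@code
	 * List<WbSearchEntitiesResult> results = action.wbSearchEntities(
	 *         "Douglas Adams", "en", null, "item", 10L, 0L, null);
	 * }</pre>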
	 * <p>
    * * @param search * (required) search for this text * @param language * (required) search in this language * @param strictLanguage * (optional) whether to disable language fallback * @param type * (optional) search for this type of entity * One of the following values: item, property * Default: item * @param limit * (optional) maximal number of results * no more than 50 (500 for bots) allowed * Default: 7 * @param offset * (optional) offset where to continue a search * Default: 0 * this parameter is called "continue" in the API (which is a Java keyword) * @param uselang * (optional) the response should have this language, default en * @return list of matching entities retrieved via the API URL * @throws MediaWikiApiErrorException * if the API returns an error * @throws IllegalArgumentException * if the given combination of parameters does not make sense * @throws MalformedResponseException * if response JSON cannot be parsed */ public List wbSearchEntities(String search, String language, Boolean strictLanguage, String type, Long limit, Long offset, String uselang) throws MediaWikiApiErrorException, IOException { Map parameters = new HashMap<>(); parameters.put(ApiConnection.PARAM_ACTION, "wbsearchentities"); if (search != null) { parameters.put("search", search); } else { throw new IllegalArgumentException( "Search parameter must be specified for this action."); } if (language != null) { parameters.put("language", language); } else { throw new IllegalArgumentException( "Language parameter must be specified for this action."); } if (strictLanguage != null) { parameters.put("strictlanguage", Boolean.toString(strictLanguage)); } if (type != null) { parameters.put("type", type); } if (limit != null) { parameters.put("limit", Long.toString(limit)); } if (offset != null) { parameters.put("continue", Long.toString(offset)); } if (uselang != null) { parameters.put("uselang", uselang); } List results = new ArrayList<>(); JsonNode root = this.connection.sendJsonRequest("POST", parameters); JsonNode entities = root.path("search"); for (JsonNode entityNode : entities) { try { JacksonWbSearchEntitiesResult ed = mapper.treeToValue(entityNode, JacksonWbSearchEntitiesResult.class); results.add(ed); } catch (JsonProcessingException e) { throw new MalformedResponseException( "Error when reading JSON for entity " + entityNode.path("id").asText("UNKNOWN"), e); } } return results; } } WbSearchEntitiesResult.java000066400000000000000000000062161444772566300357170ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2017 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.List; /** * Represents the result of a wbsearchentities action. 
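 * <p>
 * Typical consumption of such results (sketch):
 * <pre>{@code
 * for (WbSearchEntitiesResult result : results) {
 *     System.out.println(result.getEntityId() + ": " + result.getLabel());
 * }
 * }</pre>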
* * @author Sören Brunk */ public interface WbSearchEntitiesResult { /** * Represents information about how a document matched the query */ interface Match { /** * Returns the type (field) of the matching term * e.g "entityId", "label" or "alias". * * @return type (field) of the match */ String getType(); /** * Returns the language of the matching term field. * * @return language of the match */ String getLanguage(); /** * Returns the text of the matching term. * * @return text of the match */ String getText(); } /** * Returns the id of the entity that the document refers to. * * @return the entity ID */ String getEntityId(); /** * Returns the full concept URI (the site IRI with entity ID). * * @return full concept URI */ String getConceptUri(); /** * The URL of the wiki site that shows the concept. * * @return wiki site URL */ String getUrl(); /** * Returns the title of the entity (currently the same as the entity ID). */ String getTitle(); /** * Returns the internal Mediawiki pageid of the entity. * * @return internal pageid */ long getPageId(); /** * Returns the label of the entity. * * The language of the returned label depends on the HTTP * * Accept-Language header or the uselang URL parameter. * * @return the label of the entity */ String getLabel(); /** * Returns the description of the entity * * The language of the returned description depends on the HTTP * * Accept-Language header or the uselang URL parameter. * * @return the description */ String getDescription(); /** * Returns detailed information about how a document matched the query. * * @return match information */ Match getMatch(); /** * A list of alias labels (returned only when an alias matched the query). * * @return a list of aliases */ List getAliases(); } WikibaseDataEditor.java000066400000000000000000001431731444772566300350200ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ package org.wikidata.wdtk.wikibaseapi; import static java.util.stream.Collectors.toList; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.wikidata.wdtk.datamodel.helpers.EntityUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.JsonSerializer; import org.wikidata.wdtk.datamodel.helpers.LabeledDocumentUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.LexemeUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementDocumentUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermedDocumentUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.AliasUpdate; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.LabeledStatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeIdValue; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementDocumentUpdate; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocument; import org.wikidata.wdtk.datamodel.interfaces.TermedStatementDocumentUpdate; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; /** * Class that provides high-level editing functionality for Wikibase data. * * @author Markus Kroetzsch * */ public class WikibaseDataEditor { /** * API Action to edit data. */ final WbEditingAction wbEditingAction; /** * Helper class to read data. Used for checking the state of the online data * before editing. */ final WikibaseDataFetcher wikibaseDataFetcher; /** * GUID generator, for editing actions that require generating fresh GUIDs * client-side. */ final GuidGenerator guidGenerator; /** * The IRI that identifies the site that the data is from. */ final String siteIri; /** * If true, the bot flag will be set for all edits. This will only have * effect when logged in with a user account that is in the bot group. */ boolean editAsBot = false; /** * Creates an object to edit data via the Web API of the given * {@link ApiConnection} object. The site URI is necessary to create data * objects from API responses, since it is not contained in the data * retrieved from the URI. 
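	 * <p>
	 * For example (sketch; {@code connection} stands for a previously created
	 * {@link ApiConnection}):
	 * <pre>{@code
	 * WikibaseDataEditor editor = new WikibaseDataEditor(connection,
	 *         "http://www.wikidata.org/entity/");
	 * }</pre>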
* * @param connection * ApiConnection * @param siteUri * the URI identifying the site that is accessed (usually the * prefix of entity URIs), e.g., * "http://www.wikidata.org/entity/" */ public WikibaseDataEditor(ApiConnection connection, String siteUri) { this.wbEditingAction = new WbEditingAction(connection, siteUri); this.wikibaseDataFetcher = new WikibaseDataFetcher(connection, siteUri); this.siteIri = siteUri; this.guidGenerator = new RandomGuidGenerator(); } /** * Creates an object to edit data via the Web API of the given * {@link ApiConnection} object. The site URI is necessary to create data * objects from API responses, since it is not contained in the data * retrieved from the URI. * * @param connection * ApiConnection * @param siteUri * the URI identifying the site that is accessed (usually the * prefix of entity URIs), e.g., * "http://www.wikidata.org/entity/" * @param generator * the generator to use when creating fresh GUIDs for statements, * snaks or references */ public WikibaseDataEditor(ApiConnection connection, String siteUri, GuidGenerator generator) { this.wbEditingAction = new WbEditingAction(connection, siteUri); this.wikibaseDataFetcher = new WikibaseDataFetcher(connection, siteUri); this.siteIri = siteUri; this.guidGenerator = generator; } WikibaseDataEditor(WbEditingAction action, WikibaseDataFetcher fetcher, String siteUri, GuidGenerator generator) { this.wbEditingAction = action; this.wikibaseDataFetcher = fetcher; this.siteIri = siteUri; this.guidGenerator = generator; } /** * Returns true if edits should be flagged as bot edits. See * {@link #setEditAsBot(boolean)} for details. * * @return whether to flag edits as bot */ public boolean editAsBot() { return this.editAsBot; } /** * Switches the use of the bot parameter on or of. When set to true, the bot * flag will be set for all edits. This will only have effect when logged in * with a user account that is in the bot group. Bot users should set this * to true in almost every case. * * @param editAsBot */ public void setEditAsBot(boolean editAsBot) { this.editAsBot = editAsBot; } /** * Returns the current value of the maxlag parameter. It specifies the * number of seconds. To save actions causing any more site replication lag, * this parameter can make the client wait until the replication lag is less * than the specified value. In case of excessive lag, error code "maxlag" * is returned upon API requests. * * @return current setting of the maxlag parameter */ public int getMaxLag() { return this.wbEditingAction.getMaxLag(); } /** * Set the value of the maxlag parameter. If unsure, keep the default. See * {@link WikibaseDataEditor#getMaxLag()} for details. * * @param maxLag * the new value in seconds */ public void setMaxLag(int maxLag) { this.wbEditingAction.setMaxLag(maxLag); } /** * Number of times we should retry if an editing action fails because * the lag is too high. */ public int getMaxLagMaxRetries() { return this.wbEditingAction.getMaxLagMaxRetries(); } /** * Number of times we should retry if an editing action fails because * the lag is too high. */ public void setMaxLagMaxRetries(int retries) { this.wbEditingAction.setMaxLagMaxRetries(retries); } /** * Initial wait time in milliseconds, when an edit fails for the first * time because of a high lag. This wait time is going to be multiplied * by maxLagBackOffFactor for the subsequent waits. 
*/ public int getMaxLagFirstWaitTime() { return this.wbEditingAction.getMaxLagFirstWaitTime(); } /** * Initial wait time in milliseconds, when an edit fails for the first * time because of a high lag. This wait time is going to be multiplied * by maxLagBackOffFactor for the subsequent waits. */ public void setMaxLagFirstWaitTime(int time) { this.wbEditingAction.setMaxLagFirstWaitTime(time); } /** * Factor by which the wait time between two maxlag retries should be * multiplied at each attempt. */ public double getMaxLagBackOffFactor() { return this.wbEditingAction.getMaxLagBackOffFactor(); } /** * Factor by which the wait time between two maxlag retries should be * multiplied at each attempt. */ public void setMaxLagBackOffFactor(double value) { this.wbEditingAction.setMaxLagBackOffFactor(value); } /** * Returns the average time that a single edit should take, measured in * milliseconds. See {@link WbEditingAction#getAverageTimePerEdit()} for * details. * * @return average time per edit in milliseconds */ public int getAverageTimePerEdit() { return this.wbEditingAction.getAverageTimePerEdit(); } /** * Sets the average time that a single edit should take, measured in * milliseconds. See {@link WbEditingAction#getAverageTimePerEdit()} for * details. * * @param milliseconds * the new value in milliseconds */ public void setAverageTimePerEdit(int milliseconds) { this.wbEditingAction.setAverageTimePerEdit(milliseconds); } /** * Returns the number of edits that will be performed before entering * simulation mode, or -1 if there is no limit on the number of edits * (default). See {@link WbEditingAction#getRemainingEdits()} for * details. * * @return number of remaining edits */ public int getRemainingEdits() { return this.wbEditingAction.getRemainingEdits(); } /** * Sets the number of edits that this object can still perform. See * {@link WbEditingAction#setRemainingEdits(int)} for details. * * @param remainingEdits * number of edits that can still be performed, or -1 to disable * this limit (default setting) */ public void setRemainingEdits(int remainingEdits) { this.wbEditingAction.setRemainingEdits(remainingEdits); } /** * Sets the remaining edits for this component to 0, so that all edits are * simulated but not actually send to the API. */ public void disableEditing() { this.wbEditingAction.setRemainingEdits(0); } private EntityDocument createDocument( String type, EntityDocument document, String summary, List tags) throws IOException, MediaWikiApiErrorException { String data = JsonSerializer.getJsonString(document); return this.wbEditingAction.wbEditEntity( null, null, null, type, data, false, editAsBot, 0, summary, tags); } /** * Creates new entity document. Provided entity document must use a local item ID, * such as {@link ItemIdValue#NULL}, and its revision ID must be 0. *
<p>
    * The newly created document is returned. It will contain the new item ID and * revision ID. Note that the site IRI used in the item ID is not part of the * API response. The site IRI given when constructing this object is used in its * place. *
<p>
    * Statements in the provided document must not have IDs. *
<p>
* Summary message will be prepended by an automatically generated comment. The * length limit of the autocomment together with the summary is 260 characters. * Everything above that limit will be cut off. * * @param document * document that contains the data to be written * @param summary * summary for the edit * @param tags * string identifiers of the tags to apply to the edit, {@code null} * or empty for no tags * @return newly created entity document or {@code null} for simulated edit (see * {@link #disableEditing()}) * @throws IOException * if there was an IO problem, such as missing network connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ public EntityDocument createEntityDocument( EntityDocument document, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { if (document instanceof ItemDocument) { return createItemDocument((ItemDocument) document, summary, tags); } else if (document instanceof PropertyDocument) { return createPropertyDocument((PropertyDocument) document, summary, tags); } else if (document instanceof LexemeDocument) { return createLexemeDocument((LexemeDocument) document, summary, tags); } else { throw new UnsupportedOperationException("Creation of entities of this type is not supported"); } } /** * Creates a new item document. The provided item document must use a local item ID, * such as {@link ItemIdValue#NULL}, and its revision ID must be 0. *
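<p>A minimal usage sketch (assuming an already constructed
	 * {@code WikibaseDataEditor editor}; the label text is illustrative):
	 * <pre>{@code
	 * ItemDocument draft = ItemDocumentBuilder.forItemId(ItemIdValue.NULL)
	 * 		.withLabel("Example item", "en")
	 * 		.build();
	 * ItemDocument created = editor.createItemDocument(draft, "Creating a test item", null);
	 * }</pre>
	 *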
<p>
    * The newly created document is returned. It will contain the new item ID and * revision ID. Note that the site IRI used in the item ID is not part of the * API response. The site IRI given when constructing this object is used in its * place. *
<p>
    * Statements in the provided document must not have IDs. *
<p>
* Summary message will be prepended by an automatically generated comment. The * length limit of the autocomment together with the summary is 260 characters. * Everything above that limit will be cut off. * * @param document * document that contains the data to be written * @param summary * summary for the edit * @param tags * string identifiers of the tags to apply to the edit, {@code null} * or empty for no tags * @return newly created item document or {@code null} for simulated edit (see * {@link #disableEditing()}) * @throws IOException * if there was an IO problem, such as missing network connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ public ItemDocument createItemDocument( ItemDocument document, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { return (ItemDocument) createDocument("item", document, summary, tags); } /** * Creates a new property document. The provided property document must use a local * property ID, such as {@link PropertyIdValue#NULL}, and its revision ID must * be 0. *
<p>
    * The newly created document is returned. It will contain the new property ID * and revision ID. Note that the site IRI used in the property ID is not part * of the API response. The site IRI given when constructing this object is used * in its place. *
<p>
    * Statements in the provided document must not have IDs. *
<p>
* Summary message will be prepended by an automatically generated comment. The * length limit of the autocomment together with the summary is 260 characters. * Everything above that limit will be cut off. * * @param document * document that contains the data to be written * @param summary * summary for the edit * @param tags * string identifiers of the tags to apply to the edit, {@code null} * or empty for no tags * @return newly created property document or {@code null} for simulated edit * (see {@link #disableEditing()}) * @throws IOException * if there was an IO problem, such as missing network connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ public PropertyDocument createPropertyDocument( PropertyDocument document, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { return (PropertyDocument) createDocument("property", document, summary, tags); } /** * Creates a new lexeme document. The provided lexeme document must use a local lexeme * ID, such as {@link LexemeIdValue#NULL}, and its revision ID must be 0. *
<p>
    * The newly created document is returned. It will contain the new lexeme ID and * revision ID. Note that the site IRI used in the lexeme ID is not part of the * API response. The site IRI given when constructing this object is used in its * place. *
<p>
    * Statements, senses, and forms in the provided document must not have IDs. *
<p>
* Summary message will be prepended by an automatically generated comment. The * length limit of the autocomment together with the summary is 260 characters. * Everything above that limit will be cut off. * * @param document * document that contains the data to be written * @param summary * summary for the edit * @param tags * string identifiers of the tags to apply to the edit, {@code null} * or empty for no tags * @return newly created lexeme document or {@code null} for simulated edit (see * {@link #disableEditing()}) * @throws IOException * if there was an IO problem, such as missing network connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ public LexemeDocument createLexemeDocument( LexemeDocument document, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { return (LexemeDocument) createDocument("lexeme", document, summary, tags); } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Writes the data for the given item document with the summary message as * given. Optionally, the existing data is cleared (deleted). *
<p>
    * The id of the given item document is used to specify which item document * should be changed. The site IRI will be ignored for this. *
<p>
    * The revision id of the given item document is used to specify the base * revision, enabling the API to detect edit conflicts. The value 0 can be * used to omit this. It is strongly recommended to give a revision id when * making edits where the outcome depends on the previous state of the data * (i.e., any edit that does not use "clear"). *
<p>
* If the data is not cleared, then the existing data will largely be * preserved. Statements with empty ids will be added without checking if * they exist already; statements with (valid) ids will replace any existing * statements with these ids or just be added if there are none. Labels, * descriptions, and aliases will be preserved for all languages for which * no data is given at all. For aliases this means that writing one alias in * a language will overwrite all aliases in this language, so some care is * needed. * * @param itemDocument * the document that contains the data to be written * @param clear * if true, the existing data will be replaced by the given data; * if false, the given data will be added to the existing data, * overwriting only parts that are set to new values * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * @return the modified item document, or null if there was an error * @throws IOException * if there was an IO problem, such as missing network * connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ @Deprecated public ItemDocument editItemDocument(ItemDocument itemDocument, boolean clear, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { String data = JsonSerializer.getJsonString(itemDocument); return (ItemDocument) this.wbEditingAction.wbEditEntity(itemDocument .getEntityId().getId(), null, null, null, data, clear, this.editAsBot, itemDocument.getRevisionId(), summary, tags); } /** * Updates an {@link EntityDocument} entity. The ID of the entity to update is taken * from the update object. Its site IRI is ignored. No action is taken if the * update is empty. *
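<p>A minimal usage sketch (assuming an already constructed
	 * {@code WikibaseDataEditor editor}; the entity ID and label are illustrative):
	 * <pre>{@code
	 * ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q4115189");
	 * ItemUpdate update = ItemUpdateBuilder.forEntityId(id)
	 * 		.updateLabels(TermUpdateBuilder.create()
	 * 				.put(Datamodel.makeMonolingualTextValue("Sandbox", "en"))
	 * 				.build())
	 * 		.build();
	 * editor.editEntityDocument(update, false, "Setting the English label", null);
	 * }</pre>
	 *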
<p>
* If the update object references the base revision of the document, its revision * ID is used to specify the base revision in the API request, enabling the API * to detect edit conflicts. It is strongly recommended to specify the base revision * document in the update object. *
<p>
* Summary message will be prepended by an automatically generated comment. The * length limit of the autocomment together with the summary is 260 characters. * Everything above that limit will be cut off. * * @param update * collection of changes to be written * @param clear * if set to {@code true}, existing entity data will be removed and * the update will be applied to the empty entity * @param summary * summary for the edit * @param tags * string identifiers of the tags to apply to the edit, {@code null} * or empty for no tags * @return information about the applied edit, including the new revision id * @throws IOException * if there was an IO problem, such as missing network connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ public EditingResult editEntityDocument( EntityUpdate update, boolean clear, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { long revisionId = update.getBaseRevisionId(); if (!clear) { if (update.isEmpty()) return new EditingResult(0L); /* Fast paths: when the update amounts to a single statement or term change, use the dedicated API actions instead of wbeditentity. */ if (update instanceof StatementDocumentUpdate) { StatementDocumentUpdate typed = (StatementDocumentUpdate) update; if (typed.getStatements().getAdded().size() == 1) { StatementDocumentUpdateBuilder builder = StatementDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); Statement statement = typed.getStatements().getAdded().stream().findFirst().get(); builder.updateStatements(StatementUpdateBuilder.create().add(statement).build()); if (builder.build().equals(update)) { String statementId = guidGenerator.freshStatementId(typed.getEntityId().getId()); Statement prepared = statement.withStatementId(statementId); JsonNode response = wbEditingAction.wbSetClaim(JsonSerializer.getJsonString(prepared), editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } if (typed.getStatements().getReplaced().size() == 1) { StatementDocumentUpdateBuilder builder = StatementDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); Statement statement = typed.getStatements().getReplaced().values().stream().findFirst().get(); builder.updateStatements(StatementUpdateBuilder.create().replace(statement).build()); if (builder.build().equals(update)) { JsonNode response = wbEditingAction.wbSetClaim(JsonSerializer.getJsonString(statement), editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } if (!typed.getStatements().getRemoved().isEmpty() && typed.getStatements().getRemoved().size() <= 50) { StatementDocumentUpdateBuilder builder = StatementDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); List<String> statementIds = new ArrayList<>(typed.getStatements().getRemoved()); StatementUpdateBuilder statementBuilder = StatementUpdateBuilder.create(); for (String statementId : statementIds) { statementBuilder.remove(statementId); } builder.updateStatements(statementBuilder.build()); if (builder.build().equals(update)) { JsonNode response = wbEditingAction.wbRemoveClaims(statementIds, editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } } if (update instanceof LabeledStatementDocumentUpdate) { LabeledStatementDocumentUpdate typed = (LabeledStatementDocumentUpdate) update; if (typed.getLabels().getModified().size() == 1) { LabeledDocumentUpdateBuilder builder = LabeledDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); MonolingualTextValue label =
typed.getLabels().getModified().values().stream().findFirst().get(); builder.updateLabels(TermUpdateBuilder.create().put(label).build()); if (builder.build().equals(update)) { JsonNode response = wbEditingAction.wbSetLabel(update.getEntityId().getId(), null, null, null, label.getLanguageCode(), label.getText(), editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } if (typed.getLabels().getRemoved().size() == 1) { LabeledDocumentUpdateBuilder builder = LabeledDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); String language = typed.getLabels().getRemoved().stream().findFirst().get(); builder.updateLabels(TermUpdateBuilder.create().remove(language).build()); if (builder.build().equals(update)) { JsonNode response = wbEditingAction.wbSetLabel(update.getEntityId().getId(), null, null, null, language, null, editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } } if (update instanceof TermedStatementDocumentUpdate) { TermedStatementDocumentUpdate typed = (TermedStatementDocumentUpdate) update; if (typed.getDescriptions().getModified().size() == 1) { TermedDocumentUpdateBuilder builder = TermedDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); MonolingualTextValue description = typed.getDescriptions().getModified() .values().stream().findFirst().get(); builder.updateDescriptions(TermUpdateBuilder.create().put(description).build()); if (builder.build().equals(update)) { JsonNode response = wbEditingAction.wbSetDescription(update.getEntityId().getId(), null, null, null, description.getLanguageCode(), description.getText(), editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } if (typed.getDescriptions().getRemoved().size() == 1) { TermedDocumentUpdateBuilder builder = TermedDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); String language = typed.getDescriptions().getRemoved().stream().findFirst().get(); builder.updateDescriptions(TermUpdateBuilder.create().remove(language).build()); if (builder.build().equals(update)) { JsonNode response = wbEditingAction.wbSetDescription(update.getEntityId().getId(), null, null, null, language, null, editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } if (typed.getAliases().size() == 1) { TermedDocumentUpdateBuilder builder = TermedDocumentUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); String language = typed.getAliases().keySet().stream().findFirst().get(); AliasUpdate aliases = typed.getAliases().get(language); builder.updateAliases(language, aliases); if (builder.build().equals(update)) { List<String> added = !aliases.getAdded().isEmpty() ? aliases.getAdded().stream().map(a -> a.getText()).collect(toList()) : null; List<String> removed = !aliases.getRemoved().isEmpty() ?
aliases.getRemoved().stream().map(a -> a.getText()).collect(toList()) : null; List<String> recreated = aliases.getRecreated() .map(l -> l.stream().map(a -> a.getText()).collect(toList())) .orElse(null); JsonNode response = wbEditingAction.wbSetAliases(update.getEntityId().getId(), null, null, null, language, added, removed, recreated, editAsBot, revisionId, summary, tags); return new EditingResult(getRevisionIdFromResponse(response)); } } } if (update instanceof LexemeUpdate) { LexemeUpdate typed = (LexemeUpdate) update; if (typed.getUpdatedSenses().size() == 1) { LexemeUpdateBuilder builder = LexemeUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); SenseUpdate sense = typed.getUpdatedSenses().values().stream().findFirst().get(); builder.updateSense(sense); if (builder.build().equals(update)) { return editEntityDocument(sense, false, summary, tags); } } if (typed.getUpdatedForms().size() == 1) { LexemeUpdateBuilder builder = LexemeUpdateBuilder .forBaseRevisionId(typed.getEntityId(), typed.getBaseRevisionId()); FormUpdate form = typed.getUpdatedForms().values().stream().findFirst().get(); builder.updateForm(form); if (builder.build().equals(update)) { return editEntityDocument(form, false, summary, tags); } } } } /* General case: send the whole update through wbeditentity. */ String data = JsonSerializer.getJsonString(update); EntityDocument document = wbEditingAction.wbEditEntity( update.getEntityId().getId(), null, null, null, data, clear, editAsBot, revisionId, summary, tags); return new EditingResult(document.getRevisionId()); } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Writes the data for the given property document with the summary message * as given. Optionally, the existing data is cleared (deleted). *
<p>
    * The id of the given property document is used to specify which property * document should be changed. The site IRI will be ignored for this. *
<p>
    * The revision id of the given property document is used to specify the * base revision, enabling the API to detect edit conflicts. The value 0 can * be used to omit this. It is strongly recommended to give a revision id * when making edits where the outcome depends on the previous state of the * data (i.e., any edit that does not use "clear"). *
<p>
* If the data is not cleared, then the existing data will largely be * preserved. Statements with empty ids will be added without checking if * they exist already; statements with (valid) ids will replace any existing * statements with these ids or just be added if there are none. Labels, * descriptions, and aliases will be preserved for all languages for which * no data is given at all. For aliases this means that writing one alias in * a language will overwrite all aliases in this language, so some care is * needed. * * @param propertyDocument * the document that contains the data to be written * @param clear * if true, the existing data will be replaced by the given data; * if false, the given data will be added to the existing data, * overwriting only parts that are set to new values * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * @return the modified property document, or null if there was an error * @throws IOException * if there was an IO problem, such as missing network * connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ @Deprecated public PropertyDocument editPropertyDocument( PropertyDocument propertyDocument, boolean clear, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { String data = JsonSerializer.getJsonString(propertyDocument); return (PropertyDocument) this.wbEditingAction.wbEditEntity( propertyDocument.getEntityId().getId(), null, null, null, data, clear, this.editAsBot, propertyDocument.getRevisionId(), summary, tags); } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Writes the data for the given media info document with the summary message * as given. Optionally, the existing data is cleared (deleted). * It creates the media info if needed. *
<p>
    * The id of the given media info document is used to specify which media info * document should be changed or created. The site IRI will be ignored for this. *
<p>
    * The revision id of the given media info document is used to specify the * base revision, enabling the API to detect edit conflicts. The value 0 can * be used to omit this. It is strongly recommended to give a revision id * when making edits where the outcome depends on the previous state of the * data (i.e., any edit that does not use "clear"). *
<p>
* If the data is not cleared, then the existing data will largely be * preserved. Statements with empty ids will be added without checking if * they exist already; statements with (valid) ids will replace any existing * statements with these ids or just be added if there are none. Labels * will be preserved for all languages for which no data is given at all. * * @param mediaInfoDocument * the document that contains the data to be written * @param clear * if true, the existing data will be replaced by the given data; * if false, the given data will be added to the existing data, * overwriting only parts that are set to new values * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * @return the modified media info document, or null if there was an error * @throws IOException * if there was an IO problem, such as missing network * connection * @throws MediaWikiApiErrorException * if MediaWiki API returned an error response */ @Deprecated public MediaInfoDocument editMediaInfoDocument( MediaInfoDocument mediaInfoDocument, boolean clear, String summary, List<String> tags) throws IOException, MediaWikiApiErrorException { String data = JsonSerializer.getJsonString(mediaInfoDocument); return (MediaInfoDocument) this.wbEditingAction.wbEditEntity( mediaInfoDocument.getEntityId().getId(), null, null, null, data, clear, this.editAsBot, mediaInfoDocument.getRevisionId(), summary, tags); } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Updates the statements of the item document identified by the given item * id. The updates are computed with respect to the current data found * online, making sure that no redundant deletions or duplicate insertions * happen. The references of duplicate statements will be merged. * * @param itemIdValue * id of the document to be updated * @param addStatements * the list of statements to be added or updated; statements with * empty statement id will be added; statements with non-empty * statement id will be updated (if such a statement exists) * @param deleteStatements * the list of statements to be deleted; statements will only be * deleted if they are present in the current document (in * exactly the same form, with the same id) * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * @return the updated document * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are IO problems, such as missing network connection */ @Deprecated public ItemDocument updateStatements(ItemIdValue itemIdValue, List<Statement> addStatements, List<Statement> deleteStatements, String summary, List<String> tags) throws MediaWikiApiErrorException, IOException { ItemDocument currentDocument = (ItemDocument) this.wikibaseDataFetcher .getEntityDocument(itemIdValue.getId()); return updateStatements(currentDocument, addStatements, deleteStatements, summary, tags); } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Updates the terms and statements of the item document identified by the * given item id.
The updates are computed with respect to the current data * found online, making sure that no redundant deletions or duplicate insertions * happen. The references of duplicate statements will be merged. The labels * and aliases in a given language are kept distinct. * * @param itemIdValue * id of the document to be updated * @param addLabels * labels to be set on the item. They will overwrite existing values * in the same language. * @param addDescriptions * descriptions to be set on the item. They will overwrite existing values * in the same language. * @param addAliases * aliases to be added. Existing aliases will be kept. * @param deleteAliases * aliases to be deleted. * @param addStatements * the list of statements to be added or updated; statements with * empty statement id will be added; statements with non-empty * statement id will be updated (if such a statement exists) * @param deleteStatements * the list of statements to be deleted; statements will only be * deleted if they are present in the current document (in * exactly the same form, with the same id) * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * @return the updated document * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are any IO errors, such as missing network connection */ @Deprecated public ItemDocument updateTermsStatements(ItemIdValue itemIdValue, List<MonolingualTextValue> addLabels, List<MonolingualTextValue> addDescriptions, List<MonolingualTextValue> addAliases, List<MonolingualTextValue> deleteAliases, List<Statement> addStatements, List<Statement> deleteStatements, String summary, List<String> tags) throws MediaWikiApiErrorException, IOException { ItemDocument currentDocument = (ItemDocument) this.wikibaseDataFetcher .getEntityDocument(itemIdValue.getId()); return updateTermsStatements(currentDocument, addLabels, addDescriptions, addAliases, deleteAliases, addStatements, deleteStatements, summary, tags); } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Updates the statements of the property document identified by the given * property id. The computation of updates is the same as for * {@link #updateStatements(ItemIdValue, List, List, String, List)}. * * @param propertyIdValue * id of the document to be updated * @param addStatements * the list of statements to be added or updated; statements with * empty statement id will be added; statements with non-empty * statement id will be updated (if such a statement exists) * @param deleteStatements * the list of statements to be deleted; statements will only be * deleted if they are present in the current document (in * exactly the same form, with the same id) * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit.
* @return the updated document * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are IO problems, such as missing network connection */ @Deprecated public PropertyDocument updateStatements(PropertyIdValue propertyIdValue, List<Statement> addStatements, List<Statement> deleteStatements, String summary, List<String> tags) throws MediaWikiApiErrorException, IOException { PropertyDocument currentDocument = (PropertyDocument) this.wikibaseDataFetcher .getEntityDocument(propertyIdValue.getId()); return updateStatements(currentDocument, addStatements, deleteStatements, summary, tags); } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Updates statements of the given document. The document should be the * current revision of the data that is to be updated. The updates are * computed with respect to the data found in the document, making sure that * no redundant deletions or duplicate insertions happen. The references of * duplicate statements will be merged. *
<p>
* The generic type T of this method must be a general interface such as * {@link ItemDocument}, {@link PropertyDocument}, or * {@link StatementDocument}. Specific implementations of these interfaces * are not permitted. * * @param currentDocument * the document that is to be updated; needs to have a correct * revision id and entity id * @param addStatements * the list of statements to be added or updated; statements with * empty statement id will be added; statements with non-empty * statement id will be updated (if such a statement exists) * @param deleteStatements * the list of statements to be deleted; statements will only be * deleted if they are present in the current document (in * exactly the same form, with the same id) * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * Ignored if null or empty. * @return the updated document * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are IO problems, such as missing network connection */ @SuppressWarnings("unchecked") @Deprecated public <T extends StatementDocument> T updateStatements(T currentDocument, List<Statement> addStatements, List<Statement> deleteStatements, String summary, List<String> tags) throws MediaWikiApiErrorException, IOException { StatementUpdate statementUpdate = new StatementUpdate(currentDocument, addStatements, deleteStatements); statementUpdate.setGuidGenerator(guidGenerator); if (statementUpdate.isEmptyEdit()) { return currentDocument; } else { return (T) this.wbEditingAction.wbEditEntity(currentDocument .getEntityId().getId(), null, null, null, statementUpdate .getJsonUpdateString(), false, this.editAsBot, currentDocument .getRevisionId(), summary, tags); } } /** * @deprecated Use {@link #editEntityDocument(EntityUpdate, boolean, String, List)} instead. * Updates the terms and statements of the current document. * The updates are computed with respect to the current data in the document, * making sure that no redundant deletions or duplicate insertions * happen. The references of duplicate statements will be merged. The labels * and aliases in a given language are kept distinct. * * @param currentDocument * the document to be updated; needs to have a correct revision id and * entity id * @param addLabels * labels to be set on the item. They will overwrite existing values * in the same language. * @param addDescriptions * descriptions to be set on the item. They will overwrite existing values * in the same language. * @param addAliases * aliases to be added. Existing aliases will be kept. * @param deleteAliases * aliases to be deleted. * @param addStatements * the list of statements to be added or updated; statements with * empty statement id will be added; statements with non-empty * statement id will be updated (if such a statement exists) * @param deleteStatements * the list of statements to be deleted; statements will only be * deleted if they are present in the current document (in * exactly the same form, with the same id) * @param summary * summary for the edit; will be prepended by an automatically * generated comment; the length limit of the autocomment * together with the summary is 260 characters: everything above * that limit will be cut off * @param tags * string identifiers of the tags to apply to the edit. * Ignored if null or empty.
* @return the updated document * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are any IO errors, such as missing network connection */ @SuppressWarnings("unchecked") @Deprecated public <T extends TermedStatementDocument> T updateTermsStatements(T currentDocument, List<MonolingualTextValue> addLabels, List<MonolingualTextValue> addDescriptions, List<MonolingualTextValue> addAliases, List<MonolingualTextValue> deleteAliases, List<Statement> addStatements, List<Statement> deleteStatements, String summary, List<String> tags) throws MediaWikiApiErrorException, IOException { TermStatementUpdate termStatementUpdate = new TermStatementUpdate( currentDocument, addStatements, deleteStatements, addLabels, addDescriptions, addAliases, deleteAliases); termStatementUpdate.setGuidGenerator(guidGenerator); return (T) termStatementUpdate.performEdit(wbEditingAction, editAsBot, summary, tags); } /** * Performs a null edit on an entity. This has some effects on Wikibase, such as * refreshing the labels of the referred items in the UI. * * @param entityId * the id of the entity to perform a null edit on * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are any IO errors, such as missing network connection */ public void nullEdit(EntityIdValue entityId) throws IOException, MediaWikiApiErrorException { nullEdit(wikibaseDataFetcher.getEntityDocument(entityId.getId())); } /** * @deprecated Use {@link #nullEdit(EntityIdValue)} instead. * Performs a null edit on an item. This has some effects on Wikibase, * such as refreshing the labels of the referred items in the UI. * * @param itemId * the id of the item to perform a null edit on * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are any IO errors, such as missing network connection */ @Deprecated public void nullEdit(ItemIdValue itemId) throws IOException, MediaWikiApiErrorException { ItemDocument currentDocument = (ItemDocument) this.wikibaseDataFetcher .getEntityDocument(itemId.getId()); nullEdit(currentDocument); } /** * @deprecated Use {@link #nullEdit(EntityIdValue)} instead. * Performs a null edit on a property. This has some effects on Wikibase, * such as refreshing the labels of the referred items in the UI. * * @param propertyId * the id of the property to perform a null edit on * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are any IO errors, such as missing network connection */ @Deprecated public void nullEdit(PropertyIdValue propertyId) throws IOException, MediaWikiApiErrorException { PropertyDocument currentDocument = (PropertyDocument) this.wikibaseDataFetcher .getEntityDocument(propertyId.getId()); nullEdit(currentDocument); } /** * Performs a null edit on an entity. This has some effects on Wikibase, such as * refreshing the labels of the referred items in the UI.
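* <p>A minimal usage sketch (assuming an already constructed
	 * {@code WikibaseDataEditor editor} and {@code WikibaseDataFetcher fetcher};
	 * the entity ID is illustrative):
	 * <pre>{@code
	 * EntityDocument current = fetcher.getEntityDocument("Q4115189");
	 * editor.nullEdit(current);
	 * }</pre>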
* * @param currentDocument * the document to perform a null edit on * @return new version of the document returned by Wikibase API * @throws MediaWikiApiErrorException * if the API returns errors * @throws IOException * if there are any IO errors, such as missing network connection */ @SuppressWarnings("unchecked") public <T extends EntityDocument> T nullEdit(T currentDocument) throws IOException, MediaWikiApiErrorException { EntityUpdate update = EntityUpdateBuilder.forBaseRevision(currentDocument).build(); return (T) wbEditingAction.wbEditEntity(currentDocument.getEntityId().getId(), null, null, null, JsonSerializer.getJsonString(update), false, editAsBot, currentDocument.getRevisionId(), null, null); } /** * Extracts the last revision id from the JSON response returned * by the API after an edit. * * @param response * the response as returned by Mediawiki * @return * the new revision id of the edited entity * @throws JsonProcessingException */ protected long getRevisionIdFromResponse(JsonNode response) throws JsonProcessingException { if(response == null) { throw new MalformedResponseException("API response is null"); } JsonNode entity = null; if(response.has("entity")) { entity = response.path("entity"); } else if(response.has("pageinfo")) { entity = response.path("pageinfo"); } if(entity != null && entity.has("lastrevid")) { return entity.path("lastrevid").asLong(); } throw new MalformedResponseException("The last revision id could not be found in API response"); } } WikibaseDataFetcher.java000066400000000000000000000451431444772566300351500ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import java.util.*; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.DocumentDataFilter; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; /** * Simple class to fetch data from Wikibase via the online API. * * @author Markus Kroetzsch * @author Michael Guenther */ public class WikibaseDataFetcher { /** * API Action to fetch data. */ final WbGetEntitiesAction wbGetEntitiesAction; final WbSearchEntitiesAction wbSearchEntitiesAction; final MediaInfoIdQueryAction mediaInfoIdQueryAction; /** * The IRI that identifies the site that the data is from. */ final String siteIri; /** * Filter that is used to restrict API requests. */ private final DocumentDataFilter filter = new DocumentDataFilter(); /** * Maximal value for the size of a list that can be processed by the * Wikibase API in one cycle. */ int maxListSize = 50; /** * Creates an object to fetch data from wikidata.org. This convenience * method creates a default {@link ApiConnection} that is not logged in.
To * use an existing connection, the constructor * {@link #WikibaseDataFetcher(ApiConnection, String)} should be called, * using {@link Datamodel#SITE_WIKIDATA} as a site URI. */ public static WikibaseDataFetcher getWikidataDataFetcher() { return new WikibaseDataFetcher( BasicApiConnection.getWikidataApiConnection(), Datamodel.SITE_WIKIDATA); } /** * Creates an object to fetch data from commons.wikimedia.org. This convenience * method creates a default {@link ApiConnection} that is not logged in. To * use an existing connection, the constructor * {@link #WikibaseDataFetcher(ApiConnection, String)} should be called, * using {@link Datamodel#SITE_WIKIMEDIA_COMMONS} as a site URI. */ public static WikibaseDataFetcher getWikimediaCommonsDataFetcher() { return new WikibaseDataFetcher( BasicApiConnection.getWikimediaCommonsApiConnection(), Datamodel.SITE_WIKIMEDIA_COMMONS); } /** * Creates an object to fetch data from API with the given * {@link ApiConnection} object. The site URI is necessary since it is not * contained in the data retrieved from the URI. * * @param connection * ApiConnection * @param siteUri * the URI identifying the site that is accessed (usually the * prefix of entity URIs), e.g., * "http://www.wikidata.org/entity/" */ public WikibaseDataFetcher(ApiConnection connection, String siteUri) { this.wbGetEntitiesAction = new WbGetEntitiesAction(connection, siteUri); this.wbSearchEntitiesAction = new WbSearchEntitiesAction(connection, siteUri); this.mediaInfoIdQueryAction = new MediaInfoIdQueryAction(connection, siteUri); this.siteIri = siteUri; } /** * Returns the {@link DocumentDataFilter} object that is used to filter API * requests. Settings made in this object will affect the API request, and * often lead to reduced network traffic and better performance. *
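<p>A minimal configuration sketch (the setter names refer to the
	 * {@link DocumentDataFilter} API; the language code is illustrative):
	 * <pre>{@code
	 * WikibaseDataFetcher fetcher = WikibaseDataFetcher.getWikidataDataFetcher();
	 * fetcher.getFilter().setLanguageFilter(Collections.singleton("en"));
	 * fetcher.getFilter().setSiteLinkFilter(Collections.emptySet());
	 * }</pre>
	 *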
<p>
* Note: Filtering individual properties is currently not supported (such * filters will be ignored). However, filtering all properties is possible; * in this case all statements are excluded. * * @return the filter used by this object */ public DocumentDataFilter getFilter() { return this.filter; } /** * Fetches the document for the entity of the given string ID. The result * is an {@link EntityDocument} or null if the data could not be fetched. * * @param entityId * string ID (e.g., "P31" or "Q42") of the requested entity * @return retrieved entity document or null * @throws MediaWikiApiErrorException * @throws IOException */ public EntityDocument getEntityDocument(String entityId) throws MediaWikiApiErrorException, IOException { return getEntityDocuments(entityId).get(entityId); } /** * Fetches the documents for the entities of the given string IDs. The * result is a map from entity IDs to {@link EntityDocument} objects. It is * possible that a requested ID could not be found: then this key is not set * in the map. * * @param entityIds * string IDs (e.g., "P31", "Q42") of requested entities * @return map from IDs for which data could be found to the documents that * were retrieved * @throws MediaWikiApiErrorException * @throws IOException */ public Map<String, EntityDocument> getEntityDocuments(String... entityIds) throws MediaWikiApiErrorException, IOException { return getEntityDocuments(Arrays.asList(entityIds)); } /** * Fetches the documents for the entities of the given string IDs. The * result is a map from entity IDs to {@link EntityDocument} objects. It is * possible that a requested ID could not be found: then this key is not set * in the map. * * @param entityIds * list of string IDs (e.g., "P31", "Q42") of requested entities * @return map from IDs for which data could be found to the documents that * were retrieved * @throws MediaWikiApiErrorException * @throws IOException */ public Map<String, EntityDocument> getEntityDocuments(List<String> entityIds) throws MediaWikiApiErrorException, IOException { Map<String, EntityDocument> result = new HashMap<>(); List<String> newEntityIds = new ArrayList<>(entityIds); boolean moreItems = !newEntityIds.isEmpty(); while (moreItems) { List<String> subListOfEntityIds; if (newEntityIds.size() <= maxListSize) { subListOfEntityIds = newEntityIds; moreItems = false; } else { subListOfEntityIds = newEntityIds.subList(0, maxListSize); } WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.ids = ApiConnection.implodeObjects(subListOfEntityIds); result.putAll(getEntityDocumentMap(subListOfEntityIds.size(), properties)); subListOfEntityIds.clear(); } return result; } /** * Fetches the document for the entity that has a page of the given title on * the given site. Site keys should be some site identifier known to the * Wikibase site that is queried, such as "enwiki" for Wikidata.org. * * It could also be used to retrieve Wikimedia Commons MediaInfo entities * using the siteKey "commonswiki" and the file title (with the File: prefix) for title. *
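<p>A minimal usage sketch (the title is illustrative):
	 * <pre>{@code
	 * WikibaseDataFetcher fetcher = WikibaseDataFetcher.getWikidataDataFetcher();
	 * EntityDocument doc = fetcher.getEntityDocumentByTitle("enwiki", "Douglas Adams");
	 * }</pre>
	 *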
<p>
* Note: This method will not work properly if a filter is set for sites * that excludes the requested site. * * @param siteKey * wiki site id, e.g., "enwiki" * @param title * string title (e.g., "Douglas Adams") of the requested entity * @return document for the entity with this title, or null if no such * document exists * @throws MediaWikiApiErrorException * @throws IOException */ public EntityDocument getEntityDocumentByTitle(String siteKey, String title) throws MediaWikiApiErrorException, IOException { return getEntityDocumentsByTitle(siteKey, title).get(title); } /** * Fetches the documents for the entities that have pages of the given * titles on the given site. Site keys should be some site identifier known * to the Wikibase site that is queried, such as "enwiki" for Wikidata.org. *
<p>
* Note: This method will not work properly if a filter is set for sites * that excludes the requested site. * * @param siteKey * wiki site id, e.g. "enwiki" * @param titles * list of string titles (e.g. "Douglas Adams") of requested * entities * @return map from titles for which data could be found to the documents * that were retrieved * @throws MediaWikiApiErrorException * @throws IOException */ public Map<String, EntityDocument> getEntityDocumentsByTitle( String siteKey, String... titles) throws MediaWikiApiErrorException, IOException { return getEntityDocumentsByTitle(siteKey, Arrays.asList(titles)); } /** * Fetches the documents for the entities that have pages of the given * titles on the given site. Site keys should be some site identifier known * to the Wikibase site that is queried, such as "enwiki" for Wikidata.org. *
<p>
* Note: This method will not work properly if a filter is set for sites * that excludes the requested site. * * @param siteKey * wiki site id, e.g. "enwiki" * @param titles * list of string titles (e.g. "Douglas Adams") of requested * entities * @return map from titles for which data could be found to the documents * that were retrieved * @throws MediaWikiApiErrorException * @throws IOException */ public Map<String, EntityDocument> getEntityDocumentsByTitle( String siteKey, List<String> titles) throws MediaWikiApiErrorException, IOException { List<String> newTitles = new ArrayList<>(titles); Map<String, EntityDocument> result = new HashMap<>(); boolean moreItems = !newTitles.isEmpty(); while (moreItems) { List<String> subListOfTitles; if (newTitles.size() <= maxListSize) { subListOfTitles = newTitles; moreItems = false; } else { subListOfTitles = newTitles.subList(0, maxListSize); } WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.titles = ApiConnection.implodeObjects(subListOfTitles); properties.sites = siteKey; result.putAll(getEntityDocumentMap(subListOfTitles.size(), properties)); subListOfTitles.clear(); } return result; } /** * Fetches the MediaInfoId of a file with the given name. * * This method only works with file names (e.g. "File:Albert Einstein Head.jpg"). * The "File:" prefix can be omitted; in this case, it will be added automatically during processing. * For example, "Albert Einstein Head.jpg" will be processed as "File:Albert Einstein Head.jpg". * * Notice that pages other than file pages will also be prefixed with "File:". * For example, "Main Page" will be processed as "File:Main Page", which doesn't exist. * So always make sure you are dealing with a file name. * * Use this method to speed things up if you only need the id information, * i.e. you don't need other information like labels, descriptions, statements, etc. * Otherwise, you may need to use * {@link WikibaseDataFetcher#getEntityDocumentByTitle(String siteKey, String title)} * * @param fileName * file name (e.g. "File:Albert Einstein Head.jpg" or "Albert Einstein Head.jpg") * of the requested MediaInfoId, the "File:" prefix can be omitted * @return the corresponding MediaInfoId for the file name, or null if not found * @throws IOException * @throws MediaWikiApiErrorException */ public MediaInfoIdValue getMediaInfoIdByFileName(String fileName) throws IOException, MediaWikiApiErrorException { return getMediaInfoIdsByFileName(fileName).get(fileName); } /** * Fetches the MediaInfoIds of files with the given names. * * This method only works with file names (e.g. "File:Albert Einstein Head.jpg"). * The "File:" prefix can be omitted; in this case, it will be added automatically during processing. * For example, "Albert Einstein Head.jpg" will be processed as "File:Albert Einstein Head.jpg". * * Notice that pages other than file pages will also be prefixed with "File:". * For example, "Main Page" will be processed as "File:Main Page", which doesn't exist. * So always make sure you are dealing with a file name. * * Use this method to speed things up if you only need the id information, * i.e. you don't need other information like labels, descriptions, statements, etc. * Otherwise, you may need to use * {@link WikibaseDataFetcher#getEntityDocumentsByTitle(String siteKey, String...
titles)} * * @param fileNames * list of file names of the requested MediaInfoIds * @return map from file names for which data could be found to the MediaInfoIds * that were retrieved * @throws IOException * @throws MediaWikiApiErrorException */ public Map<String, MediaInfoIdValue> getMediaInfoIdsByFileName(String... fileNames) throws IOException, MediaWikiApiErrorException { return getMediaInfoIdsByFileName(Arrays.asList(fileNames)); } /** * Fetches the MediaInfoIds of files with the given names. * * This method only works with file names (e.g. "File:Albert Einstein Head.jpg"). * The "File:" prefix can be omitted; in this case, it will be added automatically during processing. * For example, "Albert Einstein Head.jpg" will be processed as "File:Albert Einstein Head.jpg". * * Notice that pages other than file pages will also be prefixed with "File:". * For example, "Main Page" will be processed as "File:Main Page", which doesn't exist. * So always make sure you are dealing with a file name. * * Use this method to speed things up if you only need the id information, * i.e. you don't need other information like labels, descriptions, statements, etc. * Otherwise, you may need to use * {@link WikibaseDataFetcher#getEntityDocumentsByTitle(String siteKey, List titles)} * * @param fileNames * list of file names of the requested MediaInfoIds * @return map from file names for which data could be found to the MediaInfoIds * that were retrieved * @throws IOException * @throws MediaWikiApiErrorException */ public Map<String, MediaInfoIdValue> getMediaInfoIdsByFileName(List<String> fileNames) throws IOException, MediaWikiApiErrorException { List<String> newFileNames = new ArrayList<>(fileNames); Map<String, MediaInfoIdValue> result = new HashMap<>(); boolean moreItems = !newFileNames.isEmpty(); while (moreItems) { List<String> subListOfFileNames; if (newFileNames.size() <= maxListSize) { subListOfFileNames = newFileNames; moreItems = false; } else { subListOfFileNames = newFileNames.subList(0, maxListSize); } result.putAll(mediaInfoIdQueryAction.getMediaInfoIds(subListOfFileNames)); subListOfFileNames.clear(); } return result; } /** * Creates a map of identifiers or page titles to documents retrieved via * the APIs.
* * @param numOfEntities * number of entities that should be retrieved * @param properties * WbGetEntitiesProperties object that includes all relevant * parameters for the wbgetentities action * @return map of document identifiers or titles to documents retrieved via * the API URL * @throws MediaWikiApiErrorException * @throws IOException */ Map<String, EntityDocument> getEntityDocumentMap(int numOfEntities, WbGetEntitiesActionData properties) throws MediaWikiApiErrorException, IOException { if (numOfEntities == 0) { return Collections.emptyMap(); } configureProperties(properties); return this.wbGetEntitiesAction.wbGetEntities(properties); } /** * Searches for entities matching the given search string, using "en" as the * search language. */ public List<WbSearchEntitiesResult> searchEntities(String search) throws MediaWikiApiErrorException, IOException { WbGetEntitiesSearchData properties = new WbGetEntitiesSearchData(); properties.search = search; properties.language = "en"; return searchEntities(properties); } /** * Searches for entities matching the given search string in the given language. */ public List<WbSearchEntitiesResult> searchEntities(String search, String language) throws MediaWikiApiErrorException, IOException { WbGetEntitiesSearchData properties = new WbGetEntitiesSearchData(); properties.search = search; properties.language = language; return searchEntities(properties); } /** * Searches for entities matching the given search string, using "en" as the * search language and returning at most the given number of results. */ public List<WbSearchEntitiesResult> searchEntities(String search, Long limit) throws MediaWikiApiErrorException, IOException { WbGetEntitiesSearchData properties = new WbGetEntitiesSearchData(); properties.search = search; properties.language = "en"; properties.limit = limit; return searchEntities(properties); } /** * Searches for entities matching the given search string in the given language, * returning at most the given number of results. */ public List<WbSearchEntitiesResult> searchEntities(String search, String language, Long limit) throws MediaWikiApiErrorException, IOException { WbGetEntitiesSearchData properties = new WbGetEntitiesSearchData(); properties.search = search; properties.language = language; properties.limit = limit; return searchEntities(properties); } /** * Searches for entities using the given search parameters. */ public List<WbSearchEntitiesResult> searchEntities(WbGetEntitiesSearchData properties) throws MediaWikiApiErrorException, IOException { return this.wbSearchEntitiesAction.wbSearchEntities(properties); } /** * Configures props, languages and sitefilter properties. * * @param properties */ void configureProperties(WbGetEntitiesActionData properties) { setRequestProps(properties); setRequestLanguages(properties); setRequestSitefilter(properties); } /** * Sets the value for the API's "props" parameter based on the current * settings. * * @param properties * current setting of parameters */ private void setRequestProps(WbGetEntitiesActionData properties) { StringBuilder builder = new StringBuilder(); builder.append("info|datatype"); if (!this.filter.excludeAllLanguages()) { builder.append("|labels|aliases|descriptions"); } if (!this.filter.excludeAllProperties()) { builder.append("|claims"); } if (!this.filter.excludeAllSiteLinks()) { builder.append("|sitelinks"); } properties.props = builder.toString(); } /** * Sets the value for the API's "languages" parameter based on the current * settings. * * @param properties * current setting of parameters */ private void setRequestLanguages(WbGetEntitiesActionData properties) { if (this.filter.excludeAllLanguages() || this.filter.getLanguageFilter() == null) { return; } properties.languages = ApiConnection.implodeObjects(this.filter .getLanguageFilter()); } /** * Sets the value for the API's "sitefilter" parameter based on the current * settings.
* * @param properties * current setting of parameters */ private void setRequestSitefilter(WbGetEntitiesActionData properties) { if (this.filter.excludeAllSiteLinks() || this.filter.getSiteLinkFilter() == null) { return; } properties.sitefilter = ApiConnection.implodeObjects(this.filter .getSiteLinkFilter()); } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrors/000077500000000000000000000000001444772566300325325ustar00rootroot00000000000000AssertUserFailedException.java000066400000000000000000000023431444772566300404040ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Exception to indicate that we tried to perform an action while our login * credentials have expired. See * MediaWiki documentation. * * @author Antonin Delpeuch * */ public class AssertUserFailedException extends MediaWikiApiErrorException { private static final long serialVersionUID = -619052434831693831L; public AssertUserFailedException(String errorMessage) { super(MediaWikiApiErrorHandler.ERROR_ASSERT_USER_FAILED, errorMessage); } } EditConflictErrorException.java000066400000000000000000000024401444772566300405560ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Exception to indicate a MediaWiki API error caused by an edit conflict. * * @author Markus Kroetzsch * */ public class EditConflictErrorException extends MediaWikiApiErrorException { private static final long serialVersionUID = 3603929976083601076L; /** * Creates a new exception. 
* * @param errorMessage * the error message reported by MediaWiki, or any other * meaningful message for the user */ public EditConflictErrorException(String errorMessage) { super(MediaWikiApiErrorHandler.ERROR_EDIT_CONFLICT, errorMessage); } } MaxlagErrorException.java000066400000000000000000000041641444772566300374250ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Exception to indicate a MediaWiki API error caused by exceeding the maxlag * parameter. See MediaWiki * documentation. * * @author Markus Kroetzsch * */ public class MaxlagErrorException extends MediaWikiApiErrorException { private static final long serialVersionUID = -4013361654647685959L; protected double lag = 0; /** * Creates a new exception. * * @param errorMessage * the error message reported by MediaWiki, or any other * meaningful message for the user */ public MaxlagErrorException(String errorMessage) { super(MediaWikiApiErrorHandler.ERROR_MAXLAG, errorMessage); } /** * Creates an exception which also stores the lag announced by the server. * * @param errorMessage * the error message reported by MediaWiki, or any other * meaningful message for the user * @param lag * the value of the reported lag, in seconds */ public MaxlagErrorException(String errorMessage, double lag) { super(MediaWikiApiErrorHandler.ERROR_MAXLAG, errorMessage); this.lag = lag; } /** * Retrieves the amount of lag announced by the server when this * error was emitted. May return 0 if the lag was not extracted from * the server response. * * @return * the lag on the target server */ public double getLag() { return lag; } } MediaWikiApiErrorException.java000066400000000000000000000036621444772566300405130ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Exception for reporting general MediaWiki API errors. 
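* <p>The exception message combines both parts: for example (illustrative),
	 * {@code new MediaWikiApiErrorException("editconflict", "Edit conflict detected")}
	 * has the message {@code "[editconflict] Edit conflict detected"}.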
* * @author Markus Kroetzsch * */ public class MediaWikiApiErrorException extends Exception { final String errorCode; final String errorMessage; private static final long serialVersionUID = 7834254856687745000L; /** * Creates a new exception for the given error code and message. * * @param errorCode * MediaWiki reported error code * @param errorMessage * MediaWiki reported error message, or any other human-readable * message string generated locally */ public MediaWikiApiErrorException(String errorCode, String errorMessage) { super("[" + errorCode + "] " + errorMessage); this.errorCode = errorCode; this.errorMessage = errorMessage; } /** * Returns the MediaWiki code of the error that has caused this exception. * * @return error code */ public String getErrorCode() { return this.errorCode; } /** * Returns the MediaWiki message string for the error that has caused this * exception. Note that this is only part of the exception message obtained * by {@link #getMessage()}. * * @return error message */ public String getErrorMessage() { return this.errorMessage; } } MediaWikiApiErrorHandler.java000066400000000000000000000047611444772566300401310ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Class to interpret MediaWiki API errors. * * @author Markus Kroetzsch * */ public class MediaWikiApiErrorHandler { public final static String ERROR_EDIT_CONFLICT = "editconflict"; public final static String ERROR_NO_TOKEN = "notoken"; public final static String ERROR_INVALID_TOKEN = "badtoken"; public final static String ERROR_NO_SUCH_ENTITY = "no-such-entity"; public final static String ERROR_MAXLAG = "maxlag"; public final static String ERROR_ASSERT_USER_FAILED = "assertuserfailed"; public final static String ERROR_TAGS_APPLY_NOT_ALLOWED = "tags-apply-not-allowed-one"; /** * Creates and throws a suitable {@link MediaWikiApiErrorException} for the * given error code and message.
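 *
 * <p>Callers can catch the specific subclasses this method dispatches to, as
 * in the following sketch (the message text is made up for illustration):</p>
 *
 * <pre>{@code
 * try {
 *     MediaWikiApiErrorHandler.throwMediaWikiApiErrorException(
 *             MediaWikiApiErrorHandler.ERROR_MAXLAG, "server is lagged");
 * } catch (MaxlagErrorException e) {
 *     // chosen because the error code matched ERROR_MAXLAG
 * } catch (MediaWikiApiErrorException e) {
 *     // any unrecognized error code ends up here
 * }
 * }</pre>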
* * @param errorCode * the error code reported by MediaWiki * @param errorMessage * the error message reported by MediaWiki, or any other * meaningful message for the user * @throws MediaWikiApiErrorException * in all cases, but may throw a subclass for some errors */ public static void throwMediaWikiApiErrorException(String errorCode, String errorMessage) throws MediaWikiApiErrorException { switch (errorCode) { case ERROR_NO_TOKEN: case ERROR_INVALID_TOKEN: throw new TokenErrorException(errorCode, errorMessage); case ERROR_EDIT_CONFLICT: throw new EditConflictErrorException(errorMessage); case ERROR_NO_SUCH_ENTITY: throw new NoSuchEntityErrorException(errorMessage); case ERROR_MAXLAG: throw new MaxlagErrorException(errorMessage); case ERROR_ASSERT_USER_FAILED: throw new AssertUserFailedException(errorMessage); case ERROR_TAGS_APPLY_NOT_ALLOWED: throw new TagsApplyNotAllowedException(errorMessage); default: throw new MediaWikiApiErrorException(errorCode, errorMessage); } } } NoSuchEntityErrorException.java000066400000000000000000000025031444772566300406030ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Exception to indicate a MediaWiki API error caused by trying to access an * entity that does not exist. * * @author Markus Kroetzsch * */ public class NoSuchEntityErrorException extends MediaWikiApiErrorException { private static final long serialVersionUID = -6500316776536101550L; /** * Creates a new exception. * * @param errorMessage * the error message reported by MediaWiki, or any other * meaningful message for the user */ public NoSuchEntityErrorException(String errorMessage) { super(MediaWikiApiErrorHandler.ERROR_NO_SUCH_ENTITY, errorMessage); } } TagsApplyNotAllowedException.java000066400000000000000000000025131444772566300410730ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /*- * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2019 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Error thrown when one of the supplied tags for an edit does not exist * or cannot be applied manually. 
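 *
 * <p>The error code is fixed by the constructor, so the following sketch
 * holds (the message text is made up for illustration):</p>
 *
 * <pre>{@code
 * MediaWikiApiErrorException e =
 *         new TagsApplyNotAllowedException("tag X cannot be applied");
 * // the code is MediaWikiApiErrorHandler.ERROR_TAGS_APPLY_NOT_ALLOWED
 * assert "tags-apply-not-allowed-one".equals(e.getErrorCode());
 * }</pre>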
* * @author Antonin Delpeuch */ public class TagsApplyNotAllowedException extends MediaWikiApiErrorException { private static final long serialVersionUID = -6500316749253610150L; /** * Creates a new exception. * * @param errorMessage * the error message reported by MediaWiki, or any other * meaningful message for the user */ public TagsApplyNotAllowedException(String errorMessage) { super(MediaWikiApiErrorHandler.ERROR_TAGS_APPLY_NOT_ALLOWED, errorMessage); } } TokenErrorException.java000066400000000000000000000025251444772566300372730ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * Exception to indicate a MediaWiki API error caused by a missing or invalid * token. * * @author Markus Kroetzsch * */ public class TokenErrorException extends MediaWikiApiErrorException { private static final long serialVersionUID = 3603929976083601076L; /** * Creates a new exception. * * @param errorCode * the error code reported by MediaWiki * @param errorMessage * the error message reported by MediaWiki, or any other * meaningful message for the user */ public TokenErrorException(String errorCode, String errorMessage) { super(errorCode, errorMessage); } } package-info.java000066400000000000000000000015151444772566300336360ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/main/java/org/wikidata/wdtk/wikibaseapi/** * Components for accessing the Web API of a Wikibase website, such as wikidata.org. * * @author Markus Kroetzsch * */ package org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* #L% */ Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/000077500000000000000000000000001444772566300220115ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/000077500000000000000000000000001444772566300227325ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/000077500000000000000000000000001444772566300235215ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/000077500000000000000000000000001444772566300253165ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/000077500000000000000000000000001444772566300262675ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapi/000077500000000000000000000000001444772566300305575ustar00rootroot00000000000000BasicApiConnectionTest.java000066400000000000000000000407321444772566300357040ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.net.HttpCookie; import java.net.URL; import java.net.URLEncoder; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.TreeSet; import org.apache.commons.lang3.tuple.ImmutablePair; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Before; import org.junit.Test; import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import org.wikidata.wdtk.testing.MockStringContentFactory; import org.wikidata.wdtk.wikibaseapi.apierrors.AssertUserFailedException; import org.wikidata.wdtk.wikibaseapi.apierrors.MaxlagErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import static org.junit.Assert.*; public class BasicApiConnectionTest { private final ObjectMapper mapper = new ObjectMapper(); private static MockWebServer server; private BasicApiConnection connection; private String LOGGED_IN_SERIALIZED_CONNECTION = "{\"baseUrl\":\"" + server.url("/w/api.php") + "\",\"cookies\":[{\"name\":\"GeoIP\",\"value\":\"DE:13:Dresden:51.0500:13.7500:v4\",\"comment\":null,\"commentURL\":null,\"domain\":\"domain comparison should be 
skipped\",\"maxAge\":-1,\"path\":\"/\",\"portlist\":null,\"secure\":false,\"httpOnly\":false,\"version\":0,\"discard\":false},{\"name\":\"testwikidatawikiSession\",\"value\":\"c18ef92637227283bcda73bcf95cfaf5\",\"comment\":null,\"commentURL\":null,\"domain\":\"domain comparison should be skipped\",\"maxAge\":-1,\"path\":\"/\",\"portlist\":null,\"secure\":true,\"httpOnly\":true,\"version\":0,\"discard\":false}],\"username\":\"username\",\"loggedIn\":true,\"tokens\":{\"login\":\"b5780b6e2f27e20b450921d9461010b4\"},\"connectTimeout\":5000,\"readTimeout\":6000}"; Set split(String str, char ch) { Set set = new TreeSet<>(); StringTokenizer stok = new StringTokenizer(str, "" + ch); while (stok.hasMoreTokens()) { set.add(stok.nextToken().trim()); } return set; } private static MockResponse makeJsonResponseFrom(String path) throws IOException { String body = MockStringContentFactory.getStringFromUrl(BasicApiConnectionTest.class.getResource(path)); return new MockResponse() .addHeader("Content-Type", "application/json; charset=utf-8") .addHeader("Set-Cookie", "WMF-Last-Access=18-Aug-2015;Path=/;HttpOnly;Expires=Sat, 19 Sep 2015 12:00:00 GMT") .addHeader("Set-Cookie", "GeoIP=DE:13:Dresden:51.0500:13.7500:v4; Path=/; Domain=" + server.getHostName()) .addHeader("Set-Cookie", "testwikidatawikiSession=c18ef92637227283bcda73bcf95cfaf5; path=/; secure; httponly") .setBody(body); } @BeforeClass public static void init() throws Exception { Dispatcher dispatcher = new Dispatcher() { @Override public MockResponse dispatch(RecordedRequest request) throws InterruptedException { if ("/w/api.php?languages=fr&format=json&action=wbgetentities&ids=Q8&sitefilter=enwiki&props=info".equals(request.getPath())) { return new MockResponse() .setHeader("Content-Type", "application/json; charset=utf-8") .setBody("{\"entities\":{\"Q8\":{\"pageid\":134,\"ns\":0,\"title\":\"Q8\",\"lastrevid\":1174289176,\"modified\":\"2020-05-05T12:39:07Z\",\"type\":\"item\",\"id\":\"Q8\"}},\"success\":1}\n"); } try { String requestBody = request.getBody().readUtf8(); // in the case of file uploads, the string representation of the request body is not stable // so we only check that some file was uploaded (for testPostFile) if (requestBody.contains("Content-Disposition: form-data; name=\"file\"; filename=\"hello.txt\"") && requestBody.contains("multipart/form-data")) { return new MockResponse() .setHeader("Content-Type", "application/json; charset=utf-8") .setBody("{\"success\":\"true\"}"); } // otherwise, check for equality on the request body switch (requestBody) { case "meta=tokens&format=json&action=query&type=login": return makeJsonResponseFrom("/query-login-token.json"); case "lgtoken=b5780b6e2f27e20b450921d9461010b4&lgpassword=password&format=json&action=login&lgname=username": return makeJsonResponseFrom("/loginSuccess.json"); case "lgtoken=b5780b6e2f27e20b450921d9461010b4&lgpassword=password1&format=json&action=login&lgname=username1": return makeJsonResponseFrom("/loginError.json"); case "lgtoken=b5780b6e2f27e20b450921d9461010b4&lgpassword=password2&format=json&action=login&lgname=username": return makeJsonResponseFrom("/loginFailed.json"); case "meta=tokens&assert=user&format=json&action=query&type=csrf": return makeJsonResponseFrom("/query-csrf-token-loggedin-response.json"); case "assert=user&format=json&action=logout&token=42307b93c79b0cb558d2dfb4c3c92e0955e06041%2B%5C": return new MockResponse().setHeader("Content-Type", "application/json; charset=utf-8").setBody("{}"); case "assert=user&format=json&action=query": return 
makeJsonResponseFrom("/assert-user-failed.json"); } // finally check clientLogin. This uses server.url, so cannot be used in switch statement because it is not constant. String url = server.url("/w/api.php").toString(); String encodedUrl = URLEncoder.encode(url, "UTF-8"); final String clientLoginRequest = String.format("password=password&format=json&action=clientlogin&logintoken=b5780b6e2f27e20b450921d9461010b4&loginreturnurl=%s&username=Admin" , encodedUrl); final String clientLoginErrorRequest = String.format("password=password1&format=json&action=clientlogin&logintoken=b5780b6e2f27e20b450921d9461010b4&loginreturnurl=%s&username=Admin" , encodedUrl); if (requestBody.equals(clientLoginRequest)) { return makeJsonResponseFrom("/clientLoginSuccess.json"); } else if (requestBody.equals(clientLoginErrorRequest)) { return makeJsonResponseFrom("/clientLoginError.json"); } } catch (Exception e) { return new MockResponse().setResponseCode(404); } return new MockResponse().setResponseCode(404); } }; server = new MockWebServer(); server.setDispatcher(dispatcher); server.start(); } @AfterClass public static void finish() throws IOException { server.shutdown(); } @Before public void setUp() { connection = new BasicApiConnection(server.url("/w/api.php").toString()); } @Test public void testGetToken() throws LoginFailedException, IOException, MediaWikiApiErrorException, InterruptedException { connection.login("username", "password"); assertNotNull(connection.getOrFetchToken("csrf")); } @Test public void testGetLoginToken() throws IOException, MediaWikiApiErrorException, InterruptedException, LoginFailedException { assertNotNull(connection.getOrFetchToken("login")); } @Test public void testConfirmLogin() throws LoginFailedException, IOException, MediaWikiApiErrorException { String token = connection.getOrFetchToken("login"); connection.confirmLogin(token, "username", "password"); } @Test public void testConfirmClientLogin() throws LoginFailedException, IOException, MediaWikiApiErrorException { String token = connection.getOrFetchToken("login"); connection.confirmClientLogin(token, "Admin", "password"); } @Test public void testLogin() throws LoginFailedException { assertFalse(connection.loggedIn); connection.login("username", "password"); assertEquals("username", connection.getCurrentUser()); assertEquals("password", connection.password); assertTrue(connection.isLoggedIn()); } @Test public void testClientLogin() throws LoginFailedException { assertFalse(connection.loggedIn); connection.clientLogin("Admin", "password"); assertEquals("Admin", connection.getCurrentUser()); assertEquals("password", connection.password); assertTrue(connection.isLoggedIn()); } @Test public void testSerialize() throws LoginFailedException, IOException { connection.login("username", "password"); connection.setConnectTimeout(5000); connection.setReadTimeout(6000); assertTrue(connection.isLoggedIn()); String jsonSerialization = mapper.writeValueAsString(connection); // We skip comparing the cookie domains here, since they depend on // the mocked web server's host, which is system dependent. 
jsonSerialization = jsonSerialization.replaceAll("\"domain\":\"[^\"]*\"", "\"domain\":\"domain comparison should be skipped\""); assertEquals(LOGGED_IN_SERIALIZED_CONNECTION, jsonSerialization); } @Test public void testDeserialize() throws IOException { BasicApiConnection newConnection = mapper.readValue(LOGGED_IN_SERIALIZED_CONNECTION, BasicApiConnection.class); assertTrue(newConnection.isLoggedIn()); assertEquals("username", newConnection.getCurrentUser()); assertEquals(5000, newConnection.getConnectTimeout()); assertEquals(6000, newConnection.getReadTimeout()); assertEquals(server.url("/w/api.php").toString(), newConnection.getApiBaseUrl()); List<HttpCookie> cookies = newConnection.getCookies(); for (HttpCookie cookie : cookies) { if (cookie.getName().equals("GeoIP")) { assertEquals("DE:13:Dresden:51.0500:13.7500:v4", cookie.getValue()); } else { assertEquals("testwikidatawikiSession", cookie.getName()); assertEquals("c18ef92637227283bcda73bcf95cfaf5", cookie.getValue()); } } Map<String, String> tokens = newConnection.getTokens(); assertEquals("b5780b6e2f27e20b450921d9461010b4", tokens.get("login")); assertNull(tokens.get("csrf")); } @Test public void testLogout() throws IOException, LoginFailedException, MediaWikiApiErrorException { connection.login("username", "password"); connection.logout(); assertEquals("", connection.username); assertEquals("", connection.password); assertFalse(connection.loggedIn); } @Test public void loginUserError() { // This will fail because the user is not known LoginFailedException loginFailedException = assertThrows(LoginFailedException.class, () -> connection.login("username1", "password1")); assertEquals("NotExists: Username does not exist.", loginFailedException.getMessage()); } @Test public void loginFailedUsesReason() { // This will fail because the password is wrong LoginFailedException loginFailedException = assertThrows(LoginFailedException.class, () -> connection.login("username", "password2")); assertEquals("Incorrect username or password entered.
Please try again.", loginFailedException.getMessage()); } @Test public void testGetQueryString() { Map params = new HashMap<>(); params.put("action", "login"); params.put("lgname", "username"); params.put("lgpassword", "password"); params.put("lgtoken", "b5780b6e2f27e20b450921d9461010b4"); params.put("format", "json"); assertEquals( split("lgtoken=b5780b6e2f27e20b450921d9461010b4&lgpassword=password" + "&action=login&lgname=username&format=json", '&'), split(connection.getQueryString(params), '&')); } @Test public void testPostFile() throws IOException, MediaWikiApiErrorException { Map formParams = new HashMap<>(); formParams.put("foo", "bar"); File file = File.createTempFile("upload_test", ".txt"); try { FileWriter writer = new FileWriter(file); writer.write("contents"); writer.close(); Map> fileParams = new HashMap<>(); fileParams.put("file", new ImmutablePair("hello.txt", file)); JsonNode node = connection.sendJsonRequest("POST", formParams, fileParams); assertEquals(node.get("success").asText(), "true"); } finally { file.delete(); } } @Test public void testWarnings() throws IOException { JsonNode root; URL path = this.getClass().getResource("/warnings.json"); root = mapper.readTree(path.openStream()); List warnings = connection.getWarnings(root); List expectedWarnings = Arrays .asList("[main]: Unrecognized parameter: 'rmparam'", "[query]: Unrecognized value for parameter 'list': raremodule", "[wbeditentity]: Your edit was patched into the latest version, overriding some of your own intermediate changes.", "[test]: Warning was not understood. Please report this to Wikidata Toolkit. JSON source: {\"unknown\":\"structure\"}"); assertEquals(expectedWarnings, warnings); } @Test(expected = MediaWikiApiErrorException.class) public void testErrors() throws IOException, MediaWikiApiErrorException { JsonNode root; URL path = this.getClass().getResource("/error.json"); root = mapper.readTree(path.openStream()); connection.checkErrors(root); } @Test public void testMaxlagError() throws IOException, MediaWikiApiErrorException { JsonNode root; URL path = this.getClass().getResource("/error-maxlag-full.json"); root = mapper.readTree(path.openStream()); try { connection.checkErrors(root); } catch(MaxlagErrorException e) { assertEquals(3.45, e.getLag(), 0.001); } } @Test public void testClearCookies() throws LoginFailedException, IOException, MediaWikiApiErrorException { connection.login("username", "password"); assertFalse(connection.getCookies().isEmpty()); connection.clearCookies(); assertTrue(connection.getCookies().isEmpty()); } @Test public void testGetWikidataApiConnection() { ApiConnection connection = BasicApiConnection.getWikidataApiConnection(); assertEquals("https://www.wikidata.org/w/api.php", connection.apiBaseUrl); } @Test public void testGetTestWikidataApiConnection() { ApiConnection connection = BasicApiConnection.getTestWikidataApiConnection(); assertEquals("https://test.wikidata.org/w/api.php", connection.apiBaseUrl); } @Test(expected = AssertUserFailedException.class) public void testCheckCredentials() throws IOException, MediaWikiApiErrorException, LoginFailedException { // we first login successfully connection.login("username", "password"); assertTrue(connection.isLoggedIn()); // after a while, the credentials expire connection.checkCredentials(); } /** * For backwards compatibility: by defaults, no timeouts * are set by us, we use HttpURLConnection's defaults. 
* @throws IOException */ @Test public void testNoTimeouts() throws IOException { assertEquals(-1, connection.getConnectTimeout()); assertEquals(-1, connection.getReadTimeout()); } @Test public void testConnectTimeout() throws IOException { connection.setConnectTimeout(5000); assertEquals(5000, connection.getConnectTimeout()); } @Test public void testReadTimeout() throws IOException { connection.setReadTimeout(5000); assertEquals(5000, connection.getReadTimeout()); } @Test public void testTimeouts() { connection.setConnectTimeout(5000); connection.setReadTimeout(5000); assertEquals(5000, connection.getConnectTimeout()); assertEquals(5000, connection.getReadTimeout()); } @Test public void testGetMethod() throws IOException, MediaWikiApiErrorException { Map parameters = new HashMap<>(); parameters.put("action", "wbgetentities"); parameters.put("languages", "fr"); parameters.put("ids", "Q8"); parameters.put("sitefilter", "enwiki"); parameters.put("props", "info"); JsonNode root = connection.sendJsonRequest("GET", parameters); assertEquals("{\"entities\":{\"Q8\":{\"pageid\":134,\"ns\":0,\"title\":\"Q8\",\"lastrevid\":1174289176,\"modified\":\"2020-05-05T12:39:07Z\",\"type\":\"item\",\"id\":\"Q8\"}},\"success\":1}", mapper.writeValueAsString(root)); } @Test(expected = IllegalArgumentException.class) public void testUnsupportedMethod() throws IOException, MediaWikiApiErrorException { connection.sendJsonRequest("PUT", new HashMap<>()); } } EditingResultTest.java000066400000000000000000000023241444772566300347660ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /*- * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2023 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import java.util.OptionalLong; import org.junit.Test; public class EditingResultTest { @Test public void testGetRevisionId() { EditingResult SUT = new EditingResult(1234L); assertEquals(SUT.getLastRevisionId(), OptionalLong.of(1234L)); } @Test public void testNoRevisionId() { EditingResult SUT = new EditingResult(0L); assertEquals(SUT.getLastRevisionId(), OptionalLong.empty()); } } LoginValueTest.java000066400000000000000000000034261444772566300342550ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /*- * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2022 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; public class LoginValueTest { @Test public void testOf() { LoginValue needToken = LoginValue.of("NeedToken"); assertEquals(LoginValue.LOGIN_NEEDTOKEN, needToken); } @Test public void testOfReturnsTopMatch() { LoginValue wrongpassword = LoginValue.of("wrongpassword"); assertEquals(LoginValue.LOGIN_WRONG_PASS, wrongpassword); assertNotEquals(LoginValue.LOGIN_NOT_EXISTS, wrongpassword); } @Test public void testOfReturnsUnknownIfTextUnknown() { LoginValue unknown = LoginValue.of("dunno"); assertEquals(LoginValue.UNKNOWN, unknown); } @Test public void testOfReturnsUnknownIfTextNull() { LoginValue unknown = LoginValue.of(null); assertEquals(LoginValue.UNKNOWN, unknown); } @Test public void testGetMessage() { assertEquals("NeedToken: Token or session ID is missing.", LoginValue.LOGIN_NEEDTOKEN.getMessage("NeedToken")); } } MockBasicApiConnection.java000066400000000000000000000073531444772566300356600ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import org.apache.commons.lang3.tuple.ImmutablePair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wikidata.wdtk.testing.MockStringContentFactory; import org.wikidata.wdtk.util.CompressionType; /** * Mock implementation of {@link ApiConnection} used for testing. * * @author Michael Guenther * */ public class MockBasicApiConnection extends BasicApiConnection { static final Logger logger = LoggerFactory .getLogger(MockBasicApiConnection.class); /** * Mapping from hashes of query parameter maps to request results. */ final HashMap<Integer, byte[]> webResources; /** * Constructor. */ public MockBasicApiConnection() { super("https://mocked.api.connection/w/api.php"); webResources = new HashMap<>(); } /** * Adds a new web resource to mock a request. * * @param parameters * the query parameters of the request to mock * @param result * the result string that a request with these parameters should return */ public void setWebResource(Map<String, String> parameters, String result) { int hash = parameters.hashCode(); if (this.webResources.containsKey(hash)) { logger.warn("Overwriting mocked result for parameters " + parameters.toString()); } this.webResources.put(hash, result.getBytes(StandardCharsets.UTF_8)); } // TODO this function is very similar to a function in the // MockWebResourceFetcher. Sharing this code in some way would be more // convenient. /** * Defines the contents of a new web resource (result for an API request) by * taking a list of parameters and the string from a given (Java) resource.
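 *
 * <p>A usage sketch (the parameter map, the test class and the classpath
 * resource are made up for illustration):</p>
 *
 * <pre>{@code
 * MockBasicApiConnection conn = new MockBasicApiConnection();
 * Map<String, String> params = new HashMap<>();
 * params.put("action", "query");
 * params.put("format", "json");
 * conn.setWebResourceFromPath(params, MyTest.class,
 *         "/query-response.json", CompressionType.NONE);
 * // sendRequest with an equal parameter map now returns the mocked body
 * }</pre>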
* * @param parameters * parameter setting of the query string for an API request. * @param resourceClass * the Class relative to which the resource should be resolved * (since resources are stored relative to a classpath); can * usually be obtained with getClass() from the calling object * @param path * the path to the Java resource * @param compressionType * the compression type of the resource file * @throws IOException * if the Java resource could not be loaded */ public void setWebResourceFromPath(Map<String, String> parameters, Class<?> resourceClass, String path, CompressionType compressionType) throws IOException { this.webResources.put(parameters.hashCode(), MockStringContentFactory .getStringFromUrl(resourceClass.getResource(path)).getBytes()); } @Override public InputStream sendRequest(String requestMethod, Map<String, String> parameters, Map<String, ImmutablePair<String, File>> files) throws IOException { // files parameter purposely ignored because we do not support mocking that yet if (this.webResources.containsKey(parameters.hashCode())) { return new ByteArrayInputStream(this.webResources.get(parameters .hashCode())); } else { throw new IOException("API result not mocked for parameters " + parameters); } } } MockGuidGenerator.java000066400000000000000000000026651444772566300347230ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2018 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * A mock version of the RandomGuidGenerator that returns predictable * identifiers. * * @author antonin * */ public class MockGuidGenerator implements GuidGenerator { private static String nextGuid = null; /** * Initializes the mock generator with an initial GUID. * @param guid the initial GUID */ public MockGuidGenerator(String guid) { nextGuid = guid; } /** * Sets the next GUID to be generated by this generator. * @param guid the next GUID */ public void setGuid(String guid) { nextGuid = guid; } /** * Generates a statement id, using the provided entity * id and the previously supplied GUID. */ @Override public String freshStatementId(String entityId) { return entityId + STATEMENT_GUID_SEPARATOR + nextGuid; } } OAuthApiConnectionTest.java000066400000000000000000000202531444772566300356770ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2020 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.fasterxml.jackson.databind.ObjectMapper; import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import java.io.IOException; import java.util.Collections; import static org.junit.Assert.*; public class OAuthApiConnectionTest { private static final String CONSUMER_KEY = "consumer_key"; private static final String CONSUMER_SECRET = "consumer_secret"; private static final String ACCESS_TOKEN = "access_token"; private static final String ACCESS_SECRET = "access_secret"; private static MockWebServer server; private OAuthApiConnection connection; private final ObjectMapper mapper = new ObjectMapper(); private final String NOT_LOGGED_IN_SERIALIZED = "{\"baseUrl\":\"" + server.url("/w/api.php") + "\"," + "\"consumerKey\":null," + "\"consumerSecret\":null," + "\"accessToken\":null," + "\"accessSecret\":null," + "\"username\":\"\"," + "\"loggedIn\":false," + "\"tokens\":{}," + "\"connectTimeout\":-1," + "\"readTimeout\":-1}"; private final String LOGGED_IN_SERIALIZED = "{\"baseUrl\":\"" + server.url("/w/api.php") + "\"," + "\"consumerKey\":\"consumer_key\"," + "\"consumerSecret\":\"consumer_secret\"," + "\"accessToken\":\"access_token\"," + "\"accessSecret\":\"access_secret\"," + "\"username\":\"foo\"," + "\"loggedIn\":true," + "\"tokens\":{}," + "\"connectTimeout\":-1," + "\"readTimeout\":-1}"; @BeforeClass public static void init() throws IOException { Dispatcher dispatcher = new Dispatcher() { @Override public MockResponse dispatch(RecordedRequest request) throws InterruptedException { switch (request.getBody().readUtf8()) { case "languages=fr&assert=user&format=json&action=wbgetentities&ids=Q8&sitefilter=enwiki&props=info%7Cdatatype%7Clabels%7Caliases%7Cdescriptions%7Csitelinks": return new MockResponse() .addHeader("Content-Type", "application/json; charset=utf-8") .setBody("{\"entities\":{\"Q8\":{\"pageid\":134,\"ns\":0,\"title\":\"Q8\",\"lastrevid\":1174289176,\"modified\":\"2020-05-05T12:39:07Z\",\"type\":\"item\",\"id\":\"Q8\",\"labels\":{\"fr\":{\"language\":\"fr\",\"value\":\"bonheur\"}},\"descriptions\":{\"fr\":{\"language\":\"fr\",\"value\":\"état émotionnel\"}},\"aliases\":{\"fr\":[{\"language\":\"fr\",\"value\":\":)\"},{\"language\":\"fr\",\"value\":\"\uD83D\uDE04\"},{\"language\":\"fr\",\"value\":\"\uD83D\uDE03\"}]},\"sitelinks\":{\"enwiki\":{\"site\":\"enwiki\",\"title\":\"Happiness\",\"badges\":[]}}}},\"success\":1}"); case "meta=userinfo&assert=user&format=json&action=query": return new MockResponse() .addHeader("Content-Type", "application/json; charset=utf-8") .setBody("{\"batchcomplete\":\"\",\"query\":{\"userinfo\":{\"id\":2333,\"name\":\"foo\"}}}"); default: return new MockResponse().setResponseCode(404); } } }; server = new MockWebServer(); server.setDispatcher(dispatcher); server.start(); } @AfterClass public static void finish() throws IOException { server.shutdown(); } @Before public void setUp() { connection = new 
OAuthApiConnection(server.url("/w/api.php").toString(), CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_SECRET); } @Test public void testFetchOnlineData() throws IOException, MediaWikiApiErrorException, InterruptedException { WikibaseDataFetcher wbdf = new WikibaseDataFetcher(connection, Datamodel.SITE_WIKIDATA); wbdf.getFilter().setSiteLinkFilter(Collections.singleton("enwiki")); wbdf.getFilter().setLanguageFilter(Collections.singleton("fr")); wbdf.getFilter().setPropertyFilter( Collections.emptySet()); EntityDocument q8 = wbdf.getEntityDocument("Q8"); String result = ""; if (q8 instanceof ItemDocument) { result = "The French label for entity Q8 is " + ((ItemDocument) q8).getLabels().get("fr").getText() + "\nand its English Wikipedia page has the title " + ((ItemDocument) q8).getSiteLinks().get("enwiki") .getPageTitle() + "."; } assertEquals("The French label for entity Q8 is bonheur\n" + "and its English Wikipedia page has the title Happiness.", result); } @Test public void testLogout() throws IOException, InterruptedException { assertTrue(connection.isLoggedIn()); assertEquals("foo", connection.getCurrentUser()); connection.logout(); assertEquals("", connection.getCurrentUser()); assertFalse(connection.isLoggedIn()); assertEquals("", connection.getCurrentUser()); RecordedRequest request = server.takeRequest(); assertNotNull(request.getHeader("Authorization")); } @Test public void testSerialize() throws IOException, LoginFailedException { String jsonSerialization = mapper.writeValueAsString(connection); assertEquals(LOGGED_IN_SERIALIZED, jsonSerialization); } @Test public void testDeserialize() throws IOException { OAuthApiConnection newConnection = mapper.readValue(LOGGED_IN_SERIALIZED, OAuthApiConnection.class); assertTrue(newConnection.isLoggedIn()); assertEquals(CONSUMER_KEY, newConnection.getConsumerKey()); assertEquals(CONSUMER_SECRET, newConnection.getConsumerSecret()); assertEquals(ACCESS_TOKEN, newConnection.getAccessToken()); assertEquals(ACCESS_SECRET, newConnection.getAccessSecret()); assertEquals(server.url("/w/api.php").toString(), newConnection.getApiBaseUrl()); assertEquals("foo", newConnection.getCurrentUser()); assertEquals(-1, connection.getConnectTimeout()); assertEquals(-1, connection.getReadTimeout()); assertTrue(connection.getTokens().isEmpty()); } @Test public void testDeserializeNotLogin() throws IOException { OAuthApiConnection connection = mapper.readValue(NOT_LOGGED_IN_SERIALIZED, OAuthApiConnection.class); assertFalse(connection.isLoggedIn()); assertNull(CONSUMER_KEY, connection.getConsumerKey()); assertNull(CONSUMER_SECRET, connection.getConsumerSecret()); assertNull(ACCESS_TOKEN, connection.getAccessToken()); assertNull(ACCESS_SECRET, connection.getAccessSecret()); assertEquals(server.url("/w/api.php").toString(), connection.getApiBaseUrl()); assertEquals(-1, connection.getConnectTimeout()); assertEquals(-1, connection.getReadTimeout()); assertTrue(connection.getTokens().isEmpty()); } } StatementUpdateTest.java000066400000000000000000000346131444772566300353210ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder; import org.wikidata.wdtk.datamodel.helpers.ReferenceBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.helpers.JsonSerializer; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.Reference; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @Deprecated public class StatementUpdateTest { final static ItemIdValue Q1 = Datamodel.makeWikidataItemIdValue("Q1"); final static ItemIdValue Q2 = Datamodel.makeWikidataItemIdValue("Q2"); final static ItemIdValue Q3 = Datamodel.makeWikidataItemIdValue("Q3"); final static PropertyIdValue P1 = Datamodel .makeWikidataPropertyIdValue("P1"); final static PropertyIdValue P2 = Datamodel .makeWikidataPropertyIdValue("P2"); final static PropertyIdValue P3 = Datamodel .makeWikidataPropertyIdValue("P3"); @Test public void testMergeReferences() { Reference r1 = ReferenceBuilder.newInstance().withPropertyValue(P1, Q1) .withPropertyValue(P2, Q2).build(); Reference r2 = ReferenceBuilder.newInstance().withPropertyValue(P1, Q1) .build(); Reference r3 = ReferenceBuilder.newInstance().withPropertyValue(P2, Q2) .build(); Reference r4 = ReferenceBuilder.newInstance().withPropertyValue(P2, Q2) .withPropertyValue(P1, Q1).build(); Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withReference(r1).withReference(r2) .withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withReference(r3).withReference(r4).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s2), Collections.emptyList()); Statement s1merged = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withReference(r1).withReference(r2) .withReference(r3).withId("ID-s1").build(); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P1)); assertEquals(1, su.toKeep.get(P1).size()); assertEquals(s1merged, su.toKeep.get(P1).get(0).statement); assertTrue(su.toKeep.get(P1).get(0).write); } @Test public void testMergeRanks() { Reference r1 = ReferenceBuilder.newInstance().withPropertyValue(P1, Q1) .withPropertyValue(P2, Q2).build(); Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) 
.withRank(StatementRank.PREFERRED).withValue(Q1) .withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withReference(r1).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s2), Collections.emptyList()); Statement s1merged = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withRank(StatementRank.PREFERRED) .withReference(r1).withId("ID-s1").build(); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P1)); assertEquals(1, su.toKeep.get(P1).size()); assertEquals(s1merged, su.toKeep.get(P1).get(0).statement); assertTrue(su.toKeep.get(P1).get(0).write); } @Test public void testNoMergeDiffMainSnak() { Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q2).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s2), Collections.emptyList()); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P1)); assertEquals(2, su.toKeep.get(P1).size()); assertEquals(s2, su.toKeep.get(P1).get(0).statement); assertTrue(su.toKeep.get(P1).get(0).write); assertEquals(s1, su.toKeep.get(P1).get(1).statement); assertFalse(su.toKeep.get(P1).get(1).write); } @Test public void testNoMergeDiffQualifier() { Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withQualifierValue(P3, Q2).withId("ID-s1") .build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withQualifierValue(P3, Q3).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s2), Collections.emptyList()); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P1)); assertEquals(2, su.toKeep.get(P1).size()); assertEquals(s2, su.toKeep.get(P1).get(0).statement); assertTrue(su.toKeep.get(P1).get(0).write); assertEquals(s1, su.toKeep.get(P1).get(1).statement); assertFalse(su.toKeep.get(P1).get(1).write); } @Test public void testNoMergeRankConflict() { Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withRank(StatementRank.PREFERRED) .withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withRank(StatementRank.DEPRECATED).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s2), Collections.emptyList()); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P1)); assertEquals(2, su.toKeep.get(P1).size()); assertEquals(s2, su.toKeep.get(P1).get(0).statement); assertTrue(su.toKeep.get(P1).get(0).write); assertEquals(s1, su.toKeep.get(P1).get(1).statement); assertFalse(su.toKeep.get(P1).get(1).write); } @Test public void testUpdateStatement() { Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) 
.withValue(Q2).withId("ID-s1").build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s2), Collections.emptyList()); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P1)); assertEquals(1, su.toKeep.get(P1).size()); assertEquals(s2, su.toKeep.get(P1).get(0).statement); assertTrue(su.toKeep.get(P1).get(0).write); } @Test public void testAddStatements() throws JsonProcessingException { // Inserting new P2 statements won't touch existing P1 statement Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withValue(Q1).withId("ID-s2").build(); Statement s3 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withValue(Q2).build(); Statement s4 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withValue(Q3).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).withStatement(s2).build(); StatementUpdate su = new StatementUpdate(currentDocument, Arrays.asList(s3, s4), Collections.emptyList()); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P2)); assertEquals(3, su.toKeep.get(P2).size()); assertEquals(s3, su.toKeep.get(P2).get(0).statement); assertTrue(su.toKeep.get(P2).get(0).write); assertEquals(s4, su.toKeep.get(P2).get(1).statement); assertTrue(su.toKeep.get(P2).get(1).write); assertEquals(s2, su.toKeep.get(P2).get(2).statement); assertFalse(su.toKeep.get(P2).get(2).write); assertEquals("{\"claims\":[" + JsonSerializer.getJsonString(s3) + "," + JsonSerializer.getJsonString(s4) + "]}", su.getJsonUpdateString()); } @Test public void testDeleteAndAdd() { // Explicitly deleted statement won't merge Reference r1 = ReferenceBuilder.newInstance().withPropertyValue(P1, Q1) .build(); Reference r2 = ReferenceBuilder.newInstance().withPropertyValue(P2, Q2) .build(); Statement s3 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withReference(r1).withValue(Q1).withId("ID-s3").build(); Statement s4 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withReference(r2).withValue(Q1).withId("ID-s4").build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s4).build(); List addStatements = Collections.singletonList(s3); List deleteStatements = Collections.singletonList(s4); StatementUpdate su = new StatementUpdate(currentDocument, addStatements, deleteStatements); assertTrue(su.toDelete.contains("ID-s4")); assertTrue(su.toKeep.containsKey(P2)); assertEquals(1, su.toKeep.get(P2).size()); assertEquals(s3, su.toKeep.get(P2).get(0).statement); assertFalse(su.isEmptyEdit()); } @Test public void testMergeExisting() { // Existing duplicates are removed in passing, when modifying statements // of a property Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s2").build(); Statement s3 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q2).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).withStatement(s2).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s3), Collections.emptyList()); assertEquals(su.toDelete, Collections.singletonList("ID-s1")); assertEquals(1, 
su.toKeep.size()); assertTrue(su.toKeep.containsKey(P1)); assertEquals(2, su.toKeep.get(P1).size()); assertEquals(s3, su.toKeep.get(P1).get(0).statement); assertTrue(su.toKeep.get(P1).get(0).write); assertEquals(s2, su.toKeep.get(P1).get(1).statement); assertFalse(su.toKeep.get(P1).get(1).write); assertFalse(su.isEmptyEdit()); } @Test public void testNullEdit() { Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s1").build(); Statement s1dup = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s2").build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.singletonList(s1dup), Collections.singletonList(s2)); assertTrue(su.isEmptyEdit()); } @Test public void testMergeNew() { // Additions of duplicates are merged Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P3) .withValue(Q1).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P3) .withValue(Q1).build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .build(); StatementUpdate su = new StatementUpdate(currentDocument, Arrays.asList(s1, s2), Collections.emptyList()); assertEquals(0, su.toDelete.size()); assertEquals(1, su.toKeep.size()); assertTrue(su.toKeep.containsKey(P3)); assertEquals(1, su.toKeep.get(P3).size()); assertEquals(s1, su.toKeep.get(P3).get(0).statement); assertTrue(su.toKeep.get(P3).get(0).write); assertFalse(su.isEmptyEdit()); } @Test public void testDelete() throws IOException { Statement s1 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q1).withId("ID-s1").build(); Statement s2 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q2).withId("ID-s2").build(); Statement s3 = StatementBuilder.forSubjectAndProperty(Q1, P1) .withValue(Q3).withId("ID-s3").build(); Statement s4 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withValue(Q1).withId("ID-s4").build(); Statement s4changed = StatementBuilder.forSubjectAndProperty(Q1, P2) .withValue(Q2).withId("ID-s4").build(); Statement s5 = StatementBuilder.forSubjectAndProperty(Q1, P2) .withValue(Q3).withId("ID-s5").build(); Statement s6 = StatementBuilder.forSubjectAndProperty(Q1, P3) .withValue(Q1).withId("ID-s6").build(); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withStatement(s1).withStatement(s2).withStatement(s4changed) .withStatement(s5).withStatement(s6).build(); StatementUpdate su = new StatementUpdate(currentDocument, Collections.emptyList(), Arrays.asList(s2, s3, s4, s5)); ObjectMapper mapper = new ObjectMapper(); JsonNode expectedJson = mapper.readTree("{\"claims\":[{\"id\":\"ID-s2\",\"remove\":\"\"},{\"id\":\"ID-s5\",\"remove\":\"\"}]}"); JsonNode actualJson = mapper.readTree(su.getJsonUpdateString()); assertEquals(Arrays.asList("ID-s2", "ID-s5"), su.toDelete); assertEquals(0, su.toKeep.size()); assertEquals(expectedJson, actualJson); assertFalse(su.isEmptyEdit()); } } TermStatementUpdateTest.java000066400000000000000000000274101444772566300361460ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapi/* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wikidata.wdtk.wikibaseapi; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; @Deprecated public class TermStatementUpdateTest { final static ItemIdValue Q1 = Datamodel.makeWikidataItemIdValue("Q1"); protected TermStatementUpdate makeUpdate(ItemDocument doc, List<MonolingualTextValue> labels, List<MonolingualTextValue> descriptions, List<MonolingualTextValue> addedLabels, List<MonolingualTextValue> deletedLabels) { return new TermStatementUpdate( doc, Collections.emptyList(), Collections.emptyList(), labels, descriptions, addedLabels, deletedLabels); } /** * Adding a label on an empty item. */ @Test public void testAddLabel() { ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).build(); MonolingualTextValue label = Datamodel.makeMonolingualTextValue("Apfelstrudel", "de"); TermStatementUpdate su = makeUpdate(currentDocument, Collections.singletonList(label), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); // Check model assertEquals(Collections.singleton("de"), su.getLabelUpdates().keySet()); assertEquals(label.getText(), su.getLabelUpdates().get("de").getText()); assertTrue(su.getAliasUpdates().isEmpty()); assertTrue(su.getDescriptionUpdates().isEmpty()); // Check JSON output assertEquals("{\"labels\":{\"de\":{\"language\":\"de\",\"value\":\"Apfelstrudel\"}}}", su.getJsonUpdateString()); assertFalse(su.isEmptyEdit()); } /** * When trying to add an alias for a language that does not have * any label yet, the alias is added as a label instead. */ @Test public void testAddAliasWithoutLabel() { ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).build(); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Apfelstrudel", "de"); TermStatementUpdate su = makeUpdate(currentDocument, Collections.emptyList(), Collections.emptyList(), Collections.singletonList(alias), Collections.emptyList()); assertEquals(su.getLabelUpdates().keySet(), Collections.singleton("de")); assertEquals(su.getLabelUpdates().get("de").getText(), alias.getText()); assertTrue(su.getAliasUpdates().isEmpty()); assertTrue(su.getDescriptionUpdates().isEmpty()); assertFalse(su.isEmptyEdit()); } /** * Adding a label and an alias at the same time.
*/ @Test public void testAddLabelAndAlias() { ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).build(); MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Apfelstrudel", "fr"); TermStatementUpdate su = makeUpdate(currentDocument, Collections.singletonList(label), Collections.emptyList(), Collections.singletonList(alias), Collections.emptyList()); assertEquals(Collections.singleton("fr"), su.getLabelUpdates().keySet()); assertEquals(label.getText(), su.getLabelUpdates().get("fr").getText()); assertEquals(Collections.singleton("fr"), su.getAliasUpdates().keySet()); assertEquals(alias.getText(), su.getAliasUpdates().get("fr").get(0).getText()); assertTrue(su.getDescriptionUpdates().isEmpty()); assertFalse(su.isEmptyEdit()); } /** * Adding the same alias twice. */ @Test public void testAliasTwice() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).withLabel(label).build(); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Apfelstrudel", "fr"); List<MonolingualTextValue> newAliases = new ArrayList<>(); newAliases.add(alias); newAliases.add(alias); TermStatementUpdate su = makeUpdate(currentDocument, Collections.emptyList(), Collections.emptyList(), newAliases, Collections.emptyList()); assertTrue(su.getLabelUpdates().isEmpty()); assertEquals(1, su.getAliasUpdates().size()); assertEquals("{\"aliases\":{\"fr\":[{\"language\":\"fr\",\"value\":\"Apfelstrudel\"}]}}", su.getJsonUpdateString()); assertFalse(su.isEmptyEdit()); } /** * Adding an alias on an item that already has one. */ @Test public void testAliasMerge() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Apfelstrudel", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).withLabel(label).withAlias(alias).build(); MonolingualTextValue newAlias = Datamodel.makeMonolingualTextValue("Apfelstrudeln", "fr"); TermStatementUpdate su = makeUpdate(currentDocument, Collections.emptyList(), Collections.emptyList(), Collections.singletonList(newAlias), Collections.emptyList()); assertTrue(su.getLabelUpdates().isEmpty()); assertEquals(1, su.getAliasUpdates().size()); assertEquals(2, su.getAliasUpdates().get("fr").size()); assertEquals("{\"aliases\":{\"fr\":[{\"language\":\"fr\",\"value\":\"Apfelstrudel\"},{\"language\":\"fr\",\"value\":\"Apfelstrudeln\"}]}}", su.getJsonUpdateString()); assertFalse(su.isEmptyEdit()); } /** * Adding an alias identical to the label in the same language does not do anything. */ @Test public void testAddLabelAsAlias() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("Apfelstrudel", "de"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).withLabel(label).build(); TermStatementUpdate su = makeUpdate(currentDocument, Collections.emptyList(), Collections.emptyList(), Collections.singletonList(label), Collections.emptyList() ); assertTrue(su.getLabelUpdates().isEmpty()); assertTrue(su.getAliasUpdates().isEmpty()); assertTrue(su.getDescriptionUpdates().isEmpty()); assertTrue(su.isEmptyEdit()); } /** * Adding a label identical to an alias updates the label and deletes the alias. */ @Test public void testAddAliasAsLabel() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); MonolingualTextValue alias =
Datamodel.makeMonolingualTextValue("Apfelstrudel", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).withLabel(label).withAlias(alias).build(); TermStatementUpdate su = makeUpdate(currentDocument, Collections.singletonList(alias), Collections.emptyList(), Collections.emptyList(), Collections.emptyList() ); assertEquals(Collections.singleton("fr"), su.getAliasUpdates().keySet()); assertTrue(su.getAliasUpdates().get("fr").isEmpty()); assertEquals(Collections.singleton("fr"), su.getLabelUpdates().keySet()); assertEquals(su.getLabelUpdates().get("fr").getText(), alias.getText()); assertTrue(su.getDescriptionUpdates().isEmpty()); assertFalse(su.isEmptyEdit()); } /** * Deleting an alias */ @Test public void testDeleteAlias() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Apfelstrudel", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).withLabel(label).withAlias(alias).build(); TermStatementUpdate su = makeUpdate(currentDocument, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(alias) ); assertTrue(su.getLabelUpdates().isEmpty()); assertEquals(su.getAliasUpdates().size(), 1); assertEquals(su.getAliasUpdates().get("fr").size(), 0); assertEquals("{\"aliases\":{\"fr\":[]}}", su.getJsonUpdateString()); assertFalse(su.isEmptyEdit()); } /** * Adding a description, for the sake of coverage… */ @Test public void testDescription() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Apfelstrudel", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).withLabel(label).withAlias(alias).build(); MonolingualTextValue description = Datamodel.makeMonolingualTextValue("délicieuse pâtisserie aux pommes", "fr"); TermStatementUpdate su = makeUpdate(currentDocument, Collections.emptyList(), Collections.singletonList(description), Collections.emptyList(), Collections.emptyList()); assertTrue(su.getLabelUpdates().isEmpty()); assertTrue(su.getAliasUpdates().isEmpty()); assertEquals(Collections.singleton("fr"), su.getDescriptionUpdates().keySet()); assertEquals("délicieuse pâtisserie aux pommes", su.getDescriptionUpdates().get("fr").getText()); assertEquals("{\"descriptions\":{\"fr\":{\"language\":\"fr\",\"value\":\"délicieuse pâtisserie aux pommes\"}}}", su.getJsonUpdateString()); assertFalse(su.isEmptyEdit()); } /** * Adding a label, identical to the current one */ @Test public void testAddIdenticalLabel() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1).withLabel(label).build(); TermStatementUpdate su = makeUpdate(currentDocument, Collections.singletonList(label), Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); assertEquals("{}", su.getJsonUpdateString()); assertTrue(su.isEmptyEdit()); } /** * Adding a description, identical to the current one */ @Test public void testAddIdenticalDescription() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); MonolingualTextValue description = Datamodel.makeMonolingualTextValue("délicieuse pâtisserie aux pommes", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withLabel(label) .withDescription(description) .build(); TermStatementUpdate su = 
makeUpdate(currentDocument, Collections.emptyList(), Collections.singletonList(description), Collections.emptyList(), Collections.emptyList()); assertEquals("{}", su.getJsonUpdateString()); assertTrue(su.isEmptyEdit()); } /** * Adding an alias, identical to the current one */ @Test public void testAddIdenticalAlias() { MonolingualTextValue label = Datamodel.makeMonolingualTextValue("strudel aux pommes", "fr"); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Apfelstrudel", "fr"); ItemDocument currentDocument = ItemDocumentBuilder.forItemId(Q1) .withLabel(label) .withAlias(alias) .build(); TermStatementUpdate su = makeUpdate(currentDocument, Collections.emptyList(), Collections.emptyList(), Collections.singletonList(alias), Collections.emptyList()); assertEquals("{}", su.getJsonUpdateString()); assertTrue(su.isEmptyEdit()); } } WbEditingActionTest.java000066400000000000000000000167331444772566300352270ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; import static org.junit.Assert.assertEquals; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertNull; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.wikibaseapi.apierrors.MaxlagErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.TokenErrorException; public class WbEditingActionTest { @Test(expected = IOException.class) public void testOfflineErrors() throws IOException, MediaWikiApiErrorException { MockBasicApiConnection con = new MockBasicApiConnection(); WbEditingAction weea = new WbEditingAction(con, Datamodel.SITE_WIKIDATA); EntityDocument result = weea.wbEditEntity("Q42", null, null, null, "{}", true, false, 0, null, null); assertNull(result); } @Test(expected = TokenErrorException.class) public void testApiErrorGettingToken() throws IOException, MediaWikiApiErrorException { MockBasicApiConnection con = new MockBasicApiConnection(); Map<String, String> params = new HashMap<>(); params.put("action", "query"); params.put("meta", "tokens"); params.put("type", "csrf"); params.put("format", "json"); // This error makes no sense for this action, but that does not matter // here: con.setWebResourceFromPath(params, this.getClass(), "/error-badtoken.json", CompressionType.NONE); params.clear(); params.put("action", "wbeditentity"); params.put("id", "Q42"); params.put("token", null); params.put("format", "json"); params.put("data", "{}"); params.put("maxlag", "5"); con.setWebResourceFromPath(params, this.getClass(), "/error-badtoken.json", CompressionType.NONE); WbEditingAction weea = new
WbEditingAction(con, Datamodel.SITE_WIKIDATA); weea.wbEditEntity("Q42", null, null, null, "{}", false, false, 0, null, null); } @Test(expected = TokenErrorException.class) public void testNoTokenInResponse() throws IOException, MediaWikiApiErrorException { MockBasicApiConnection con = new MockBasicApiConnection(); Map<String, String> params = new HashMap<>(); params.put("action", "query"); params.put("meta", "tokens"); params.put("format", "json"); params.put("type", "csrf"); // This error makes no sense for this action, but that does not matter // here: con.setWebResource(params, "{}"); params.clear(); params.put("action", "wbeditentity"); params.put("id", "Q42"); params.put("token", null); params.put("format", "json"); params.put("data", "{}"); params.put("maxlag", "5"); con.setWebResourceFromPath(params, this.getClass(), "/error-badtoken.json", CompressionType.NONE); WbEditingAction weea = new WbEditingAction(con, Datamodel.SITE_WIKIDATA); weea.wbEditEntity("Q42", null, null, null, "{}", false, false, 0, null, null); } @Test(expected = MaxlagErrorException.class) public void testApiErrorMaxLag() throws IOException, MediaWikiApiErrorException { MockBasicApiConnection con = new MockBasicApiConnection(); Map<String, String> params = new HashMap<>(); params.put("action", "query"); params.put("meta", "tokens"); params.put("type", "csrf"); params.put("format", "json"); con.setWebResourceFromPath(params, this.getClass(), "/query-csrf-token-loggedin-response.json", CompressionType.NONE); params.clear(); params.put("action", "wbeditentity"); params.put("id", "Q42"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("data", "{}"); params.put("maxlag", "5"); con.setWebResourceFromPath(params, this.getClass(), "/error-maxlag.json", CompressionType.NONE); WbEditingAction weea = new WbEditingAction(con, Datamodel.SITE_WIKIDATA); weea.setMaxLagFirstWaitTime(0); // speed up the test ...
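// The mocked API persistently returns a maxlag error, so the edit below is expected to exhaust its retries and fail with a MaxlagErrorException.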
weea.wbEditEntity("Q42", null, null, null, "{}", false, false, 0, null, null); } @Test(expected = IllegalArgumentException.class) public void testIdAndSite() throws IOException, MediaWikiApiErrorException { WbEditingAction weea = new WbEditingAction( new MockBasicApiConnection(), Datamodel.SITE_WIKIDATA); weea.wbEditEntity("Q1234", "enwiki", null, null, "{}", false, false, 0, null, null); } @Test(expected = IllegalArgumentException.class) public void testIdAndTitle() throws IOException, MediaWikiApiErrorException { WbEditingAction weea = new WbEditingAction( new MockBasicApiConnection(), Datamodel.SITE_WIKIDATA); weea.wbEditEntity("Q1234", null, "Title", null, "{}", false, false, 0, null, null); } @Test(expected = IllegalArgumentException.class) public void testTitleNoSite() throws IOException, MediaWikiApiErrorException { WbEditingAction weea = new WbEditingAction( new MockBasicApiConnection(), Datamodel.SITE_WIKIDATA); weea.wbEditEntity(null, null, "Title", null, "{}", false, false, 0, null, null); } @Test(expected = IllegalArgumentException.class) public void testNewAndId() throws IOException, MediaWikiApiErrorException { WbEditingAction weea = new WbEditingAction( new MockBasicApiConnection(), Datamodel.SITE_WIKIDATA); weea.wbEditEntity("Q1234", null, null, "item", "{}", false, false, 0, null, null); } @Test(expected = IllegalArgumentException.class) public void testNewAndSite() throws IOException, MediaWikiApiErrorException { WbEditingAction weea = new WbEditingAction( new MockBasicApiConnection(), Datamodel.SITE_WIKIDATA); weea.wbEditEntity(null, "enwiki", null, "item", "{}", false, false, 0, null, null); } @Test(expected = IllegalArgumentException.class) public void testNewAndTitle() throws IOException, MediaWikiApiErrorException { WbEditingAction weea = new WbEditingAction( new MockBasicApiConnection(), Datamodel.SITE_WIKIDATA); weea.wbEditEntity(null, null, "Title", "item", "{}", false, false, 0, null, null); } @Test(expected = IllegalArgumentException.class) public void testNoTask() throws IOException, MediaWikiApiErrorException { WbEditingAction weea = new WbEditingAction( new MockBasicApiConnection(), Datamodel.SITE_WIKIDATA); weea.wbEditEntity(null, null, null, null, "{}", false, false, 0, null, null); } @Test public void testGetLag() throws IOException, MediaWikiApiErrorException { MockBasicApiConnection con = new MockBasicApiConnection(); Map params = new HashMap<>(); params.put("action", "query"); params.put("maxlag", "-1"); params.put("format", "json"); con.setWebResourceFromPath(params, this.getClass(), "/error-maxlag-full.json", CompressionType.NONE); WbEditingAction weea = new WbEditingAction( con, Datamodel.SITE_WIKIDATA); double lag = weea.getCurrentLag(); assertEquals(3.45, lag, 0.001); } } WbGetEntitiesActionTest.java000066400000000000000000000153031444772566300360600ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; public class WbGetEntitiesActionTest { MockBasicApiConnection con; WbGetEntitiesAction action; @Before public void setUp() throws Exception { this.con = new MockBasicApiConnection(); Map<String, String> params = new HashMap<>(); params.put("action", "wbgetentities"); params.put("format", "json"); params.put("ids", "Q32063953"); this.con.setWebResourceFromPath(params, getClass(), "/wbgetentities-Q32063953.json", CompressionType.NONE); params.put("ids", "Q6|Q42|P31"); this.con.setWebResourceFromPath(params, getClass(), "/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE); params.put("props", "datatype|labels|aliases|descriptions|claims|sitelinks"); this.con.setWebResourceFromPath(params, getClass(), "/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE); params.put("languages", "en"); params.put("sitefilter", "enwiki"); this.con.setWebResourceFromPath(params, getClass(), "/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE); params.clear(); params.put("action", "wbgetentities"); params.put("format", "json"); params.put("ids", "M91629437"); this.con.setWebResourceFromPath(params, getClass(), "/wbgetentities-missing-mid.json", CompressionType.NONE); params.clear(); params.put("action", "wbgetentities"); params.put("format", "json"); params.put("titles", "File:Foo-1.png|File:Bar.svg"); params.put("sites", "commonswiki"); this.con.setWebResourceFromPath(params, getClass(), "/wbgetentities-titles.json", CompressionType.NONE); this.action = new WbGetEntitiesAction(this.con, Datamodel.SITE_WIKIDATA); } @Test public void testWbGetEntitiesWithProps() throws MediaWikiApiErrorException, IOException { WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.ids = "Q6|Q42|P31"; properties.props = "datatype|labels|aliases|descriptions|claims|sitelinks"; Map<String, EntityDocument> result1 = action.wbGetEntities(properties); Map<String, EntityDocument> result2 = action.wbGetEntities( properties.ids, null, null, properties.props, null, null); assertTrue(result1.containsKey("Q42")); assertEquals(result1, result2); } @Test public void testWbGetEntitiesNoProps() throws MediaWikiApiErrorException, IOException { WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.ids = "Q6|Q42|P31"; Map<String, EntityDocument> result1 = action.wbGetEntities(properties); Map<String, EntityDocument> result2 = action.wbGetEntities( properties.ids, null, null, properties.props, null, null); assertTrue(result1.containsKey("Q42")); assertEquals(result1, result2); } @Test public void testWbGetEntitiesRedirected() throws MediaWikiApiErrorException, IOException { WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.ids = "Q32063953"; Map<String, EntityDocument> result = action.wbGetEntities(properties); assertTrue(result.containsKey("Q32063953")); } @Test public void testWbGetEntitiesPropsFilters() throws MediaWikiApiErrorException, IOException { WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.ids = "Q6|Q42|P31"; properties.props = "datatype|labels|aliases|descriptions|claims|sitelinks"; properties.languages =
"en"; properties.sitefilter = "enwiki"; Map result1 = action.wbGetEntities(properties); Map result2 = action.wbGetEntities( properties.ids, null, null, properties.props, null, null); assertTrue(result1.containsKey("Q42")); assertEquals(result1, result2); } @Test public void testWbGetEntitiesTitles() throws MediaWikiApiErrorException, IOException { WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.titles = "File:Foo-1.png|File:Bar.svg"; properties.sites = "commonswiki"; Map result1 = action.wbGetEntities(properties); Map result2 = action.wbGetEntities( null, properties.sites, properties.titles, properties.props, null, null); assertTrue(result1.containsKey("File:Foo-1.png")); assertTrue(result1.containsKey("File:Bar.svg")); assertEquals(result1, result2); } @Test(expected = IOException.class) public void testWbGetEntitiesIoError() throws MediaWikiApiErrorException, IOException { WbGetEntitiesActionData properties = new WbGetEntitiesActionData(); properties.ids = "Q6|Q42|notmocked"; action.wbGetEntities(properties); } @Test(expected = IllegalArgumentException.class) public void testIdsAndTitles() throws MediaWikiApiErrorException, IOException { action.wbGetEntities("Q42", null, "Tim Berners Lee", null, null, null); } @Test(expected = IllegalArgumentException.class) public void testIdsAndSites() throws MediaWikiApiErrorException, IOException { action.wbGetEntities("Q42", "enwiki", null, null, null, null); } @Test(expected = IllegalArgumentException.class) public void testTitlesNoSites() throws MediaWikiApiErrorException, IOException { action.wbGetEntities(null, null, "Tim Berners Lee", null, null, null); } @Test(expected = IllegalArgumentException.class) public void testNoTitlesOrIds() throws MediaWikiApiErrorException, IOException { action.wbGetEntities(null, "enwiki", null, null, null, null); } // for https://github.com/Wikidata/Wikidata-Toolkit/issues/643 @Test public void testMissingMid() throws MediaWikiApiErrorException, IOException { action.wbGetEntities("M91629437", null, null, null, null, null); } } WbSearchEntitiesActionTest.java000066400000000000000000000077001444772566300365500ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; public class WbSearchEntitiesActionTest { MockBasicApiConnection con; WbSearchEntitiesAction action; @Before public void setUp() throws Exception { this.con = new MockBasicApiConnection(); Map<String, String> params = new HashMap<>(); params.put(ApiConnection.PARAM_ACTION, "wbsearchentities"); params.put(ApiConnection.PARAM_FORMAT, "json"); params.put("search", "abc"); params.put("language", "en"); this.con.setWebResourceFromPath(params, getClass(), "/wbsearchentities-abc.json", CompressionType.NONE); params.put("search", "some search string with no results"); this.con.setWebResourceFromPath(params, getClass(), "/wbsearchentities-empty.json", CompressionType.NONE); this.action = new WbSearchEntitiesAction(this.con, Datamodel.SITE_WIKIDATA); } @Test public void testWbSearchEntities() throws MediaWikiApiErrorException, IOException { List<WbSearchEntitiesResult> results = action.wbSearchEntities("abc", "en", null, null, null, null, null); assertEquals(7, results.size()); WbSearchEntitiesResult firstResult = results.get(0); assertEquals("Q169889", firstResult.getEntityId()); assertEquals("http://www.wikidata.org/entity/Q169889", firstResult.getConceptUri()); assertEquals("//www.wikidata.org/wiki/Q169889", firstResult.getUrl()); assertEquals("Q169889", firstResult.getTitle()); assertEquals(170288, firstResult.getPageId()); assertEquals("American Broadcasting Company", firstResult.getLabel()); assertEquals("American broadcast television network", firstResult.getDescription()); WbSearchEntitiesResult.Match match = new JacksonWbSearchEntitiesResult.JacksonMatch( "alias", "en", "ABC"); assertEquals(match, firstResult.getMatch()); List<String> aliases = new ArrayList<>(); aliases.add("ABC"); assertEquals(aliases, firstResult.getAliases()); } @Test public void testWbSearchEntitiesEmpty() throws MediaWikiApiErrorException, IOException { List<WbSearchEntitiesResult> results = action.wbSearchEntities( "some search string with no results", "en", null, null, null, null, null); assertTrue(results.isEmpty()); } @Test(expected = IllegalArgumentException.class) public void testMissingSearchString() throws MediaWikiApiErrorException, IOException { action.wbSearchEntities(null, "en", null, null, null, null, null); } @Test(expected = IllegalArgumentException.class) public void testMissingLanguage() throws MediaWikiApiErrorException, IOException { action.wbSearchEntities("abc", null, null, null, null, null, null); } } WikibaseDataEditorTest.java000066400000000000000000001360611444772566300357110ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.only; import static org.mockito.Mockito.when; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; import static org.wikidata.wdtk.datamodel.helpers.Datamodel.makeStringValue; import static org.wikidata.wdtk.datamodel.helpers.Datamodel.makeQuantityValue; import static org.wikidata.wdtk.datamodel.helpers.Datamodel.makeWikidataFormIdValue; import static org.wikidata.wdtk.datamodel.helpers.Datamodel.makeWikidataItemIdValue; import static org.wikidata.wdtk.datamodel.helpers.Datamodel.makeWikidataLexemeIdValue; import static org.wikidata.wdtk.datamodel.helpers.Datamodel.makeWikidataPropertyIdValue; import static org.wikidata.wdtk.datamodel.helpers.Datamodel.makeWikidataSenseIdValue; import java.io.IOException; import java.math.BigDecimal; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.AliasUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.FormUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.ItemDocumentBuilder; import org.wikidata.wdtk.datamodel.helpers.ItemUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.JsonSerializer; import org.wikidata.wdtk.datamodel.helpers.LexemeUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.PropertyDocumentBuilder; import org.wikidata.wdtk.datamodel.helpers.SenseUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementBuilder; import org.wikidata.wdtk.datamodel.helpers.StatementUpdateBuilder; import org.wikidata.wdtk.datamodel.helpers.TermUpdateBuilder; import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.EntityUpdate; import org.wikidata.wdtk.datamodel.interfaces.FormDocument; import org.wikidata.wdtk.datamodel.interfaces.FormUpdate; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemUpdate; import org.wikidata.wdtk.datamodel.interfaces.LexemeDocument; import org.wikidata.wdtk.datamodel.interfaces.LexemeUpdate; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue; import org.wikidata.wdtk.datamodel.interfaces.SenseDocument; import org.wikidata.wdtk.datamodel.interfaces.SenseUpdate; import org.wikidata.wdtk.datamodel.interfaces.Statement; import org.wikidata.wdtk.testing.MockStringContentFactory; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import 
org.wikidata.wdtk.wikibaseapi.apierrors.TagsApplyNotAllowedException; import org.wikidata.wdtk.wikibaseapi.apierrors.TokenErrorException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import static org.junit.Assert.*; public class WikibaseDataEditorTest { MockBasicApiConnection con; ItemIdValue Q5 = Datamodel.makeWikidataItemIdValue("Q5"); PropertyIdValue P31 = Datamodel.makeWikidataPropertyIdValue("P31"); static final String TEST_GUID = "427C0317-BA8C-95B0-16C8-1A1B5FAC1081"; MockGuidGenerator guids = new MockGuidGenerator(TEST_GUID); ObjectMapper mapper = new ObjectMapper(); @Before public void setUp() throws IOException { this.con = new MockBasicApiConnection(); Map<String, String> params = new HashMap<>(); params.put("action", "query"); params.put("meta", "tokens"); params.put("type", "csrf"); params.put("format", "json"); this.con.setWebResourceFromPath(params, this.getClass(), "/query-csrf-token-loggedin-response.json", CompressionType.NONE); } @Test public void testSetMaxLag() { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setMaxLag(3); assertEquals(3, wde.getMaxLag()); } @Test public void testSetMaxLagFirstWaitTime() { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setMaxLagFirstWaitTime(5432); assertEquals(5432, wde.getMaxLagFirstWaitTime()); } @Test public void testSetMaxLagBackOffFactor() { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setMaxLagBackOffFactor(2.7); assertTrue(2.69 < wde.getMaxLagBackOffFactor() && 2.71 > wde.getMaxLagBackOffFactor()); } @Test public void testSetMaxLagMaxRetries() { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setMaxLagMaxRetries(78); assertEquals(78, wde.getMaxLagMaxRetries()); } @Test public void testSetAverageTimePerEdit() { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setAverageTimePerEdit(5000); assertEquals(5000, wde.getAverageTimePerEdit()); } @Test public void testSetRemainingEdits() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setRemainingEdits(1); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); ItemDocument expectedResultDocument = ItemDocumentBuilder .forItemId(Datamodel.makeWikidataItemIdValue("Q1234")) .withRevisionId(1234).build(); String resultData = JsonSerializer .getJsonString(expectedResultDocument); String expectedResult = "{\"entity\":" + resultData + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("summary", "My summary"); params.put("new", "item"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); assertEquals(1, wde.getRemainingEdits()); ItemDocument result = wde .createItemDocument(itemDocument, "My summary", null); assertEquals(expectedResultDocument, result); assertEquals(0, wde.getRemainingEdits()); result = wde.createItemDocument(itemDocument, "My summary", null); assertNull(result); assertEquals(0, wde.getRemainingEdits()); } @Test public void testDisableEditing() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA);
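// Disabling editing sets the remaining-edits counter to zero, so the write call below should return null without contacting the API.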
wde.disableEditing(); assertEquals(0, wde.getRemainingEdits()); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); ItemDocument result = wde .createItemDocument(itemDocument, "My summary", null); assertNull(result); assertEquals(0, wde.getRemainingEdits()); } @Test public void testCreateItem() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); ItemDocument expectedResultDocument = ItemDocumentBuilder .forItemId(Datamodel.makeWikidataItemIdValue("Q1234")) .withRevisionId(1234).build(); String resultData = JsonSerializer .getJsonString(expectedResultDocument); String expectedResult = "{\"entity\":" + resultData + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("summary", "My summary"); params.put("new", "item"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("tags", "my-tag"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); EntityDocument result = wde .createEntityDocument(itemDocument, "My summary", Collections.singletonList("my-tag")); assertEquals(expectedResultDocument, result); assertEquals(-1, wde.getRemainingEdits()); } @Test(expected = TokenErrorException.class) public void testCreateItemBadToken() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("summary", "My summary"); params.put("new", "item"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); this.con.setWebResourceFromPath(params, this.getClass(), "/error-badtoken.json", CompressionType.NONE); wde.createItemDocument(itemDocument, "My summary", null); } @Test public void testCreateItemCachedToken() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); ItemDocument expectedResultDocument = ItemDocumentBuilder .forItemId(Datamodel.makeWikidataItemIdValue("Q1234")) .withRevisionId(1234).build(); String resultData = JsonSerializer .getJsonString(expectedResultDocument); String expectedResult = "{\"entity\":" + resultData + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("summary", "My summary"); params.put("new", "item"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); // Create item twice wde.createItemDocument(itemDocument, "My summary", null); ItemDocument result = wde .createItemDocument(itemDocument, "My summary", null); assertEquals(expectedResultDocument, result); } @Test public void testCreateItemWikibaseJsonBug() throws IOException, MediaWikiApiErrorException { // Test what happens if the API
returns JSON without an actual entity // document and without any respective key WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); ItemDocument expectedResultDocument = ItemDocumentBuilder .forItemId(Datamodel.makeWikidataItemIdValue("Q1234")) .withRevisionId(1234).build(); String expectedResult = "{\"entity\":" + "{\"type\":\"item\",\"aliases\":[],\"labels\":[],\"descriptions\":[],\"lastrevid\":1234,\"sitelinks\":[],\"id\":\"Q1234\",\"claims\":[]}" + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("summary", "My summary"); params.put("new", "item"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); ItemDocument result = wde .createItemDocument(itemDocument, "My summary", null); assertEquals(expectedResultDocument, result); } @Test(expected = IOException.class) public void testCreateItemBadEntityDocumentJson() throws IOException, MediaWikiApiErrorException { // Test what happens if the API returns JSON without an actual entity // document, but with a respective key pointing to an empty object WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); String expectedResult = "{\"entity\":" + "{}" + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("summary", "My summary"); params.put("new", "item"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); wde.createItemDocument(itemDocument, "My summary", null); } @Test(expected = IOException.class) public void testCreateItemMissingEntityDocumentJson() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemDocument itemDocument = ItemDocumentBuilder.forItemId( ItemIdValue.NULL).build(); String expectedResult = "{\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("summary", "My summary"); params.put("new", "item"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); wde.createItemDocument(itemDocument, "My summary", null); } @Test public void testCreatePropertyBot() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setEditAsBot(true); PropertyDocument propertyDocument = PropertyDocumentBuilder .forPropertyIdAndDatatype(PropertyIdValue.NULL, DatatypeIdValue.DT_ITEM).build(); PropertyDocument expectedResultDocument = PropertyDocumentBuilder .forPropertyIdAndDatatype( Datamodel.makeWikidataPropertyIdValue("P1234"), DatatypeIdValue.DT_ITEM).withRevisionId(1234).build(); String resultData = JsonSerializer .getJsonString(expectedResultDocument); String expectedResult = "{\"entity\":" + resultData + ",\"success\":1}"; Map<String, String>
params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("new", "property"); params.put("bot", ""); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(propertyDocument); params.put("data", data); con.setWebResource(params, expectedResult); EntityDocument result = wde.createEntityDocument(propertyDocument, null, null); assertTrue(wde.editAsBot()); assertEquals(expectedResultDocument, result); } @Test @Deprecated public void testEditItem() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withRevisionId(1234).build(); ItemDocument expectedResultDocument = ItemDocumentBuilder.forItemId(id) .withRevisionId(1235).build(); String resultData = JsonSerializer .getJsonString(expectedResultDocument); String expectedResult = "{\"entity\":" + resultData + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("id", "Q1234"); params.put("summary", "My summary"); params.put("tags", "tag1|tag2"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); ItemDocument result = wde.editItemDocument(itemDocument, false, "My summary", Arrays.asList("tag1", "tag2")); assertEquals(expectedResultDocument, result); } @Test public void testCreateMediaInfo() { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIMEDIA_COMMONS); MediaInfoDocument mediaInfoDocument = Datamodel.makeMediaInfoDocument(MediaInfoIdValue.NULL) .withLabel(Datamodel.makeMonolingualTextValue("test", "en")); assertThrows(UnsupportedOperationException.class, () -> wde.createEntityDocument(mediaInfoDocument, "summary", Collections.emptyList())); } @Test @Deprecated public void testEditMediaInfo() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIMEDIA_COMMONS); MediaInfoIdValue id = Datamodel.makeWikimediaCommonsMediaInfoIdValue("M12223"); MediaInfoDocument mediaInfoDocument = Datamodel.makeMediaInfoDocument(id) .withLabel(Datamodel.makeMonolingualTextValue("test", "en")); MediaInfoDocument expectedResultDocument = Datamodel.makeMediaInfoDocument(id) .withLabel(Datamodel.makeMonolingualTextValue("test", "en")) .withRevisionId(1235); String resultData = JsonSerializer .getJsonString(expectedResultDocument); String expectedResult = "{\"entity\":" + resultData + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("id", "M12223"); params.put("summary", "My summary"); params.put("tags", "tag1|tag2"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(mediaInfoDocument); params.put("data", data); con.setWebResource(params, expectedResult); MediaInfoDocument result = wde.editMediaInfoDocument(mediaInfoDocument, false, "My summary", Arrays.asList("tag1", "tag2")); assertEquals(expectedResultDocument, result); } @Test @Deprecated public void testStatementUpdateWithoutChanges() throws
MediaWikiApiErrorException, IOException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); ItemIdValue Q5 = Datamodel.makeWikidataItemIdValue("Q5"); PropertyIdValue P31 = Datamodel.makeWikidataPropertyIdValue("P31"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("ID-s1").build(); Statement s1dup = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(id).build(); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s1) .withRevisionId(1234).build(); wde.setRemainingEdits(10); ItemDocument editedItemDocument = wde.updateStatements( itemDocument, Collections.singletonList(s1dup), Collections.singletonList(s2), "Doing spurious changes", null); // no edit was made at all assertEquals(itemDocument, editedItemDocument); assertEquals(10, wde.getRemainingEdits()); } @Test @Deprecated public void testTermStatementUpdateWithoutChanges() throws MediaWikiApiErrorException, IOException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); MonolingualTextValue label = Datamodel.makeMonolingualTextValue("My label", "en"); MonolingualTextValue description = Datamodel.makeMonolingualTextValue("Meine Beschreibung", "de"); MonolingualTextValue alias = Datamodel.makeMonolingualTextValue("Mon alias", "fr"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("ID-s1").build(); Statement s1dup = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(id).build(); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withLabel(label) .withDescription(description) .withStatement(s1) .withRevisionId(1234).build(); wde.setRemainingEdits(10); ItemDocument editedItemDocument = wde.updateTermsStatements( itemDocument, Collections.singletonList(label), Collections.singletonList(description), Collections.emptyList(), Collections.singletonList(alias), Collections.singletonList(s1dup), Collections.singletonList(s2), "Doing spurious changes", null); // no edit was made at all assertEquals(itemDocument, editedItemDocument); assertEquals(10, wde.getRemainingEdits()); } @Test public void testNullEdit() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); wde.setRemainingEdits(10); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("ID-s1").build(); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s1) .withRevisionId(1234).build(); Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("id", "Q1234"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); params.put("data", "{}"); String data = JsonSerializer.getJsonString(itemDocument); String expectedResult = "{\"entity\":"+data+",\"success\":1}"; con.setWebResource(params, expectedResult); ItemDocument nullEditedItemDocument = wde.nullEdit(itemDocument); assertEquals(itemDocument, nullEditedItemDocument); assertEquals(9, wde.getRemainingEdits()); } @Test @Deprecated public void
testLabelEdit() throws MediaWikiApiErrorException, IOException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("ID-s1").build(); MonolingualTextValue label = Datamodel.makeMonolingualTextValue("My label", "en"); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s1) .withRevisionId(1234) .build(); ItemDocument expectedDocument = ItemDocumentBuilder.forItemId(id) .withLabel(label) .withStatement(s1) .withRevisionId(1235) .build(); Map<String, String> params = new HashMap<>(); params.put("action", "wbsetlabel"); params.put("id", "Q1234"); params.put("summary", "Adding a label"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); params.put("language", "en"); params.put("value", "My label"); String expectedResult = "{\"entity\":{\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"My label\"}},"+ "\"id\":\"Q1234\",\"type\":\"item\",\"lastrevid\":1235},\"success\":1}"; con.setWebResource(params, expectedResult); ItemDocument editedDocument = wde.updateTermsStatements(itemDocument, Collections.singletonList(label), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), "Adding a label", Collections.emptyList()); assertEquals(expectedDocument, editedDocument); } @Test @Deprecated public void testDescriptionEdit() throws MediaWikiApiErrorException, IOException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("ID-s1").build(); MonolingualTextValue description = Datamodel.makeMonolingualTextValue("My description", "en"); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s1) .withRevisionId(1234) .build(); ItemDocument expectedDocument = ItemDocumentBuilder.forItemId(id) .withDescription(description) .withStatement(s1) .withRevisionId(1235L) .build(); Map<String, String> params = new HashMap<>(); params.put("action", "wbsetdescription"); params.put("id", "Q1234"); params.put("summary", "Adding a description"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); params.put("language", "en"); params.put("value", "My description"); String expectedResult = "{\"entity\":{\"descriptions\":{\"en\":{\"language\":\"en\",\"value\":\"My description\"}},"+ "\"id\":\"Q1234\",\"type\":\"item\",\"lastrevid\":1235},\"success\":1}"; con.setWebResource(params, expectedResult); ItemDocument editedDocument = wde.updateTermsStatements(itemDocument, Collections.emptyList(), Collections.singletonList(description), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), "Adding a description", null); assertEquals(expectedDocument, editedDocument); } @Test @Deprecated public void testAliasEdit() throws MediaWikiApiErrorException, IOException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("ID-s1").build(); MonolingualTextValue label =
Datamodel.makeMonolingualTextValue("My label", "en"); MonolingualTextValue addedAlias = Datamodel.makeMonolingualTextValue("My added alias", "en"); MonolingualTextValue removedAlias = Datamodel.makeMonolingualTextValue("My removed alias", "en"); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s1) .withLabel(label) .withAlias(removedAlias) .withRevisionId(1234) .build(); ItemDocument expectedDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s1) .withLabel(label) .withAlias(addedAlias) .withRevisionId(1235) .build(); Map params = new HashMap<>(); params.put("action", "wbsetaliases"); params.put("id", "Q1234"); params.put("summary", "Changing aliases"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); params.put("language", "en"); params.put("add", "My added alias"); params.put("remove", "My removed alias"); String expectedResult = "{\"entity\":{\"aliases\":{\"en\":[{\"language\":\"en\",\"value\":\"My added alias\"}]},"+ "\"id\":\"Q1234\",\"type\":\"item\",\"lastrevid\":1235},\"success\":1}"; con.setWebResource(params, expectedResult); ItemDocument editedDocument = wde.updateTermsStatements(itemDocument, Collections.emptyList(), Collections.emptyList(), Collections.singletonList(addedAlias), Collections.singletonList(removedAlias), Collections.emptyList(), Collections.emptyList(), "Changing aliases", null); assertEquals(expectedDocument, editedDocument); } @Test @Deprecated public void testNewSingleStatement() throws MediaWikiApiErrorException, IOException { String guid = "8372EF7A-B72C-7DE2-98D0-DFB4-8EC8392AC28E"; WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA, new MockGuidGenerator(guid)); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("Q1234$"+guid).build(); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withRevisionId(1234) .build(); ItemDocument expectedDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s2) .withRevisionId(1235) .build(); String statementJson = JsonSerializer.getJsonString(s2); Map params = new HashMap<>(); params.put("action", "wbsetclaim"); params.put("summary", "Adding a claim"); params.put("tags", "statement-creation"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); params.put("claim", statementJson); String expectedResult = "{\"pageinfo\":{\"lastrevid\":1235},\"success\":1,\"claim\":"+statementJson+"}"; con.setWebResource(params, expectedResult); ItemDocument editedDocument = wde.updateTermsStatements(itemDocument, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.singletonList(s1), Collections.emptyList(), "Adding a claim", Collections.singletonList("statement-creation")); assertEquals(expectedDocument, editedDocument); } @Test @Deprecated public void testDeleteStatements() throws MediaWikiApiErrorException, IOException { String guid1 = "8372EF7A-B72C-7DE2-98D0-DFB4-8EC8392AC28E"; String guid2 = "4311895D-9091-4BC9-9B34-DFB4-1B00EE8CFA62"; WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); Statement s1 = 
StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("Q1234$"+guid1).build(); Statement s2 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("Q1234$"+guid2).build(); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withRevisionId(1234) .withStatement(s1) .withStatement(s2) .build(); ItemDocument expectedDocument = ItemDocumentBuilder.forItemId(id) .withRevisionId(1235) .build(); List<String> statementIds = Arrays.asList("Q1234$"+guid1, "Q1234$"+guid2); String statementsList = String.join("|", statementIds); Map<String, String> params = new HashMap<>(); params.put("action", "wbremoveclaims"); params.put("summary", "Removing claims"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); params.put("claim", statementsList); String expectedResult = "{\"pageinfo\":{\"lastrevid\":1235},\"success\":1,\"claims\":[\""+statementIds.get(0)+"\",\""+statementIds.get(1)+"\"]}"; con.setWebResource(params, expectedResult); ItemDocument editedDocument = wde.updateTermsStatements(itemDocument, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Arrays.asList(s1,s2), "Removing claims", null); assertEquals(expectedDocument, editedDocument); } @Test @Deprecated public void testEditProperty() throws IOException, MediaWikiApiErrorException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); PropertyIdValue id = Datamodel.makeWikidataPropertyIdValue("P1234"); PropertyDocument itemDocument = PropertyDocumentBuilder .forPropertyIdAndDatatype(id, DatatypeIdValue.DT_ITEM) .withRevisionId(1234).build(); PropertyDocument expectedResultDocument = PropertyDocumentBuilder .forPropertyIdAndDatatype(id, DatatypeIdValue.DT_ITEM) .withRevisionId(1235).build(); String resultData = JsonSerializer .getJsonString(expectedResultDocument); String expectedResult = "{\"entity\":" + resultData + ",\"success\":1}"; Map<String, String> params = new HashMap<>(); params.put("action", "wbeditentity"); params.put("id", "P1234"); params.put("summary", "My summary"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format", "json"); params.put("clear", ""); params.put("baserevid", "1234"); params.put("maxlag", "5"); String data = JsonSerializer.getJsonString(itemDocument); params.put("data", data); con.setWebResource(params, expectedResult); PropertyDocument result = wde.editPropertyDocument(itemDocument, true, "My summary", Collections.emptyList()); assertEquals(expectedResultDocument, result); } @Deprecated @SuppressWarnings("deprecation") @Test(expected = TagsApplyNotAllowedException.class) public void testApplyInvalidTag() throws MediaWikiApiErrorException, IOException { WikibaseDataEditor wde = new WikibaseDataEditor(this.con, Datamodel.SITE_WIKIDATA); ItemIdValue id = Datamodel.makeWikidataItemIdValue("Q1234"); Statement s1 = StatementBuilder.forSubjectAndProperty(id, P31) .withValue(Q5).withId("ID-s1").build(); MonolingualTextValue description = Datamodel.makeMonolingualTextValue("My description", "en"); ItemDocument itemDocument = ItemDocumentBuilder.forItemId(id) .withStatement(s1) .withRevisionId(1234) .build(); Map<String, String> params = new HashMap<>(); params.put("action", "wbsetdescription"); params.put("id", "Q1234"); params.put("summary", "testing tags"); params.put("tags", "tag_which_does_not_exist"); params.put("token", "42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"); params.put("format",
"json"); params.put("baserevid", "1234"); params.put("maxlag", "5"); params.put("language", "en"); params.put("value", "My description"); String expectedResult = "{\"error\":" + "{\"code\":\"tags-apply-not-allowed-one\"," + "\"info\":\"The tag \\\"tag_which_does_not_exist\\\" is not allowed to be manually applied.\"," + "\"*\":\"See https://www.wikidata.org/w/api.php for API usage. Subscribe to the mediawiki-api-announce mailing list at <https://lists.wikimedia.org/mailman/listinfo/mediawiki-api-announce> for notice of API deprecations and breaking changes.\"}," + "\"servedby\":\"mw1276\"}"; con.setWebResource(params, expectedResult); wde.updateTermsStatements(itemDocument, Collections.emptyList(), Collections.singletonList(description), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), "testing tags", Collections.singletonList("tag_which_does_not_exist")); } private EditingResult mockEntityUpdate(WbEditingAction action, EntityUpdate update) throws MediaWikiApiErrorException, IOException { WikibaseDataFetcher fetcher = new WikibaseDataFetcher(con, Datamodel.SITE_WIKIDATA); WikibaseDataEditor wde = new WikibaseDataEditor(action, fetcher, Datamodel.SITE_WIKIDATA, guids); return wde.editEntityDocument(update, false, "test summary", Arrays.asList("tag1")); } private JsonNode json(String resourceFileName) { try { String contents = MockStringContentFactory .getStringFromUrl(WikibaseDataEditor.class.getResource(resourceFileName)); return mapper.readTree(contents); } catch (Exception e) { throw new IllegalArgumentException("Unable to read test JSON resource "+resourceFileName); } } @Test public void testReductionToSetNewClaim() throws MediaWikiApiErrorException, IOException { ItemIdValue subject = makeWikidataItemIdValue("Q1"); Statement statement = StatementBuilder.forSubjectAndProperty(subject, makeWikidataPropertyIdValue("P1")) .withValue(makeQuantityValue(new BigDecimal("456"))) .build(); Statement statementWithId = statement.withStatementId(guids.freshStatementId(subject.getId())); WbEditingAction action = mock(WbEditingAction.class); when(action.wbSetClaim( JsonSerializer.getJsonString(statementWithId), false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbsetclaim.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder.forBaseRevisionId(subject, 123) .updateStatements(StatementUpdateBuilder.create() .add(statement) .build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbSetClaim( JsonSerializer.getJsonString(statementWithId), false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToSetExistingClaim() throws MediaWikiApiErrorException, IOException { ItemIdValue subject = makeWikidataItemIdValue("Q1"); Statement statement = StatementBuilder.forSubjectAndProperty(subject, makeWikidataPropertyIdValue("P1")) .withValue(makeQuantityValue(new BigDecimal("456"))) .withId(guids.freshStatementId(subject.getId())) .build(); WbEditingAction action = mock(WbEditingAction.class); when(action.wbSetClaim( JsonSerializer.getJsonString(statement), false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbsetclaim.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder.forBaseRevisionId(subject, 123) .updateStatements(StatementUpdateBuilder.create().replace(statement).build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbSetClaim( JsonSerializer.getJsonString(statement), false, 123, 
"test summary", Arrays.asList("tag1")); } @Test public void testReductionToRemoveClaims() throws MediaWikiApiErrorException, IOException { ItemIdValue subject = makeWikidataItemIdValue("Q1"); String id = guids.freshStatementId(subject.getId()); WbEditingAction action = mock(WbEditingAction.class); when(action.wbRemoveClaims(Arrays.asList(id), false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbremoveclaims.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder.forBaseRevisionId(subject, 123) .updateStatements(StatementUpdateBuilder.create().remove(id).build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbRemoveClaims(Arrays.asList(id), false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToSetLabel() throws MediaWikiApiErrorException, IOException { WbEditingAction action = mock(WbEditingAction.class); when(action.wbSetLabel( "Q1", null, null, null, "en", "hello", false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbsetlabel.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder .forBaseRevisionId(makeWikidataItemIdValue("Q1"), 123) .updateLabels(TermUpdateBuilder.create() .put(Datamodel.makeMonolingualTextValue("hello", "en")) .build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbSetLabel( "Q1", null, null, null, "en", "hello", false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToSetNullLabel() throws MediaWikiApiErrorException, IOException { WbEditingAction action = mock(WbEditingAction.class); when(action.wbSetLabel( "Q1", null, null, null, "en", null, false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbsetlabel-null.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder .forBaseRevisionId(makeWikidataItemIdValue("Q1"), 123) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbSetLabel( "Q1", null, null, null, "en", null, false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToSetDescription() throws MediaWikiApiErrorException, IOException { WbEditingAction action = mock(WbEditingAction.class); when(action.wbSetDescription( "Q1", null, null, null, "en", "hello", false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbsetdescription.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder .forBaseRevisionId(makeWikidataItemIdValue("Q1"), 123) .updateDescriptions(TermUpdateBuilder.create() .put(Datamodel.makeMonolingualTextValue("hello", "en")) .build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbSetDescription( "Q1", null, null, null, "en", "hello", false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToSetNullDescription() throws MediaWikiApiErrorException, IOException { WbEditingAction action = mock(WbEditingAction.class); when(action.wbSetDescription( "Q1", null, null, null, "en", null, false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbsetdescription-null.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder .forBaseRevisionId(makeWikidataItemIdValue("Q1"), 123) .updateDescriptions(TermUpdateBuilder.create().remove("en").build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbSetDescription( "Q1", null, null, null, 
"en", null, false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToSetAliases() throws MediaWikiApiErrorException, IOException { WbEditingAction action = mock(WbEditingAction.class); when(action.wbSetAliases("Q1", null, null, null, "en", Arrays.asList("hello"), Arrays.asList("bye"), null, false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(json("/wbsetaliases-add-remove.json")); EditingResult result = mockEntityUpdate(action, ItemUpdateBuilder .forBaseRevisionId(makeWikidataItemIdValue("Q1"), 123) .updateAliases("en", AliasUpdateBuilder.create() .add(Datamodel.makeMonolingualTextValue("hello", "en")) .remove(Datamodel.makeMonolingualTextValue("bye", "en")) .build()) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbSetAliases("Q1", null, null, null, "en", Arrays.asList("hello"), Arrays.asList("bye"), null, false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToSenseEdit() throws MediaWikiApiErrorException, IOException { SenseUpdate update = SenseUpdateBuilder .forEntityId(makeWikidataSenseIdValue("L1-S1")) .updateGlosses(TermUpdateBuilder.create().remove("en").build()) .build(); SenseDocument senseDocument = mock(SenseDocument.class); when(senseDocument.getRevisionId()).thenReturn(1234L); WbEditingAction action = mock(WbEditingAction.class); when(action.wbEditEntity("L1-S1", null, null, null, JsonSerializer.getJsonString(update), false, false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(senseDocument); EditingResult result = mockEntityUpdate(action, LexemeUpdateBuilder .forBaseRevisionId(makeWikidataLexemeIdValue("L1"), 123) .updateSense(update) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbEditEntity("L1-S1", null, null, null, JsonSerializer.getJsonString(update), false, false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToFormEdit() throws MediaWikiApiErrorException, IOException { FormUpdate update = FormUpdateBuilder .forEntityId(makeWikidataFormIdValue("L1-F1")) .updateRepresentations(TermUpdateBuilder.create().remove("en").build()) .build(); FormDocument formDocument = mock(FormDocument.class); when(formDocument.getRevisionId()).thenReturn(1234L); WbEditingAction action = mock(WbEditingAction.class); when(action.wbEditEntity("L1-F1", null, null, null, JsonSerializer.getJsonString(update), false, false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(formDocument); EditingResult result = mockEntityUpdate(action, LexemeUpdateBuilder .forBaseRevisionId(makeWikidataLexemeIdValue("L1"), 123) .updateForm(update) .build()); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbEditEntity("L1-F1", null, null, null, JsonSerializer.getJsonString(update), false, false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testUnreducedEntityEdit() throws MediaWikiApiErrorException, IOException { LexemeUpdate update = LexemeUpdateBuilder .forBaseRevisionId(makeWikidataLexemeIdValue("L1"), 123) .setLanguage(Datamodel.makeWikidataItemIdValue("Q1")) .build(); LexemeDocument lexemeDocument = mock(LexemeDocument.class); when(lexemeDocument.getRevisionId()).thenReturn(1234L); WbEditingAction action = mock(WbEditingAction.class); when(action.wbEditEntity("L1", null, null, null, JsonSerializer.getJsonString(update), false, false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(lexemeDocument); EditingResult result = mockEntityUpdate(action, update); 
assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbEditEntity("L1", null, null, null, JsonSerializer.getJsonString(update), false, false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testIrreducibleClearingEdit() throws MediaWikiApiErrorException, IOException { ItemUpdate update = ItemUpdateBuilder .forBaseRevisionId(makeWikidataItemIdValue("Q1"), 123) .updateLabels(TermUpdateBuilder.create().remove("en").build()) .build(); ItemDocument itemDocument = mock(ItemDocument.class); when(itemDocument.getRevisionId()).thenReturn(1234L); WbEditingAction action = mock(WbEditingAction.class); when(action.wbEditEntity("Q1", null, null, null, JsonSerializer.getJsonString(update), true, false, 123, "test summary", Arrays.asList("tag1"))) .thenReturn(itemDocument); WikibaseDataFetcher fetcher = new WikibaseDataFetcher(con, Datamodel.SITE_WIKIDATA); WikibaseDataEditor wde = new WikibaseDataEditor(action, fetcher, Datamodel.SITE_WIKIDATA, guids); EditingResult result = wde.editEntityDocument(update, true, "test summary", Arrays.asList("tag1")); assertEquals(result, new EditingResult(1234L)); verify(action, only()).wbEditEntity("Q1", null, null, null, JsonSerializer.getJsonString(update), true, false, 123, "test summary", Arrays.asList("tag1")); } @Test public void testReductionToNoEdit() throws MediaWikiApiErrorException, IOException { WbEditingAction action = mock(WbEditingAction.class); EditingResult result = mockEntityUpdate(action, LexemeUpdateBuilder .forBaseRevisionId(makeWikidataLexemeIdValue("L1"), 123) .build()); assertEquals(result, new EditingResult(0L)); verifyNoInteractions(action); } } WikibaseDataFetcherTest.java000066400000000000000000000363771444772566300360540ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapipackage org.wikidata.wdtk.wikibaseapi; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* #L% */ import java.io.IOException; import java.util.*; import org.junit.Before; import org.junit.Test; import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.interfaces.EntityDocument; import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue; import org.wikidata.wdtk.util.CompressionType; import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException; import org.wikidata.wdtk.wikibaseapi.apierrors.NoSuchEntityErrorException; import static org.junit.Assert.*; public class WikibaseDataFetcherTest { MockBasicApiConnection con; WikibaseDataFetcher wdf; @Before public void setUp() { con = new MockBasicApiConnection(); wdf = new WikibaseDataFetcher(con, Datamodel.SITE_WIKIDATA); } @Test public void testWbGetEntities() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); setStandardParameters(parameters); parameters.put("ids", "Q6|Q42|P31"); con.setWebResourceFromPath(parameters, this.getClass(), "/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE); Map<String, EntityDocument> results = wdf.getEntityDocuments("Q6", "Q42", "P31"); assertEquals(2, results.size()); assertFalse(results.containsKey("Q6")); assertTrue(results.containsKey("Q42")); assertTrue(results.containsKey("P31")); } @Test public void testGetEntityDocument() throws IOException, MediaWikiApiErrorException { // We use the mock answer as for a multi request; no problem Map<String, String> parameters = new HashMap<>(); setStandardParameters(parameters); parameters.put("ids", "Q42"); con.setWebResourceFromPath(parameters, this.getClass(), "/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE); EntityDocument result = wdf.getEntityDocument("Q42"); assertNotNull(result); } @Test public void testGetMissingEntityDocument() throws IOException, MediaWikiApiErrorException { // List<String> entityIds = Arrays.asList("Q6"); Map<String, String> parameters = new HashMap<>(); setStandardParameters(parameters); parameters.put("ids", "Q6"); // We use the mock answer as for a multi request; no problem con.setWebResourceFromPath(parameters, getClass(), "/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE); EntityDocument result = wdf.getEntityDocument("Q6"); assertNull(result); } @Test(expected = NoSuchEntityErrorException.class) public void testWbGetEntitiesError() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); setStandardParameters(parameters); parameters.put("ids", "bogus"); // We use the mock answer as for a multi request; no problem con.setWebResourceFromPath(parameters, getClass(), "/wbgetentities-bogus.json", CompressionType.NONE); wdf.getEntityDocuments("bogus"); } @Test public void testWbGetEntitiesEmpty() throws IOException, MediaWikiApiErrorException { Map<String, EntityDocument> results = wdf .getEntityDocuments(Collections.emptyList()); assertEquals(0, results.size()); } @Test public void testWbGetEntitiesTitle() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); this.setStandardParameters(parameters); parameters.put("titles", "Douglas Adams"); parameters.put("sites", "enwiki"); con.setWebResourceFromPath(parameters, getClass(), "/wbgetentities-Douglas-Adams.json", CompressionType.NONE); EntityDocument result = wdf.getEntityDocumentByTitle("enwiki", "Douglas Adams"); assertEquals("Q42", result.getEntityId().getId()); } @Test public void testWbGetEntitiesTitleEmpty() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); this.setStandardParameters(parameters); parameters.put("titles", "1234567890"); parameters.put("sites", "dewiki");
con.setWebResourceFromPath(parameters, getClass(), "/wbgetentities-1234567890-missing.json", CompressionType.NONE); EntityDocument result = wdf.getEntityDocumentByTitle("dewiki", "1234567890"); assertNull(result); } @Test public void testWbGetMediaInfoEntityFromId() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); this.setStandardParameters(parameters); parameters.put("ids", "M65057"); con.setWebResourceFromPath(parameters, getClass(), "/wbgetentities-RandomImage.jpg.json", CompressionType.NONE); EntityDocument result = wdf.getEntityDocument("M65057"); assertEquals("M65057", result.getEntityId().getId()); } @Test public void testWbGetMediaInfoEntityFromTitle() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); this.setStandardParameters(parameters); parameters.put("titles", "File:RandomImage 4658098723742867.jpg"); parameters.put("sites", "commonswiki"); con.setWebResourceFromPath(parameters, getClass(), "/wbgetentities-RandomImage.jpg.json", CompressionType.NONE); EntityDocument result = wdf.getEntityDocumentByTitle("commonswiki", "File:RandomImage 4658098723742867.jpg"); assertEquals("M65057", result.getEntityId().getId()); } @Test public void testGetMediaInfoId() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Albert Einstein Head.jpg"); con.setWebResourceFromPath(parameters, getClass(), "/query-Albert Einstein Head.jpg.json", CompressionType.NONE); MediaInfoIdValue result = wdf.getMediaInfoIdByFileName("File:Albert Einstein Head.jpg"); assertEquals("M925243", result.getId()); } @Test public void testGetMediaInfoIdWithoutPrefix() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Albert Einstein Head.jpg"); con.setWebResourceFromPath(parameters, getClass(), "/query-Albert Einstein Head.jpg.json", CompressionType.NONE); MediaInfoIdValue result = wdf.getMediaInfoIdByFileName("Albert Einstein Head.jpg"); assertEquals("M925243", result.getId()); } @Test public void testGetMediaInfoIdNormalized() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Albert_Einstein_Head.jpg"); con.setWebResourceFromPath(parameters, getClass(), "/query-Albert Einstein Head normalized.jpg.json", CompressionType.NONE); MediaInfoIdValue result = wdf.getMediaInfoIdByFileName("File:Albert_Einstein_Head.jpg"); assertEquals("M925243", result.getId()); } @Test public void testGetMediaInfoIdNormalizedWithoutPrefix() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Albert_Einstein_Head.jpg"); con.setWebResourceFromPath(parameters, getClass(), "/query-Albert Einstein Head normalized.jpg.json", CompressionType.NONE); MediaInfoIdValue result = wdf.getMediaInfoIdByFileName("Albert_Einstein_Head.jpg"); assertEquals("M925243", result.getId()); } @Test public void testGetMediaInfoIdDuplicated1() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Cat.jpg|File:Cat.jpg"); con.setWebResourceFromPath(parameters,
getClass(), "/query-Cat.jpg.json", CompressionType.NONE); Map<String, MediaInfoIdValue> result = wdf.getMediaInfoIdsByFileName("Cat.jpg", "Cat.jpg"); assertEquals(result.size(), 1); assertEquals("M32455073", result.get("Cat.jpg").getId()); } @Test public void testGetMediaInfoIdDuplicated2() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Cat.jpg|File:Cat.jpg"); con.setWebResourceFromPath(parameters, getClass(), "/query-Cat.jpg.json", CompressionType.NONE); Map<String, MediaInfoIdValue> result = wdf.getMediaInfoIdsByFileName("Cat.jpg", "File:Cat.jpg"); assertEquals(result.size(), 2); assertEquals("M32455073", result.get("Cat.jpg").getId()); assertEquals("M32455073", result.get("File:Cat.jpg").getId()); } @Test public void testGetMediaInfoIdNotFound() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Not Found"); con.setWebResourceFromPath(parameters, getClass(), "/query-Not Found.json", CompressionType.NONE); MediaInfoIdValue result = wdf.getMediaInfoIdByFileName("Not Found"); assertNull(result); } @Test public void testGetMediaInfoIdNotFoundTwice() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); parameters.put("action", "query"); parameters.put("format", "json"); parameters.put("titles", "File:Not Found|File:Not Found Either"); con.setWebResourceFromPath(parameters, getClass(), "/query-Not Found twice.json", CompressionType.NONE); Map<String, MediaInfoIdValue> result = wdf.getMediaInfoIdsByFileName("Not Found", "Not Found Either"); assertEquals(result.size(), 2); assertNull(result.get("Not Found")); assertNull(result.get("Not Found Either")); } @Test public void testWbGetVirtualMediaInfoEntityFromTitle() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); this.setStandardParameters(parameters); parameters.put("titles", "File:Test.jpg"); parameters.put("sites", "commonswiki"); con.setWebResourceFromPath(parameters, getClass(), "/wbgetentities-virtual-Test.jpg.json", CompressionType.NONE); EntityDocument result = wdf.getEntityDocumentByTitle("commonswiki", "File:Test.jpg"); assertEquals("M4215516", result.getEntityId().getId()); } @Test public void testWikidataDataFetcher() { WikibaseDataFetcher wbdf = WikibaseDataFetcher.getWikidataDataFetcher(); assertEquals(Datamodel.SITE_WIKIDATA, wbdf.siteIri); assertEquals(ApiConnection.URL_WIKIDATA_API, wbdf.wbGetEntitiesAction.connection.apiBaseUrl); } @Test public void testWbGetEntitesSplitted() throws IOException, MediaWikiApiErrorException { List<String> entityIds = Arrays.asList("Q6", "Q42", "P31", "Q1"); Map<String, String> parameters1 = new HashMap<>(); setStandardParameters(parameters1); parameters1.put("ids", "Q6|Q42|P31"); Map<String, String> parameters2 = new HashMap<>(); setStandardParameters(parameters2); parameters2.put("ids", "Q1"); con.setWebResourceFromPath(parameters1, this.getClass(), "/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE); con.setWebResourceFromPath(parameters2, this.getClass(), "/wbgetentities-Q1.json", CompressionType.NONE); wdf.maxListSize = 3; Map<String, EntityDocument> results = wdf.getEntityDocuments(entityIds); assertEquals(3, results.size()); assertFalse(results.containsKey("Q6")); assertTrue(results.containsKey("Q1")); assertTrue(results.containsKey("P31")); assertTrue(results.containsKey("Q42")); } @Test public void testGetEntitiesTitleSplitted() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters1 = new HashMap<>(); this.setStandardParameters(parameters1); parameters1.put("titles", "Douglas Adams"); parameters1.put("sites", "enwiki"); con.setWebResourceFromPath(parameters1, getClass(), "/wbgetentities-Douglas-Adams.json", CompressionType.NONE); Map<String, String> parameters2 = new HashMap<>(); this.setStandardParameters(parameters2); parameters2.put("titles", "Oliver Kahn"); parameters2.put("sites", "enwiki"); con.setWebResourceFromPath(parameters2, getClass(), "/wbgetentites-Oliver-Kahn.json", CompressionType.NONE); wdf.maxListSize = 1; Map<String, EntityDocument> result = wdf.getEntityDocumentsByTitle( "enwiki", "Oliver Kahn", "Douglas Adams"); assertEquals(2, result.keySet().size()); assertEquals("Q42", result.get("Douglas Adams").getEntityId().getId()); assertEquals("Q131261", result.get("Oliver Kahn").getEntityId().getId()); } @Test public void getGetMediaInfoIdsSplitted() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters1 = new HashMap<>(); parameters1.put("action", "query"); parameters1.put("format", "json"); parameters1.put("titles", "File:Cat.jpg"); con.setWebResourceFromPath(parameters1, getClass(), "/query-Cat.jpg.json", CompressionType.NONE); Map<String, String> parameters2 = new HashMap<>(); parameters2.put("action", "query"); parameters2.put("format", "json"); parameters2.put("titles", "File:Albert Einstein Head.jpg"); con.setWebResourceFromPath(parameters2, getClass(), "/query-Albert Einstein Head.jpg.json", CompressionType.NONE); wdf.maxListSize = 1; Map<String, MediaInfoIdValue> result = wdf.getMediaInfoIdsByFileName("Cat.jpg", "File:Albert Einstein Head.jpg"); assertEquals(2, result.size()); assertEquals("M32455073", result.get("Cat.jpg").getId()); assertEquals("M925243", result.get("File:Albert Einstein Head.jpg").getId()); } private void setStandardParameters(Map<String, String> parameters) { parameters.put("action", "wbgetentities"); parameters.put("format", "json"); parameters.put("props", "info|datatype|labels|aliases|descriptions|claims|sitelinks"); } @Test public void testWbSearchEntities() throws IOException, MediaWikiApiErrorException { Map<String, String> parameters = new HashMap<>(); setStandardSearchParameters(parameters); parameters.put("search", "abc"); parameters.put("language", "en"); con.setWebResourceFromPath(parameters, this.getClass(), "/wbsearchentities-abc.json", CompressionType.NONE); List<WbSearchEntitiesResult> results = wdf.searchEntities("abc"); assertEquals(7, results.size()); List<String> expectedIds = new ArrayList<>(); expectedIds.add("Q169889"); expectedIds.add("Q286874"); expectedIds.add("Q781365"); expectedIds.add("Q287076"); expectedIds.add("Q304330"); expectedIds.add("Q1057802"); expectedIds.add("Q26298"); List<String> actualIds = new ArrayList<>(); for (WbSearchEntitiesResult result: results) { actualIds.add(result.getEntityId()); } assertEquals(expectedIds, actualIds); } private void setStandardSearchParameters(Map<String, String> parameters) { parameters.put("action", "wbsearchentities"); parameters.put("format", "json"); } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapi/apierrors/000077500000000000000000000000001444772566300325655ustar00rootroot00000000000000MediaWikiApiErrorHandlerTest.java000066400000000000000000000046711444772566300410260ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/java/org/wikidata/wdtk/wikibaseapi/apierrorspackage org.wikidata.wdtk.wikibaseapi.apierrors; /* * #%L * Wikidata Toolkit Wikibase API * %% * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.junit.Assert.assertEquals; import org.junit.Test; public class MediaWikiApiErrorHandlerTest { @Test public void testUnknownError() { String code = ""; String message = ""; try { MediaWikiApiErrorHandler.throwMediaWikiApiErrorException("unknown", "some message"); } catch (MediaWikiApiErrorException e) { code = e.getErrorCode(); message = e.getErrorMessage(); } assertEquals("unknown", code); assertEquals("some message", message); } @Test(expected = TokenErrorException.class) public void testNoTokenError() throws MediaWikiApiErrorException { MediaWikiApiErrorHandler.throwMediaWikiApiErrorException( MediaWikiApiErrorHandler.ERROR_NO_TOKEN, "some message"); } @Test(expected = TokenErrorException.class) public void testBadTokenError() throws MediaWikiApiErrorException { MediaWikiApiErrorHandler.throwMediaWikiApiErrorException( MediaWikiApiErrorHandler.ERROR_INVALID_TOKEN, "some message"); } @Test(expected = EditConflictErrorException.class) public void testEditConflictError() throws MediaWikiApiErrorException { MediaWikiApiErrorHandler.throwMediaWikiApiErrorException( MediaWikiApiErrorHandler.ERROR_EDIT_CONFLICT, "some message"); } @Test(expected = NoSuchEntityErrorException.class) public void testNoSuchEntityError() throws MediaWikiApiErrorException { MediaWikiApiErrorHandler.throwMediaWikiApiErrorException( MediaWikiApiErrorHandler.ERROR_NO_SUCH_ENTITY, "some message"); } @Test(expected = MaxlagErrorException.class) public void testMaxlagError() throws MediaWikiApiErrorException { MediaWikiApiErrorHandler.throwMediaWikiApiErrorException( MediaWikiApiErrorHandler.ERROR_MAXLAG, "Waiting for 10.64.16.27: 2 seconds lagged"); } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/000077500000000000000000000000001444772566300240235ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/assert-user-failed.json000066400000000000000000000005441444772566300304200ustar00rootroot00000000000000{"error":{"code":"assertuserfailed","info":"Assertion that the user is logged in failed.","*":"See https://www.wikidata.org/w/api.php for API usage. Subscribe to the mediawiki-api-announce mailing list at <https://lists.wikimedia.org/mailman/listinfo/mediawiki-api-announce> for notice of API deprecations and breaking changes."},"servedby":"mw1347"}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/clientLoginError.json000066400000000000000000000005011444772566300301730ustar00rootroot00000000000000{ "error": { "code": "badtoken", "info": "Invalid CSRF token.", "*": "See http://localhost/w/api.php for API usage. Subscribe to the mediawiki-api-announce mailing list at <https://lists.wikimedia.org/mailman/listinfo/mediawiki-api-announce> for notice of API deprecations and breaking changes." 
} }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/clientLoginSuccess.json000066400000000000000000000001241444772566300305130ustar00rootroot00000000000000{ "clientlogin": { "status": "PASS", "username": "Admin" } }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/error-badtoken.json000066400000000000000000000001171444772566300276330ustar00rootroot00000000000000{ "error": { "code": "badtoken", "info": "bla bla" } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/error-maxlag-full.json000066400000000000000000000007271444772566300302640ustar00rootroot00000000000000{ "error": { "code": "maxlag", "info": "Waiting for all: 3.45 seconds lagged.", "host": "all", "lag": 3.45, "type": "wikibase-queryservice", "queryserviceLag": 626, "*": "See https://www.wikidata.org/w/api.php for API usage. Subscribe to the mediawiki-api-announce mailing list at <https://lists.wikimedia.org/mailman/listinfo/mediawiki-api-announce> for notice of API deprecations and breaking changes." }, "servedby": "mw1280" } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/error-maxlag.json000066400000000000000000000001571444772566300273210ustar00rootroot00000000000000{ "error": { "code": "maxlag", "info": "Waiting for 10.64.16.27: 2 seconds lagged" } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/error.json000066400000000000000000000007261444772566300260540ustar00rootroot00000000000000{ "servedby": "mw1147", "error": { "code": "no-such-entity", "info": "Could not find such an entity (Invalid id: Q0)", "messages": [ { "name": "wikibase-api-no-such-entity", "parameters": [], "html": { "*": "Could not find such an entity" } } ], "*": "See https://www.wikidata.org/w/api.php for API usage" } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/loginError.json000066400000000000000000000000571444772566300270420ustar00rootroot00000000000000{ "login": { "result": "NotExists" } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/loginFailed.json000066400000000000000000000001671444772566300271370ustar00rootroot00000000000000{ "login": { "result": "Failed", "reason": "Incorrect username or password entered. Please try again." 
} } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/loginSuccess.json000066400000000000000000000004041444772566300273550ustar00rootroot00000000000000{ "login": { "result": "Success", "lguserid": 12345, "lgusername": "Username", "lgtoken": "b5780b6e2f27e20b450921d9461010b4", "cookieprefix": "enwiki", "sessionid": "17ab96bd8ffbe8ca58a78657a918558e" } } query-Albert Einstein Head normalized.jpg.json000066400000000000000000000005001444772566300344730ustar00rootroot00000000000000Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources{ "batchcomplete": "", "query": { "normalized": [ { "from": "File:Albert_Einstein_Head.jpg", "to": "File:Albert Einstein Head.jpg" } ], "pages": { "925243": { "pageid": 925243, "ns": 6, "title": "File:Albert Einstein Head.jpg" } } } }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/query-Albert Einstein Head.jpg.json000066400000000000000000000002661444772566300324360ustar00rootroot00000000000000{ "batchcomplete": "", "query": { "pages": { "925243": { "pageid": 925243, "ns": 6, "title": "File:Albert Einstein Head.jpg" } } } }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/query-Cat.jpg.json000066400000000000000000000002511444772566300273450ustar00rootroot00000000000000{ "batchcomplete": "", "query": { "pages": { "32455073": { "pageid": 32455073, "ns": 6, "title": "File:Cat.jpg" } } } }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/query-Not Found twice.json000066400000000000000000000004101444772566300307440ustar00rootroot00000000000000{ "batchcomplete": "", "query": { "pages": { "-1": { "ns": 6, "title": "File:Not Found", "missing": "" }, "-2": { "ns": 6, "title": "File:Not Found Either", "missing": "" } } } }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/query-Not Found.json000066400000000000000000000002401444772566300276510ustar00rootroot00000000000000{ "batchcomplete": "", "query": { "pages": { "-1": { "ns": 6, "title": "File:Not Found", "missing": "" } } } }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/query-csrf-token-loggedin-response.json000066400000000000000000000001431444772566300335540ustar00rootroot00000000000000{"batchcomplete":"","query":{"tokens":{"csrftoken":"42307b93c79b0cb558d2dfb4c3c92e0955e06041+\\"}}}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/query-login-token.json000066400000000000000000000001411444772566300303030ustar00rootroot00000000000000{ "query": { "tokens": { "logintoken": "b5780b6e2f27e20b450921d9461010b4" } } }Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/warnings.json000066400000000000000000000011021444772566300265400ustar00rootroot00000000000000{ "warnings": { "main": { "*": "Unrecognized parameter: 'rmparam'" }, "query": { "*": "Unrecognized value for parameter 'list': raremodule" }, "wbeditentity": { "messages": [ { "name":"wikibase-self-conflict-patched", "parameters":[], "html": { "*":"Your edit was patched into the latest version, overriding some of your own intermediate changes."} } ] }, "test": { "*": { "unknown": "structure" } } } } 
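Editor's note: the query-*.json and wbgetentities-*.json files in this resources directory are canned MediaWiki API responses that MockBasicApiConnection serves to the tests. The following is a minimal, hypothetical wiring sketch (not a file of this archive); it uses only APIs already exercised in WikibaseDataFetcherTest above, and the method name fixtureWiringSketch is invented for illustration.

// Hypothetical sketch; assumes it lives in the same test package as
// WikibaseDataFetcherTest, so MockBasicApiConnection is visible directly.
public void fixtureWiringSketch() throws IOException, MediaWikiApiErrorException {
	// The registered parameter map must exactly match the request that the
	// code under test will send; compare setStandardParameters(...) above.
	Map<String, String> parameters = new HashMap<>();
	parameters.put("action", "wbgetentities");
	parameters.put("format", "json");
	parameters.put("props", "info|datatype|labels|aliases|descriptions|claims|sitelinks");
	parameters.put("ids", "Q42");
	MockBasicApiConnection con = new MockBasicApiConnection();
	// Serve the canned JSON fixture whenever this exact request arrives.
	con.setWebResourceFromPath(parameters, WikibaseDataFetcherTest.class,
			"/wbgetentities-Q6-Q42-P31.json", CompressionType.NONE);
	WikibaseDataFetcher wdf = new WikibaseDataFetcher(con, Datamodel.SITE_WIKIDATA);
	// The fetcher parses the fixture as if it came from the live API.
	EntityDocument result = wdf.getEntityDocument("Q42");
	assertNotNull(result);
}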
Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentites-Oliver-Kahn.json000066400000000000000000000705631444772566300315520ustar00rootroot00000000000000{"entities":{"Q131261":{"id":"Q131261","type":"item","labels":{"zh-hans":{"language":"zh-hans","value":"\u5965\u5229\u5f17\u00b7\u5361\u6069"},"zh-hant":{"language":"zh-hant","value":"\u5967\u5229\u4f5b\u00b7\u5361\u6069"},"zh-hk":{"language":"zh-hk","value":"\u5967\u5229\u83ef\u00b7\u7c21\u5c3c"},"zh-cn":{"language":"zh-cn","value":"\u5965\u5229\u5f17\u00b7\u5361\u6069"},"zh-sg":{"language":"zh-sg","value":"\u5965\u5229\u5f17\u00b7\u5361\u6069"},"zh-tw":{"language":"zh-tw","value":"\u5967\u5229\u4f5b\u00b7\u5361\u6069"},"jv":{"language":"jv","value":"Oliver Kahn"},"eu":{"language":"eu","value":"Oliver Kahn"},"pl":{"language":"pl","value":"Oliver Kahn"},"bs":{"language":"bs","value":"Oliver Kahn"},"es":{"language":"es","value":"Oliver Kahn"},"hu":{"language":"hu","value":"Oliver Kahn"},"ms":{"language":"ms","value":"Oliver Kahn"},"et":{"language":"et","value":"Oliver Kahn"},"bn":{"language":"bn","value":"\u0985\u09b2\u09bf\u09ad\u09be\u09b0 \u0995\u09be\u09a8"},"sq":{"language":"sq","value":"Oliver Kahn"},"el":{"language":"el","value":"\u038c\u03bb\u03b9\u03b2\u03b5\u03c1 \u039a\u03b1\u03bd"},"nl":{"language":"nl","value":"Oliver Kahn"},"ar":{"language":"ar","value":"\u0623\u0648\u0644\u064a\u0641\u0631 \u0643\u0627\u0646"},"sv":{"language":"sv","value":"Oliver Kahn"},"pt":{"language":"pt","value":"Oliver Kahn"},"eo":{"language":"eo","value":"Oliver Kahn"},"is":{"language":"is","value":"Oliver Kahn"},"ru":{"language":"ru","value":"\u041a\u0430\u043d, \u041e\u043b\u0438\u0432\u0435\u0440"},"sr-el":{"language":"sr-el","value":"Oliver Kan"},"sr-ec":{"language":"sr-ec","value":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d"},"tr":{"language":"tr","value":"Oliver Kahn"},"fi":{"language":"fi","value":"Oliver Kahn"},"uk":{"language":"uk","value":"\u041e\u043b\u0456\u0432\u0435\u0440 \u041a\u0430\u043d"},"hr":{"language":"hr","value":"Oliver Kahn"},"da":{"language":"da","value":"Oliver Kahn"},"fr":{"language":"fr","value":"Oliver Kahn"},"ko":{"language":"ko","value":"\uc62c\ub9ac\ubc84 \uce78"},"he":{"language":"he","value":"\u05d0\u05d5\u05dc\u05d9\u05d1\u05e8 \u05e7\u05d0\u05df"},"bar":{"language":"bar","value":"Oliver Kahn"},"lv":{"language":"lv","value":"Olivers K\u0101ns"},"it":{"language":"it","value":"Oliver Kahn"},"hif":{"language":"hif","value":"Oliver Kahn"},"id":{"language":"id","value":"Oliver Kahn"},"de":{"language":"de","value":"Oliver Kahn"},"ja":{"language":"ja","value":"\u30aa\u30ea\u30d0\u30fc\u30fb\u30ab\u30fc\u30f3"},"vi":{"language":"vi","value":"Oliver Kahn"},"yue":{"language":"yue","value":"\u7c21\u5c3c"},"en":{"language":"en","value":"Oliver Kahn"},"sh":{"language":"sh","value":"Oliver Kahn"},"sk":{"language":"sk","value":"Oliver Kahn"},"ro":{"language":"ro","value":"Oliver Kahn"},"ca":{"language":"ca","value":"Oliver Rolf Kahn"},"sl":{"language":"sl","value":"Oliver Kahn"},"cs":{"language":"cs","value":"Oliver Kahn"},"mr":{"language":"mr","value":"\u0913\u0932\u093f\u092b\u0930 \u0915\u093e\u0928"},"bg":{"language":"bg","value":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d"},"fa":{"language":"fa","value":"\u0627\u0644\u06cc\u0648\u0631 \u06a9\u0627\u0646"},"ka":{"language":"ka","value":"\u10dd\u10da\u10d8\u10d5\u10d4\u10e0 \u10d9\u10d0\u10dc\u10d8"},"lt":{"language":"lt","value":"Oliver 
Kahn"},"zh":{"language":"zh","value":"\u5965\u5229\u5f17\u00b7\u5361\u6069"},"tk":{"language":"tk","value":"Oliwer Kan"},"sr":{"language":"sr","value":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d"},"af":{"language":"af","value":"Oliver Kahn"},"an":{"language":"an","value":"Oliver Kahn"},"br":{"language":"br","value":"Oliver Kahn"},"ga":{"language":"ga","value":"Oliver Kahn"},"gl":{"language":"gl","value":"Oliver Kahn"},"lb":{"language":"lb","value":"Oliver Kahn"},"lmo":{"language":"lmo","value":"Oliver Kahn"},"nds-nl":{"language":"nds-nl","value":"Oliver Kahn"},"nn":{"language":"nn","value":"Oliver Kahn"},"pms":{"language":"pms","value":"Oliver Kahn"},"wa":{"language":"wa","value":"Oliver Kahn"},"az":{"language":"az","value":"Oliver Kan"},"ckb":{"language":"ckb","value":"\u0626\u06c6\u0644\u06cc\u06a4\u06d5\u0631 \u06a9\u0627\u0646"},"nb":{"language":"nb","value":"Oliver Kahn"},"gsw":{"language":"gsw","value":"Oliver Kahn"},"io":{"language":"io","value":"Oliver Kahn"},"hy":{"language":"hy","value":"\u0555\u056c\u056b\u057e\u0565\u0580 \u053f\u0561\u0576"},"mk":{"language":"mk","value":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d"},"qu":{"language":"qu","value":"Oliver Kahn"},"sco":{"language":"sco","value":"Oliver Kahn"},"mg":{"language":"mg","value":"Oliver Kahn"},"ne":{"language":"ne","value":"\u0913\u0932\u093f\u092d\u0930 \u0915\u093e\u0928"}},"aliases":{"zh":[{"language":"zh","value":"\u7c21\u5c3c"},{"language":"zh","value":"\u5967\u5229\u4f5b\u00b7\u5361\u6069"},{"language":"zh","value":"\u5965\u7acb\u5f17\u00b7\u5361\u6069"},{"language":"zh","value":"\u5967\u5229\u4f5b\u00b7\u7c21\u5c3c"}],"jv":[{"language":"jv","value":"Oliver Rolf Kahn"}],"es":[{"language":"es","value":"Oliver rolf kahn"},{"language":"es","value":"Olli kahn"}],"bn":[{"language":"bn","value":"Oli Kahn"},{"language":"bn","value":"Shmoliver Blahn"},{"language":"bn","value":"King Kahn"},{"language":"bn","value":"Oliver Kahn"},{"language":"bn","value":"Jens Mustermann"},{"language":"bn","value":"Kinh Kahn"},{"language":"bn","value":"Olli Kahn"},{"language":"bn","value":"Oliver Khan"}],"sq":[{"language":"sq","value":"Oliver Kan"}],"ar":[{"language":"ar","value":"\u0627\u0648\u0644\u064a\u0641\u0631 \u0643\u0627\u0646"},{"language":"ar","value":"\u062c\u0627\u0626\u0632\u0629 \u0627\u0644\u0643\u0631\u0629 \u0627\u0644\u0630\u0647\u0628\u064a\u0629 \u0627\u0644\u0623\u0648\u0631\u0648\u0628\u064a\u0629"}],"sv":[{"language":"sv","value":"Kahn"}],"ru":[{"language":"ru","value":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d"},{"language":"ru","value":"Oliver Kahn"},{"language":"ru","value":"\u041a\u0430\u043d\u043d, \u041e\u043b\u0438\u0432\u0435\u0440"},{"language":"ru","value":"\u041a\u0430\u043d \u041e."},{"language":"ru","value":"\u041a\u0430\u043d \u041e\u043b\u0438\u0432\u0435\u0440"}],"tr":[{"language":"tr","value":"Oliver Rolf Kahn"}],"uk":[{"language":"uk","value":"\u041a\u0430\u043d \u041e\u043b\u0456\u0432\u0435\u0440"}],"da":[{"language":"da","value":"Olivier kahn"}],"ko":[{"language":"ko","value":"\uc62c\ub9ac\ubc84 \ub864\ud504 \uce78"},{"language":"ko","value":"\uc62c\ub9ac\ubc84\uce78"}],"lv":[{"language":"lv","value":"Oliver Kahn"},{"language":"lv","value":"Kahn"},{"language":"lv","value":"Oliver Rolf Kahn"},{"language":"lv","value":"K\u0101ns"}],"de":[{"language":"de","value":"Oli Kahn"},{"language":"de","value":"Oliver Rolf 
Kahn"}],"ja":[{"language":"ja","value":"\u30aa\u30ea\u30f4\u30a1\u30fc\u30fb\u30ab\u30fc\u30f3"},{"language":"ja","value":"\u30aa\u30ea\u30d0\u30fc\u30ab\u30fc\u30f3"}],"sh":[{"language":"sh","value":"Kahn"},{"language":"sh","value":"Oliver Kan"}],"ca":[{"language":"ca","value":"Oliver Kahn"}],"mr":[{"language":"mr","value":"\u0911\u0932\u093f\u0935\u094d\u0939\u0930 \u0930\u0949\u0932\u094d\u092b \u0915\u093e\u0939\u094d\u0928"},{"language":"mr","value":"\u0911\u0932\u093f\u0935\u094d\u0939\u0930 \u0915\u093e\u0939\u094d\u0928"}],"bg":[{"language":"bg","value":"\u041e\u043b\u0438\u0432\u044a\u0440 \u041a\u0430\u043d"}],"fa":[{"language":"fa","value":"\u0627\u0648\u0644\u06cc\u0648\u0631 \u06a9\u0627\u0646"}],"ka":[{"language":"ka","value":"\u10d9\u10d0\u10dc\u10d8"}],"nb":[{"language":"nb","value":"Jens Mustermann"}]},"descriptions":{"fr":{"language":"fr","value":"footballeur allemand"},"it":{"language":"it","value":"calciatore tedesco"},"de":{"language":"de","value":"ehemaliger deutscher Fussballspieler"},"en":{"language":"en","value":"German footballer"},"es":{"language":"es","value":"futbolista alem\u00e1n"},"pl":{"language":"pl","value":"pi\u0142karz niemiecki (bramkarz)"},"fa":{"language":"fa","value":"\u0628\u0627\u0632\u06cc\u06a9\u0646 \u0641\u0648\u062a\u0628\u0627\u0644 \u0622\u0644\u0645\u0627\u0646\u06cc"},"nb":{"language":"nb","value":"tysk fotballspiller"},"nn":{"language":"nn","value":"tysk fotballspelar"},"sv":{"language":"sv","value":"tysk fotbollsspelare"},"da":{"language":"da","value":"tysk fodboldspiller"},"nl":{"language":"nl","value":"Duits voetballer"},"ms":{"language":"ms","value":"Pemain bola sepak Jerman"}},"claims":{"P21":[{"id":"q131261$DDA595E5-41CD-4594-AD9E-0EFCF5106686","mainsnak":{"snaktype":"value","property":"P21","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":6581097},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"39f3ce979f9d84a0ebf09abe1702bf22326695e9","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":169514},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]},{"hash":"50f57a3dbac4708ce4ae4a827c0afac7fcdb4a5c","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11920},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P106":[{"id":"Q131261$93102ED0-0A28-4369-ABF7-BF8A1C28007E","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":201330},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q131261$510DDC9A-CAC1-4902-9E0C-0D9127A3F9D3","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":937857},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P19":[{"id":"q131261$4BECDB1E-2C44-429F-87B4-426402B6C044","mainsnak":{"snaktype":"value","property":"P19","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1040},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"d6e3ab4045fb3f3feea77895bc6b27e663fc878a","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":206855},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P22":[{"id":"q131261$9E18D1DE-4D42-4143-9FC
1-229FCDC638F1","mainsnak":{"snaktype":"value","property":"P22","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":2163497},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P27":[{"id":"q131261$A34628C6-96C9-4442-815A-2F36D5D7AD53","mainsnak":{"snaktype":"value","property":"P27","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":183},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P18":[{"id":"q131261$A6536CCF-F39E-4278-AEE7-09D82A4B7E65","mainsnak":{"snaktype":"value","property":"P18","datatype":"commonsMedia","datavalue":{"value":"Oliver Kahn 06-2004.jpg","type":"string"}},"type":"statement","rank":"normal"}],"P373":[{"id":"q131261$65FC4699-0774-441B-8760-6E0C8CB94E05","mainsnak":{"snaktype":"value","property":"P373","datatype":"string","datavalue":{"value":"Oliver Kahn","type":"string"}},"type":"statement","rank":"normal"}],"P214":[{"id":"q131261$6F4F515C-5F05-46B7-86D8-358B8123049C","mainsnak":{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"32875527","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"7eb64cf9621d34c54fd4bd040ed4b61a88c4a1a0","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":328},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]},{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P213":[{"id":"q131261$96ACF1D3-35E4-462C-AC67-C3AA483D9219","mainsnak":{"snaktype":"value","property":"P213","datatype":"string","datavalue":{"value":"0000 0001 0653 
7069","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"500bf0768a4645f1e00ad451f904d3a010fa43ae","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":423048},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P244":[{"id":"q131261$9066A887-1238-4B0C-869D-C2423011BE70","mainsnak":{"snaktype":"value","property":"P244","datatype":"string","datavalue":{"value":"no2005020730","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P227":[{"id":"q131261$C068E3C0-2972-4475-8CED-D95F7115059F","mainsnak":{"snaktype":"value","property":"P227","datatype":"string","datavalue":{"value":"122329953","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P349":[{"id":"q131261$92F13DF5-DC6D-4590-A5AA-924841351F91","mainsnak":{"snaktype":"value","property":"P349","datatype":"string","datavalue":{"value":"00900364","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P569":[{"id":"q131261$6468BFF9-4EA4-4529-899C-984A391AA323","mainsnak":{"snaktype":"value","property":"P569","datatype":"time","datavalue":{"value":{"time":"+1969-06-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}},"type":"statement","rank":"normal","references":[{"hash":"7eb64cf9621d34c54fd4bd040ed4b61a88c4a1a0","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":328},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P54":[{"id":"q131261$4A0B67F1-5FD1-4F75-A20F-FE9FCDDF4E0A","mainsnak":{"snaktype":"value","property":"P54","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15789},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"q131261$6BE2B444-5B84-4332-B162-DB9BB202431A","mainsnak":{"snaktype":"value","property":"P54","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":105853},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P31":[{"id":"Q131261$C9F42682-A7C5-40D9-B18C-BE71842173D2","mainsnak":{"snaktype":"value","property":"P31","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"d6e3ab4045fb3f3feea77895bc6b27e663fc878a","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":206855},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P345":[{"id":"Q131261$65D4A8DB-0B23-468A-82F2-24C5F97BBCDA","mainsnak":{"snaktype":"value","property":"P345","datatype
":"string","datavalue":{"value":"nm1853127","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P856":[{"id":"Q131261$71B01A11-3601-44E1-BF30-CAEB4DA970AE","mainsnak":{"snaktype":"value","property":"P856","datatype":"url","datavalue":{"value":"http://www.oliver-kahn.de/","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"7eb64cf9621d34c54fd4bd040ed4b61a88c4a1a0","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":328},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P166":[{"id":"Q131261$fd989353-4331-68e1-e0a7-bbaf8ed027ee","mainsnak":{"snaktype":"value","property":"P166","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":311830},"type":"wikibase-entityid"}},"qualifiers":{"P585":[{"hash":"ac5c270bf0113b5785cf1a0dcd7c7a27c95dad7f","snaktype":"value","property":"P585","datatype":"time","datavalue":{"value":{"time":"+2000-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"qualifiers-order":["P585"],"type":"statement","rank":"normal"},{"id":"Q131261$30505802-4C67-46CF-9432-AED40E64163D","mainsnak":{"snaktype":"value","property":"P166","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":315026},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q131261$bf04c4bb-42f6-24af-5af6-b4ed48d1f561","mainsnak":{"snaktype":"value","property":"P166","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":17355204},"type":"wikibase-entityid"}},"qualifiers":{"P585":[{"hash":"7131a4cbf49bcebe3c1161b815cd7cca77a5ba1e","snaktype":"value","property":"P585","datatype":"time","datavalue":{"value":{"time":"+2002-07-02T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"qualifiers-order":["P585"],"type":"statement","rank":"normal","references":[{"hash":"36e58d30e140ec311fef34711ae5d0b16d82c579","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.fifa.com/newscentre/news/newsid=82639/index.html","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2014-07-14T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P813"]}]},{"id":"Q131261$b1d3aeee-48ad-fb09-73e9-e221dce199df","mainsnak":{"snaktype":"value","property":"P166","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":311830},"type":"wikibase-entityid"}},"qualifiers":{"P585":[{"hash":"e6b62cb417762f541d7ff71221604d3f843b44b4","snaktype":"value","property":"P585","datatype":"time","datavalue":{"value":{"time":"+2001-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"qualifiers-order":["P585"],"type":"statement","rank":"normal"},{"id":"Q131261$eb485f94-45ee-eb08-a0ff-348c1d7a33e7","mainsnak":{"snaktype":"value","property":"P166","datatype":"wikibase-item","datav
alue":{"value":{"entity-type":"item","numeric-id":17351855},"type":"wikibase-entityid"}},"qualifiers":{"P585":[{"hash":"2381ef58c4805182fc83e66c5dd5e839dc760346","snaktype":"value","property":"P585","datatype":"time","datavalue":{"value":{"time":"+2002-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"qualifiers-order":["P585"],"type":"statement","rank":"normal","references":[{"hash":"8905a30c761ddbd39c0452b3aeeae051f810b940","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://de.fifa.com/worldcup/awards/golden-glove/intro.html","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2014-07-14T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P813"]}]},{"id":"Q131261$E191FD69-9AD9-4913-9774-4F4A6266BF97","mainsnak":{"snaktype":"value","property":"P166","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1432320},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P646":[{"id":"Q131261$E9A61268-5D88-49D3-BD01-49769F10775C","mainsnak":{"snaktype":"value","property":"P646","datatype":"string","datavalue":{"value":"/m/01ywhq","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"af38848ab5d9d9325cffd93a5ec656cc6ca889ed","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15241312},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2013-10-28T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P577"]}]}],"P734":[{"id":"Q131261$75017E0E-6F13-4F51-9766-714A89008275","mainsnak":{"snaktype":"value","property":"P734","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1260948},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"50f57a3dbac4708ce4ae4a827c0afac7fcdb4a5c","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11920},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P735":[{"id":"Q131261$7BF74BFD-A0F6-4424-9544-DB4EC9C90CB9","mainsnak":{"snaktype":"value","property":"P735","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":2110096},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P641":[{"id":"Q131261$5544F138-DD92-4777-A9DA-359C119F5861","mainsnak":{"snaktype":"value","property":"P641","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":2736},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"7eb64cf9621d34c54fd4bd040ed4b61a88c4a1a0","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":328},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1344":[{"id":"Q131261$E777087C-A71A-4339-AE5C-5FE0FB0D3928","mainsnak":{"snaktype":"value","property":"P1344","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":37285},"type":"wikibase-entityid"}},"type":"statement","rank"
:"normal"},{"id":"Q131261$223C0928-960B-486E-8CF5-F1A726723032","mainsnak":{"snaktype":"value","property":"P1344","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":47735},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q131261$756054A2-3C6F-48C8-8055-F6FB74E25ABC","mainsnak":{"snaktype":"value","property":"P1344","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":101730},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q131261$4BCFA1E9-E59F-4B1C-A60A-3DC23B4F2BD4","mainsnak":{"snaktype":"value","property":"P1344","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":101751},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P1469":[{"id":"Q131261$CA775C24-5297-434A-AAC2-0694113FD109","mainsnak":{"snaktype":"value","property":"P1469","datatype":"string","datavalue":{"value":"78091","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"86cc8a5ea2c24db0138f73584f515964543bb53f","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":199864},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1285":[{"id":"Q131261$7383D210-6030-48A3-9DC1-772CE4DFDD0F","mainsnak":{"snaktype":"value","property":"P1285","datatype":"string","datavalue":{"value":"01000004092","type":"string"}},"type":"statement","rank":"normal"}]},"sitelinks":{"arwiki":{"site":"arwiki","title":"\u0623\u0648\u0644\u064a\u0641\u0631 \u0643\u0627\u0646","badges":[]},"azwiki":{"site":"azwiki","title":"Oliver Kan","badges":[]},"barwiki":{"site":"barwiki","title":"Oliver Kahn","badges":[]},"bgwiki":{"site":"bgwiki","title":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d","badges":[]},"bnwiki":{"site":"bnwiki","title":"\u0985\u09b2\u09bf\u09ad\u09be\u09b0 \u0995\u09be\u09a8","badges":[]},"brwiki":{"site":"brwiki","title":"Oliver Kahn","badges":[]},"bswiki":{"site":"bswiki","title":"Oliver Kahn","badges":[]},"cawiki":{"site":"cawiki","title":"Oliver Rolf Kahn","badges":[]},"ckbwiki":{"site":"ckbwiki","title":"\u0626\u06c6\u0644\u06cc\u06a4\u06d5\u0631 \u06a9\u0627\u0646","badges":[]},"commonswiki":{"site":"commonswiki","title":"Oliver Kahn","badges":[]},"cswiki":{"site":"cswiki","title":"Oliver Kahn","badges":[]},"dawiki":{"site":"dawiki","title":"Oliver Kahn","badges":[]},"dewiki":{"site":"dewiki","title":"Oliver Kahn","badges":[]},"dewikiquote":{"site":"dewikiquote","title":"Oliver Kahn","badges":[]},"elwiki":{"site":"elwiki","title":"\u038c\u03bb\u03b9\u03b2\u03b5\u03c1 \u039a\u03b1\u03bd","badges":[]},"enwiki":{"site":"enwiki","title":"Oliver Kahn","badges":["Q17437798"]},"eowiki":{"site":"eowiki","title":"Oliver Kahn","badges":[]},"eswiki":{"site":"eswiki","title":"Oliver Kahn","badges":[]},"etwiki":{"site":"etwiki","title":"Oliver Kahn","badges":[]},"euwiki":{"site":"euwiki","title":"Oliver Kahn","badges":[]},"fawiki":{"site":"fawiki","title":"\u0627\u0644\u06cc\u0648\u0631 \u06a9\u0627\u0646","badges":[]},"fiwiki":{"site":"fiwiki","title":"Oliver Kahn","badges":[]},"frwiki":{"site":"frwiki","title":"Oliver Kahn","badges":[]},"hewiki":{"site":"hewiki","title":"\u05d0\u05d5\u05dc\u05d9\u05d1\u05e8 \u05e7\u05d0\u05df","badges":[]},"hewikiquote":{"site":"hewikiquote","title":"\u05d0\u05d5\u05dc\u05d9\u05d1\u05e8 \u05e7\u05d0\u05df","badges":[]},"hrwiki":{"site":"hrwiki","title":"Oliver Kahn","badges":[]},"huwiki":{"site":"huwiki","title":"Oliver 
Kahn","badges":[]},"hywiki":{"site":"hywiki","title":"\u0555\u056c\u056b\u057e\u0565\u0580 \u053f\u0561\u0576","badges":[]},"idwiki":{"site":"idwiki","title":"Oliver Kahn","badges":[]},"iowiki":{"site":"iowiki","title":"Oliver Kahn","badges":[]},"iswiki":{"site":"iswiki","title":"Oliver Kahn","badges":[]},"itwiki":{"site":"itwiki","title":"Oliver Kahn","badges":[]},"jawiki":{"site":"jawiki","title":"\u30aa\u30ea\u30d0\u30fc\u30fb\u30ab\u30fc\u30f3","badges":[]},"jvwiki":{"site":"jvwiki","title":"Oliver Kahn","badges":[]},"kawiki":{"site":"kawiki","title":"\u10dd\u10da\u10d8\u10d5\u10d4\u10e0 \u10d9\u10d0\u10dc\u10d8","badges":[]},"kowiki":{"site":"kowiki","title":"\uc62c\ub9ac\ubc84 \uce78","badges":[]},"ltwiki":{"site":"ltwiki","title":"Oliver Kahn","badges":[]},"lvwiki":{"site":"lvwiki","title":"Olivers K\u0101ns","badges":[]},"mgwiki":{"site":"mgwiki","title":"Oliver Kahn","badges":[]},"mkwiki":{"site":"mkwiki","title":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d","badges":[]},"mrwiki":{"site":"mrwiki","title":"\u0913\u0932\u093f\u092b\u0930 \u0915\u093e\u0928","badges":[]},"mswiki":{"site":"mswiki","title":"Oliver Kahn","badges":[]},"newiki":{"site":"newiki","title":"\u0913\u0932\u093f\u092d\u0930 \u0915\u093e\u0928","badges":[]},"nlwiki":{"site":"nlwiki","title":"Oliver Kahn","badges":[]},"nowiki":{"site":"nowiki","title":"Oliver Kahn","badges":[]},"plwiki":{"site":"plwiki","title":"Oliver Kahn","badges":[]},"ptwiki":{"site":"ptwiki","title":"Oliver Kahn","badges":[]},"quwiki":{"site":"quwiki","title":"Oliver Kahn","badges":[]},"rowiki":{"site":"rowiki","title":"Oliver Kahn","badges":[]},"ruwiki":{"site":"ruwiki","title":"\u041a\u0430\u043d, \u041e\u043b\u0438\u0432\u0435\u0440","badges":[]},"scowiki":{"site":"scowiki","title":"Oliver Kahn","badges":[]},"shwiki":{"site":"shwiki","title":"Oliver Kahn","badges":[]},"simplewiki":{"site":"simplewiki","title":"Oliver Kahn","badges":[]},"skwiki":{"site":"skwiki","title":"Oliver Kahn","badges":[]},"slwiki":{"site":"slwiki","title":"Oliver Kahn","badges":[]},"sqwiki":{"site":"sqwiki","title":"Oliver Kahn","badges":[]},"srwiki":{"site":"srwiki","title":"\u041e\u043b\u0438\u0432\u0435\u0440 \u041a\u0430\u043d","badges":[]},"svwiki":{"site":"svwiki","title":"Oliver Kahn","badges":[]},"tkwiki":{"site":"tkwiki","title":"Oliwer Kan","badges":[]},"trwiki":{"site":"trwiki","title":"Oliver Kahn","badges":["Q17437798"]},"ukwiki":{"site":"ukwiki","title":"\u041e\u043b\u0456\u0432\u0435\u0440 \u041a\u0430\u043d","badges":[]},"viwiki":{"site":"viwiki","title":"Oliver Kahn","badges":[]},"zh_yuewiki":{"site":"zh_yuewiki","title":"\u7c21\u5c3c","badges":[]},"zhwiki":{"site":"zhwiki","title":"\u5965\u5229\u5f17\u00b7\u5361\u6069","badges":["Q17437798"]}}}},"success":1}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-1234567890-missing.json000066400000000000000000000001231444772566300323300ustar00rootroot00000000000000{"entities":{"-1":{"site":"dewiki","title":"1234567890","missing":""}},"success":1}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-Douglas-Adams.json000066400000000000000000002243311444772566300322170ustar00rootroot00000000000000{"entities":{"Q42":{"id":"Q42","type":"item","labels":{"fr":{"language":"fr","value":"Douglas Adams"},"ru":{"language":"ru","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"pl":{"language":"pl","value":"Douglas Adams"},"it":{"language":"it","value":"Douglas Adams"},"en-gb":{"language":"en-gb","value":"Douglas 
Adams"},"nb":{"language":"nb","value":"Douglas Adams"},"es":{"language":"es","value":"Douglas Adams"},"en-ca":{"language":"en-ca","value":"Douglas Adams"},"hr":{"language":"hr","value":"Douglas Adams"},"pt":{"language":"pt","value":"Douglas Adams"},"ko":{"language":"ko","value":"\ub354\uae00\ub7ec\uc2a4 \uc560\ub364\uc2a4"},"nl":{"language":"nl","value":"Douglas Adams"},"el":{"language":"el","value":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u0386\u03bd\u03c4\u03b1\u03bc\u03c2"},"ar":{"language":"ar","value":"\u062f\u0648\u063a\u0644\u0627\u0633 \u0622\u062f\u0645\u0632"},"arz":{"language":"arz","value":"\u062f\u0648\u062c\u0644\u0627\u0633 \u0627\u062f\u0627\u0645\u0632"},"bar":{"language":"bar","value":"Douglas Adams"},"be":{"language":"be","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"bg":{"language":"bg","value":"\u0414\u044a\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"bs":{"language":"bs","value":"Douglas Adams"},"ca":{"language":"ca","value":"Douglas Adams"},"cs":{"language":"cs","value":"Douglas Adams"},"cy":{"language":"cy","value":"Douglas Adams"},"da":{"language":"da","value":"Douglas Adams"},"eo":{"language":"eo","value":"Douglas ADAMS"},"et":{"language":"et","value":"Douglas Adams"},"fa":{"language":"fa","value":"\u062f\u0627\u06af\u0644\u0627\u0633 \u0622\u062f\u0627\u0645\u0632"},"fi":{"language":"fi","value":"Douglas Adams"},"ga":{"language":"ga","value":"Douglas Adams"},"gl":{"language":"gl","value":"Douglas Adams"},"he":{"language":"he","value":"\u05d3\u05d0\u05d2\u05dc\u05e1 \u05d0\u05d3\u05d0\u05de\u05e1"},"hu":{"language":"hu","value":"Douglas Adams"},"id":{"language":"id","value":"Douglas Adams"},"io":{"language":"io","value":"Douglas Adams"},"is":{"language":"is","value":"Douglas Adams"},"ja":{"language":"ja","value":"\u30c0\u30b0\u30e9\u30b9\u30fb\u30a2\u30c0\u30e0\u30ba"},"jv":{"language":"jv","value":"Douglas Adams"},"ka":{"language":"ka","value":"\u10d3\u10d0\u10d2\u10da\u10d0\u10e1 \u10d0\u10d3\u10d0\u10db\u10e1\u10d8"},"la":{"language":"la","value":"Duglassius Adams"},"lv":{"language":"lv","value":"Duglass Adamss"},"mk":{"language":"mk","value":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"mr":{"language":"mr","value":"\u0921\u0917\u094d\u0932\u0938 \u0905\u0945\u0921\u092e\u094d\u0938"},"nn":{"language":"nn","value":"Douglas Adams"},"ro":{"language":"ro","value":"Douglas Adams"},"sco":{"language":"sco","value":"Douglas Adams"},"sh":{"language":"sh","value":"Douglas Adams"},"sk":{"language":"sk","value":"Douglas Adams"},"sl":{"language":"sl","value":"Douglas Adams"},"sq":{"language":"sq","value":"Douglas Adams"},"sr":{"language":"sr","value":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"sv":{"language":"sv","value":"Douglas Adams"},"ta":{"language":"ta","value":"\u0b9f\u0b95\u0bcd\u0bb3\u0bb8\u0bcd \u0b86\u0b9f\u0bae\u0bcd\u0bb8\u0bcd"},"tr":{"language":"tr","value":"Douglas Adams"},"uk":{"language":"uk","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"vi":{"language":"vi","value":"Douglas Adams"},"zh":{"language":"zh","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-cn":{"language":"zh-cn","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-hans":{"language":"zh-hans","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-hant":{"language":"zh-hant","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"de-ch":{"language":"de-ch","value":"Douglas 
Adams"},"pt-br":{"language":"pt-br","value":"Douglas Adams"},"zh-sg":{"language":"zh-sg","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-my":{"language":"zh-my","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-hk":{"language":"zh-hk","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"zh-tw":{"language":"zh-tw","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"zh-mo":{"language":"zh-mo","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"war":{"language":"war","value":"Douglas Adams"},"be-tarask":{"language":"be-tarask","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0437"},"be-x-old":{"language":"be-x-old","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"vep":{"language":"vep","value":"Adams Duglas"},"ur":{"language":"ur","value":"\u0688\u06af\u0644\u0633 \u0627\u06cc\u0688\u0645"},"oc":{"language":"oc","value":"Douglas Adams"},"af":{"language":"af","value":"Douglas Adams"},"an":{"language":"an","value":"Douglas Adams"},"br":{"language":"br","value":"Douglas Adams"},"eu":{"language":"eu","value":"Douglas Adams"},"lb":{"language":"lb","value":"Douglas Adams"},"lmo":{"language":"lmo","value":"Douglas Adams"},"lt":{"language":"lt","value":"Douglas Adams"},"nds":{"language":"nds","value":"Douglas Adams"},"nds-nl":{"language":"nds-nl","value":"Douglas Adams"},"nl-informal":{"language":"nl-informal","value":"Douglas Adams"},"pms":{"language":"pms","value":"Douglas Adams"},"vec":{"language":"vec","value":"Douglas Adams"},"wa":{"language":"wa","value":"Douglas Adams"},"sr-ec":{"language":"sr-ec","value":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"sr-el":{"language":"sr-el","value":"Daglas Adams"},"de":{"language":"de","value":"Douglas Adams"},"en":{"language":"en","value":"Douglas Adams"},"ckb":{"language":"ckb","value":"\u062f\u06d5\u06af\u0644\u0627\u0633 \u0626\u0627\u062f\u0645\u0632"},"fo":{"language":"fo","value":"Douglas Adams"},"kl":{"language":"kl","value":"Douglas Adams"},"gsw":{"language":"gsw","value":"Douglas Adams"},"te":{"language":"te","value":"\u0c21\u0c17\u0c4d\u0c32\u0c38\u0c4d \u0c06\u0c21\u0c2e\u0c4d\u0c38\u0c4d"},"si":{"language":"si","value":"\u0da9\u0d9c\u0dca\u0dbd\u0dc3\u0dca \u0d87\u0da9\u0db8\u0dca\u0dc3\u0dca"},"bn":{"language":"bn","value":"\u09a1\u0997\u09b2\u09be\u09b8"},"hi":{"language":"hi","value":"\u0921\u0917\u094d\u0932\u0938 \u0905\u200d\u0921\u092e\u094d\u0938"},"rwr":{"language":"rwr","value":"\u0921\u0917\u094d\u0932\u0938 \u0905\u200d\u0921\u092e\u094d\u0938"},"mg":{"language":"mg","value":"Douglas Adams"},"ml":{"language":"ml","value":"\u0d21\u0d17\u0d4d\u0d32\u0d38\u0d4d \u0d06\u0d21\u0d02\u0d38\u0d4d"},"gu":{"language":"gu","value":"\u0aa1\u0a97\u0acd\u0ab2\u0abe\u0ab8 \u0a8f\u0aa1\u0aae\u0acd\u0ab8"},"hy":{"language":"hy","value":"\u0534\u0578\u0582\u0563\u056c\u0561\u057d \u0531\u0564\u0561\u0574\u057d"},"ast":{"language":"ast","value":"Douglas Adams"},"co":{"language":"co","value":"Douglas Adams"},"de-at":{"language":"de-at","value":"Douglas Adams"},"frp":{"language":"frp","value":"Douglas Adams"},"fur":{"language":"fur","value":"Douglas Adams"},"gd":{"language":"gd","value":"Douglas Adams"},"ia":{"language":"ia","value":"Douglas Adams"},"ie":{"language":"ie","value":"Douglas Adams"},"kg":{"language":"kg","value":"Douglas Adams"},"li":{"language":"li","value":"Douglas Adams"},"lij":{"language":"lij","value":"Douglas Adams"},"min":{"language":"min","value":"Douglas 
Adams"},"ms":{"language":"ms","value":"Douglas Adams"},"nap":{"language":"nap","value":"Douglas Adams"},"nrm":{"language":"nrm","value":"Douglas Adams"},"pcd":{"language":"pcd","value":"Douglas Adams"},"rm":{"language":"rm","value":"Douglas Adams"},"sc":{"language":"sc","value":"Douglas Adams"},"scn":{"language":"scn","value":"Douglas Adams"},"sw":{"language":"sw","value":"Douglas Adams"},"vls":{"language":"vls","value":"Douglas Adams"},"vo":{"language":"vo","value":"Douglas Adams"},"wo":{"language":"wo","value":"Douglas Adams"},"zu":{"language":"zu","value":"Douglas Adams"},"az":{"language":"az","value":"Duqlas Noel Adams"},"ak":{"language":"ak","value":"Doglas Adams"},"or":{"language":"or","value":"\u0b21\u0b17\u0b4d\u200c\u0b32\u0b3e\u0b38\u0b4d\u200c \u0b06\u0b26\u0b3e\u0b2e\u0b4d\u200c\u0b38"}},"aliases":{"en":[{"language":"en","value":"Douglas No\u00ebl Adams"},{"language":"en","value":"Douglas Noel Adams"}],"ru":[{"language":"ru","value":"\u0410\u0434\u0430\u043c\u0441, \u0414\u0443\u0433\u043b\u0430\u0441"}],"nb":[{"language":"nb","value":"Douglas No\u00ebl Adams"}],"fr":[{"language":"fr","value":"Douglas Noel Adams"}],"de":[{"language":"de","value":"Douglas No\u00ebl Adams"}],"pt-br":[{"language":"pt-br","value":"Douglas No\u00ebl Adams"},{"language":"pt-br","value":"Douglas Noel Adams"}],"be-tarask":[{"language":"be-tarask","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"}],"zh":[{"language":"zh","value":"\u4e9e\u7576\u65af"}],"es":[{"language":"es","value":"Douglas Noel Adams"}],"it":[{"language":"it","value":"Douglas Noel Adams"}],"cs":[{"language":"cs","value":"Douglas No\u00ebl Adams"},{"language":"cs","value":"Douglas Noel Adams"}],"hy":[{"language":"hy","value":"\u0531\u0564\u0561\u0574\u057d, \u0534\u0578\u0582\u0563\u056c\u0561\u057d"}],"el":[{"language":"el","value":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u039d\u03cc\u03b5\u03bb \u0386\u03bd\u03c4\u03b1\u03bc\u03c2"}]},"descriptions":{"en":{"language":"en","value":"English writer and humorist"},"fr":{"language":"fr","value":"\u00e9crivain anglais de science-fiction"},"en-gb":{"language":"en-gb","value":"English writer and humourist"},"nb":{"language":"nb","value":"engelsk science fiction-forfatter og humorist"},"it":{"language":"it","value":"scrittore inglese"},"de":{"language":"de","value":"britischer Schriftsteller"},"es":{"language":"es","value":"escritor y guionista radiof\u00f3nico brit\u00e1nico"},"ru":{"language":"ru","value":"\u0430\u043d\u0433\u043b\u0438\u0439\u0441\u043a\u0438\u0439 \u043f\u0438\u0441\u0430\u0442\u0435\u043b\u044c, \u0434\u0440\u0430\u043c\u0430\u0442\u0443\u0440\u0433 \u0438 \u0441\u0446\u0435\u043d\u0430\u0440\u0438\u0441\u0442, \u0430\u0432\u0442\u043e\u0440 \u0441\u0435\u0440\u0438\u0438 \u043a\u043d\u0438\u0433 \u00ab\u0410\u0432\u0442\u043e\u0441\u0442\u043e\u043f\u043e\u043c \u043f\u043e 
\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u043a\u0435\u00bb."},"zh-hans":{"language":"zh-hans","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-hant":{"language":"zh-hant","value":"\u82f1\u570b\u4f5c\u5bb6"},"zh-cn":{"language":"zh-cn","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-sg":{"language":"zh-sg","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-my":{"language":"zh-my","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh":{"language":"zh","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-hk":{"language":"zh-hk","value":"\u82f1\u570b\u4f5c\u5bb6"},"zh-tw":{"language":"zh-tw","value":"\u82f1\u570b\u4f5c\u5bb6"},"zh-mo":{"language":"zh-mo","value":"\u82f1\u570b\u4f5c\u5bb6"},"ca":{"language":"ca","value":"escriptor angl\u00e8s"},"fi":{"language":"fi","value":"englantilainen kirjailija ja humoristi"},"cs":{"language":"cs","value":"anglick\u00fd spisovatel, humorista a dramatik"},"sv":{"language":"sv","value":"brittisk f\u00f6rfattare och humorist"},"pt-br":{"language":"pt-br","value":"escritor e humorista ingl\u00eas"},"ta":{"language":"ta","value":"\u0b86\u0b99\u0bcd\u0b95\u0bbf\u0bb2 \u0b8e\u0bb4\u0bc1\u0ba4\u0bcd\u0ba4\u0bbe\u0bb3\u0bb0\u0bcd \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0ba8\u0b95\u0bc8\u0b9a\u0bcd\u0b9a\u0bc1\u0bb5\u0bc8\u0baf\u0bbe\u0bb3\u0bb0\u0bcd"},"sl":{"language":"sl","value":"angle\u0161ki pisatelj, humorist in dramatik"},"da":{"language":"da","value":"forfatter"},"nl":{"language":"nl","value":"Engels sciencefictionschrijver"},"pt":{"language":"pt","value":"escritor e roteirista ingl\u00eas"},"pl":{"language":"pl","value":"brytyjski pisarz"},"lv":{"language":"lv","value":"ang\u013cu zin\u0101tnisk\u0101s fantastikas rakstnieks un humorists"},"simple":{"language":"simple","value":"writer and humorist from England"},"sr":{"language":"sr","value":"\u0435\u043d\u0433\u043b\u0435\u0441\u043a\u0438 \u043f\u0438\u0441\u0430\u0446 \u043d\u0430\u0443\u0447\u043d\u0435 \u0444\u0430\u043d\u0442\u0430\u0441\u0442\u0438\u043a\u0435 \u0438 \u0445\u0443\u043c\u043e\u0440\u0438\u0441\u0442\u0430"},"sr-ec":{"language":"sr-ec","value":"\u0435\u043d\u0433\u043b\u0435\u0441\u043a\u0438 \u043f\u0438\u0441\u0430\u0446 \u043d\u0430\u0443\u0447\u043d\u0435 \u0444\u0430\u043d\u0442\u0430\u0441\u0442\u0438\u043a\u0435 \u0438 \u0445\u0443\u043c\u043e\u0440\u0438\u0441\u0442\u0430"},"sr-el":{"language":"sr-el","value":"engleski pisac nau\u010dne fantastike i humorista"},"eo":{"language":"eo","value":"angla a\u016dtoro de sciencfikcio-romanoj kaj humoristo"},"bar":{"language":"bar","value":"a englischer Science-Fiction-Schriftsteller"},"br":{"language":"br","value":"skrivagner saoznek"},"ja":{"language":"ja","value":"\u30a4\u30f3\u30b0\u30e9\u30f3\u30c9\u306e\u4f5c\u5bb6"},"nn":{"language":"nn","value":"engelsk sciencefictionforfattar og humorist"},"tr":{"language":"tr","value":"\u0130ngiliz bilim kurgu ve mizah yazar\u0131"},"si":{"language":"si","value":"\u0d89\u0d82\u0d9c\u0dca\u200d\u0dbb\u0dd3\u0dc3\u0dd2 \u0d9a\u0dc0\u0dd2\u0dba\u0dd9\u0d9a\u0dca"},"vi":{"language":"vi","value":"Nh\u00e0 v\u0103n v\u00e0 nh\u00e0 so\u1ea1n h\u00e0i k\u1ecbch ng\u01b0\u1eddi Anh"},"cy":{"language":"cy","value":"awdur a dychanwr Seisnig"},"gu":{"language":"gu","value":"\u0a85\u0a82\u0a97\u0acd\u0ab0\u0ac7\u0a9c\u0ac0 \u0ab2\u0ac7\u0a96\u0a95 \u0a85\u0aa8\u0ac7 \u0ab9\u0abe\u0ab8\u0acd\u0aaf\u0a95\u0abe\u0ab0"},"uk":{"language":"uk","value":"\u0431\u0440\u0438\u0442\u0430\u043d\u0441\u044c\u043a\u0438\u0439 \u043a\u043e\u043c\u0456\u0447\u043d\u0438\u0439 \u0440\u0430\u0434\u0456\u043e\u0434\u0440\u0430\u043c\u0430\u0442\u0443\u0440\u0433, 
\u043f\u0438\u0441\u044c\u043c\u0435\u043d\u043d\u0438\u043a"},"ro":{"language":"ro","value":"scriitor, dramaturg englez"},"hu":{"language":"hu","value":"angol \u00edr\u00f3"},"fa":{"language":"fa","value":"\u0641\u06cc\u0644\u0645\u0646\u0627\u0645\u0647\u200c\u0646\u0648\u06cc\u0633 \u0648 \u0646\u0648\u06cc\u0633\u0646\u062f\u0647 \u0628\u0631\u06cc\u062a\u0627\u0646\u06cc\u0627\u06cc\u06cc"},"af":{"language":"af","value":"Engelse skrywer en humoris"},"mk":{"language":"mk","value":"\u0430\u043d\u0433\u043b\u0438\u0441\u043a\u0438 \u043f\u0438\u0441\u0430\u0442\u0435\u043b"},"el":{"language":"el","value":"\u0386\u03b3\u03b3\u03bb\u03bf\u03c2 \u03c3\u03c5\u03b3\u03b3\u03c1\u03b1\u03c6\u03ad\u03b1\u03c2"},"hy":{"language":"hy","value":"\u0561\u0576\u0563\u056c\u056b\u0561\u0581\u056b \u0563\u0580\u0578\u0572, \u0564\u0580\u0561\u0574\u0561\u057f\u0578\u0582\u0580\u0563, \u057d\u0581\u0565\u0576\u0561\u0580\u056b\u057d\u057f, \u00ab\u0531\u057e\u057f\u0578\u057d\u057f\u0578\u057a\u0578\u057e \u0566\u0562\u0578\u057d\u0561\u0577\u0580\u057b\u056b\u056f\u056b \u0574\u056b\u057b\u0563\u0561\u056c\u0561\u056f\u057f\u056b\u056f\u0561\u056f\u0561\u0576 \u0578\u0582\u0572\u0565\u0581\u0578\u0582\u0575\u0581\u00bb \u057e\u0565\u057a\u0565\u0580\u056b \u0577\u0561\u0580\u0584"}},"claims":{"P31":[{"id":"Q42$F078E5B3-F9A8-480E-B7AC-D97778CBBEF9","mainsnak":{"snaktype":"value","property":"P31","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"050ec907ff2d96e82eddea6ecfc54f12503b9f4c","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}],"P214":[{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P214","P813"]}]}],"P1368":[{"id":"Q42$11725e9f-4f81-e0fd-b00a-b885fe7a75ac","mainsnak":{"snaktype":"value","property":"P1368","datatype":"string","datavalue":{"value":"000057405","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1477":[{"id":"Q42$45220d20-40d2-299e-f4cc-f6cce89f2f42","mainsnak":{"snaktype":"value","property":"P1477","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas No\u00ebl Adams","language":"en"},"type":"monolingualtext"}},"type":"statement","rank":"normal","references":[{"hash":"f6c671a38daa2881f1c4c901c1de5eeb76c11978","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Obituary: Douglas 
Adams","language":"en"},"type":"monolingualtext"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11148},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2001-05-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.theguardian.com/news/2001/may/15/guardianobituaries.books","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P50":[{"snaktype":"value","property":"P50","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18145749},"type":"wikibase-entityid"}}]},"snaks-order":["P1476","P123","P577","P364","P854","P813","P50"]}]}],"P1015":[{"id":"Q42$6583fdb7-4ffa-9fe1-4288-1a1cbb2950d0","mainsnak":{"snaktype":"value","property":"P1015","datatype":"string","datavalue":{"value":"x90196888","type":"string"}},"type":"statement","rank":"normal"}],"P735":[{"id":"Q42$1d7d0ea9-412f-8b5b-ba8d-405ab9ecf026","mainsnak":{"snaktype":"value","property":"P735","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":463035},"type":"wikibase-entityid"}},"type":"statement","rank":"preferred"},{"id":"Q42$1e106952-4b58-6067-c831-8593ce3d70f5","mainsnak":{"snaktype":"value","property":"P735","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":261113},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P734":[{"id":"Q42$24df999a-4629-c679-e1f0-199bcefabbf3","mainsnak":{"snaktype":"value","property":"P734","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":351735},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P21":[{"id":"q42$39F4DE4F-C277-449C-9F99-512350971B5B","mainsnak":{"snaktype":"value","property":"P21","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":6581097},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"050ec907ff2d96e82eddea6ecfc54f12503b9f4c","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}],"P214":[{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P214","P813"]}]}],"P18":[{"id":"q42$43D37345-54ED-4FF2-A226-EC26A356E38D","mainsnak":{"snaktype":"value","property":"P18","datatype":"commonsMedia","datavalue":{"value":"Douglas adams portrait 
cropped.jpg","type":"string"}},"type":"statement","rank":"normal"}],"P19":[{"id":"q42$3D284234-52BC-4DA3-83A3-7C39F84BA518","mainsnak":{"snaktype":"value","property":"P19","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":350},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]},{"hash":"cc8e207a81b62f7eb17b07d44dfcf8e52a3dc080","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.theguardian.com/news/2001/may/15/guardianobituaries.books","type":"string"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2001-05-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1433":[{"snaktype":"value","property":"P1433","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11148},"type":"wikibase-entityid"}}],"P50":[{"snaktype":"value","property":"P50","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18145749},"type":"wikibase-entityid"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Obituary: Douglas Adams","language":"en"},"type":"monolingualtext"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"snaks-order":["P854","P577","P813","P1433","P50","P1476","P407"]},{"hash":"7e6f5106921a912c63f4ce557693502e11b83998","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Hitch Hiker's Guide author Douglas Adams dies aged 
49","language":"en"},"type":"monolingualtext"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2001-05-13T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":192621},"type":"wikibase-entityid"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.telegraph.co.uk/news/uknews/1330072/Hitch-Hikers-Guide-author-Douglas-Adams-dies-aged-49.html","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2015-01-03T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P1476","P577","P123","P407","P854","P813"]}]}],"P27":[{"id":"q42$DE2A0C89-6199-44D0-B727-D7A4BE031A2B","mainsnak":{"snaktype":"value","property":"P27","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":145},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"050ec907ff2d96e82eddea6ecfc54f12503b9f4c","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}],"P214":[{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P214","P813"]}]}],"P103":[{"id":"q42$D9E6DEFB-472B-44F6-A8E2-E2B90700C74A","mainsnak":{"snaktype":"value","property":"P103","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"43fc2b2664b154de4cfd68b6a5e1239e1b1d9951","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":193563},"type":"wikibase-entityid"}}],"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P268","P813"]}]}],"P119":[{"id":"q42$881F40DC-0AFE-4FEB-B882-79600D234273","mainsnak":{"snaktype":"value","property":"P119","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":533697},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"095945968d9abac7b03e3507fd1336448949cb99","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}],"P535":[{"snaktype":"value","property":"P535","datatype":"string","datavalue":{"value":"22814","type":"string"}}],"
P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P535","P813"]},{"hash":"be3d7896f24c7ae3495cb50bb6791718b78e9103","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://highgatecemetery.org/visit/who","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":533697},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Who\u2019s here","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]}]}],"P20":[{"id":"q42$C0DE2013-54C0-48F9-AD90-8A235248D8C7","mainsnak":{"snaktype":"value","property":"P20","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":159288},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]},{"hash":"e38dadec9d6b784aab71b64e334557250d40c256","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P140":[{"id":"q42$8419C20C-8EF8-4EC0-80D6-AF1CA55E7557","mainsnak":{"snaktype":"value","property":"P140","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":7066},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"23fa16418189eaee574c252fa6c2d4b433e2f9fa","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.douglasadams.eu/en_adams_athee.php","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas Adams and God. 
Portrait of a radical atheist","language":"en"},"type":"monolingualtext"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"snaks-order":["P854","P813","P1476","P407"]},{"hash":"3fb7315a05ace04ecc4bda533fdb612a94b2daca","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nichirenbuddhist.org/Religion/Atheists/DouglasAdams/Interview-American-Atheists.html","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15290435},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2002-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas Adams' Interview with American Atheists","language":"en"},"type":"monolingualtext"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"snaks-order":["P854","P123","P577","P813","P1476","P407"]}]}],"P106":[{"id":"q42$E13E619F-63EF-4B72-99D9-7A45C7C6AD34","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":28389},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"43fc2b2664b154de4cfd68b6a5e1239e1b1d9951","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":193563},"type":"wikibase-entityid"}}],"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P268","P813"]}]},{"id":"Q42$D6E21D67-05D6-4A0B-8458-0744FCEED13D","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":6625963},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q42$7eb8aaef-4ddf-8b87-bd02-406f91a296bd","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":4853732},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"q42$CBDC4890-D5A2-469C-AEBB-EFB682B891E7","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18844224},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"43fc2b2664b154de4cfd68b6a5e1239e1b1d9951","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":193563},"type":"wikibase-entityid"}}],"P268":[{"snakty
pe":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P268","P813"]}]},{"id":"Q42$58F0D772-9CE4-46AC-BF0D-FBBBAFA09603","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":36180},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P244":[{"id":"q42$2D472379-EC67-4C71-9700-0F9D551BF5E6","mainsnak":{"snaktype":"value","property":"P244","datatype":"string","datavalue":{"value":"n80076765","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"3e9859118d01bc62b5dbe8939be812333eb7c594","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1551807},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P947":[{"id":"Q42$cf5f61ec-440d-60d4-7847-e95f75171f2f","mainsnak":{"snaktype":"value","property":"P947","datatype":"string","datavalue":{"value":"000002833","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"980624fa9331261f9383f286b4056619228b626f","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1048694},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P214":[{"id":"q42$488251B2-6732-4D49-85B0-6101803C97AB","mainsnak":{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"3e9859118d01bc62b5dbe8939be812333eb7c594","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1551807},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P345":[{"id":"q42$231549F5-0296-4D87-993D-6CBE3F24C0D2","mainsnak":{"snaktype":"value","property":"P345","datatype":"string","datavalue":{"value":"nm0010930","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P373":[{"id":"q42$7EC4631F-FB22-4768-9B75-61875CD6C854","mainsnak":{"snaktype":"value","property":"P373","datatype":"string","datavalue":{"value":"Douglas Adams","type":"string"}},"type":"statement","rank":"normal"}],"P349":[{"id":"q42$31B1BC2A-D09F-4151-AD2B-5CEA229B9058","mainsnak":{"snaktype":"value","property":"P349","datatype":"string","datavalue":{"value":"00430962","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P213":[{"id":"q42$1CF5840B-A274-402B-9556-F202C2F9B831","mainsnak":{"snaktype":"value","property":"P213","datatype":"string","datavalue":{"value":"0000 0000 8045 
6315","type":"string"}},"type":"statement","rank":"normal"}],"P434":[{"id":"q42$fc61f952-4071-7cc1-c20a-dc7a90ad6515","mainsnak":{"snaktype":"value","property":"P434","datatype":"string","datavalue":{"value":"e9ed318d-8cc5-4cf8-ab77-505e39ab6ea4","type":"string"}},"type":"statement","rank":"normal"}],"P269":[{"id":"q42$D0E17F5E-4302-43F8-926B-5FE7AA8A4380","mainsnak":{"snaktype":"value","property":"P269","datatype":"string","datavalue":{"value":"026677636","type":"string"}},"type":"statement","rank":"normal"}],"P268":[{"id":"q42$BB4B67FE-FECA-4469-9DEE-3E8F03AC9F1D","mainsnak":{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"f70116eac7f49194478b3025330bfd8dcffa3c69","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":8447},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P227":[{"id":"q42$8AA8CCC1-86CE-4C66-88FC-267621A81EA0","mainsnak":{"snaktype":"value","property":"P227","datatype":"string","datavalue":{"value":"119033364","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"3e9859118d01bc62b5dbe8939be812333eb7c594","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1551807},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]},{"hash":"9f454a27f5efb737e03ba11bd3e85a1ea1c08a7d","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1419226},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P535":[{"id":"q42$0DD4F039-6CDC-40C9-871B-63CDE4A47032","mainsnak":{"snaktype":"value","property":"P535","datatype":"string","datavalue":{"value":"22814","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"57a1934b7460d09727f7ad27f181f8b0da396975","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas Noel 
Adams","language":"en"},"type":"monolingualtext"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.findagrave.com/cgi-bin/fg.cgi?page=gr&GRid=22814","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2001-06-25T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P1476","P854","P123","P577","P813"]}]}],"P509":[{"id":"q42$E651BD8A-EA3E-478A-8558-C956EE60B29F","mainsnak":{"snaktype":"value","property":"P509","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":12152},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"095945968d9abac7b03e3507fd1336448949cb99","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}],"P535":[{"snaktype":"value","property":"P535","datatype":"string","datavalue":{"value":"22814","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P535","P813"]},{"hash":"1a6c2030885a0ef22b79ee3102bf84621432d96a","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.historyorb.com/people/douglas-adams","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15290366},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Famous People - Douglas Adams","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]},{"hash":"923566d714cc30be3821ab8383fb973624027a2d","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Obituary: Douglas 
Adams","language":"en"},"type":"monolingualtext"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11148},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2001-05-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.theguardian.com/news/2001/may/15/guardianobituaries.books","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2014-01-03T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P50":[{"snaktype":"value","property":"P50","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18145749},"type":"wikibase-entityid"}}]},"snaks-order":["P1476","P123","P577","P407","P854","P813","P50"]},{"hash":"aff958dcf62e41999ba4e69b26ccd400ddadf6ec","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Hitch Hiker's Guide author Douglas Adams dies aged 49","language":"en"},"type":"monolingualtext"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":192621},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2001-05-13T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.telegraph.co.uk/news/uknews/1330072/Hitch-Hikers-Guide-author-Douglas-Adams-dies-aged-49.html","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2014-01-03T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P1476","P123","P577","P407","P854","P813"]}]}],"P569":[{"id":"q42$D8404CDA-25E4-4334-AF13-A3290BCD9C0F","mainsnak":{"snaktype":"value","property":"P569","datatype":"time","datavalue":{"value":{"time":"+1952-03-11T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}},"type":"statement","rank":"normal","references":[{"hash":"6f6a14880df1a77b2ca9f6093bad8f68386f0d0c","snaks":{"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15222191},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"
type":"time"}}]},"snaks-order":["P268","P248","P813"]},{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P570":[{"id":"q42$65EA9C32-B26C-469B-84FE-FC612B71D159","mainsnak":{"snaktype":"value","property":"P570","datatype":"time","datavalue":{"value":{"time":"+2001-05-11T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}},"type":"statement","rank":"normal","references":[{"hash":"6f6a14880df1a77b2ca9f6093bad8f68386f0d0c","snaks":{"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15222191},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P268","P248","P813"]},{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P691":[{"id":"q42$704392C4-6E77-4E25-855F-7CF2D198DD6A","mainsnak":{"snaktype":"value","property":"P691","datatype":"string","datavalue":{"value":"jn19990000029","type":"string"}},"type":"statement","rank":"normal"}],"P9":[{"id":"q42$76d70dc8-4646-cc84-b66c-be9ed1c469e2","mainsnak":{"snaktype":"value","property":"P9","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623673},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"c75cea5df57da844ab4708013996bf1501def461","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas 
Adams","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]}]}],"P22":[{"id":"q42$9ac7fb72-4402-8d72-f588-a170ca5e715c","mainsnak":{"snaktype":"value","property":"P22","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623675},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"863064bcfd5ad8b5987fe5034e924f5d970e097d","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]}]}],"P25":[{"id":"q42$cf4cccbe-470e-e627-86a3-70ef115f601c","mainsnak":{"snaktype":"value","property":"P25","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623678},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"863064bcfd5ad8b5987fe5034e924f5d970e097d","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas 
Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]}]}],"P26":[{"id":"q42$b88670f8-456b-3ecb-cf3d-2bca2cf7371e","mainsnak":{"snaktype":"value","property":"P26","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623681},"type":"wikibase-entityid"}},"qualifiers":{"P580":[{"hash":"b42b4077a100e1a8cb55586caec525bcee1ed7dd","snaktype":"value","property":"P580","datatype":"time","datavalue":{"value":{"time":"+1991-11-25T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P582":[{"hash":"79d42f9dcfeab031b16f712d728f6a8225329bc6","snaktype":"value","property":"P582","datatype":"time","datavalue":{"value":{"time":"+2001-05-11T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"qualifiers-order":["P580","P582"],"type":"statement","rank":"normal","references":[{"hash":"c75cea5df57da844ab4708013996bf1501def461","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas Adams","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]}]}],"P40":[{"id":"q42$70b600fa-4c0a-b3e6-9e19-1486e71c99fb","mainsnak":{"snaktype":"value","property":"P40","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623683},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"863064bcfd5ad8b5987fe5034e924f5d970e097d","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas 
Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]}]}],"P409":[{"id":"q42$506fc7c8-439d-b77f-5041-8ca85659ad57","mainsnak":{"snaktype":"value","property":"P409","datatype":"string","datavalue":{"value":"35163268","type":"string"}},"type":"statement","rank":"normal"}],"P910":[{"id":"Q42$3B111597-2138-4517-85AD-FD0056D3DEB0","mainsnak":{"snaktype":"value","property":"P910","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":8935487},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P906":[{"id":"Q42$D92DF8AE-786C-4C3E-8A33-BABD8CB06D31","mainsnak":{"snaktype":"value","property":"P906","datatype":"string","datavalue":{"value":"230807","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"6db5f234c81ddf3171f0971c57e1ac2c834b2796","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1798125},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P950":[{"id":"Q42$856BE41B-546B-4381-B671-07DC17E1F677","mainsnak":{"snaktype":"value","property":"P950","datatype":"string","datavalue":{"value":"XX1149955","type":"string"}},"type":"statement","rank":"normal"}],"P1006":[{"id":"Q42$B7643D02-6EF0-4932-A36A-3A2D4DA3F578","mainsnak":{"snaktype":"value","property":"P1006","datatype":"string","datavalue":{"value":"068744307","type":"string"}},"type":"statement","rank":"normal"}],"P1005":[{"id":"Q42$35342507-3E6E-4F3C-9BB6-F05C9F7DBD95","mainsnak":{"snaktype":"value","property":"P1005","datatype":"string","datavalue":{"value":"68537","type":"string"}},"type":"statement","rank":"normal"}],"P949":[{"id":"Q42$2D50AE02-2BD8-4F82-9DFD-B3166DEFDEC1","mainsnak":{"snaktype":"value","property":"P949","datatype":"string","datavalue":{"value":"000163846","type":"string"}},"type":"statement","rank":"normal"}],"P396":[{"id":"Q42$b4c088b8-4bd9-c037-6b4e-7a0be3730947","mainsnak":{"snaktype":"value","property":"P396","datatype":"string","datavalue":{"value":"IT\\ICCU\\RAVV\\034417","type":"string"}},"type":"statement","rank":"normal"}],"P646":[{"id":"Q42$48D9C731-BDA8-45D6-B593-437CD10A51B4","mainsnak":{"snaktype":"value","property":"P646","datatype":"string","datavalue":{"value":"/m/0282x","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"af38848ab5d9d9325cffd93a5ec656cc6ca889ed","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15241312},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+2013-10-28T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P577"]}]}],"P69":[{"id":"q42$0E9C4724-C954-4698-84A7-5CE0D296A6F2","mainsnak":{"snaktype":"value","property":"P69","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":35794},"type":"wikibase-entityid"}},"qualifiers":{"P582":[{"hash":"9c9aa1050b05acfe16f0334bee307c20965ecaf
6","snaktype":"value","property":"P582","datatype":"time","datavalue":{"value":{"time":"+1974-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P812":[{"hash":"81b44430e63da20d9bffc9bad4b244a1a6d30e93","snaktype":"value","property":"P812","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":186579},"type":"wikibase-entityid"}}],"P512":[{"hash":"158d7693369e716aaae6bef281ee0921a2fc5bb2","snaktype":"value","property":"P512","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1765120},"type":"wikibase-entityid"}}]},"qualifiers-order":["P582","P812","P512"],"type":"statement","rank":"normal","references":[{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]},{"hash":"c75cea5df57da844ab4708013996bf1501def461","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas 
Adams","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]}]},{"id":"Q42$7BC309BF-DC35-468D-A7D3-3785922B0B1F","mainsnak":{"snaktype":"value","property":"P69","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":691283},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q42$9a22ca61-4543-4039-67e8-b8b472c629de","mainsnak":{"snaktype":"value","property":"P69","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":4961791},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P1273":[{"id":"Q42$4A2873C0-D848-4F3D-8066-38204E50414C","mainsnak":{"snaktype":"value","property":"P1273","datatype":"string","datavalue":{"value":"a10667040","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"1fe0761d4c6964bd0083fc8af5f2a4d18d707aa6","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"https://viaf.org/viaf/113230702/","type":"string"}}]},"snaks-order":["P854"]}]}],"P1415":[{"id":"Q42$F4EC4761-2DCC-4106-8156-D5D36B5FA29A","mainsnak":{"snaktype":"value","property":"P1415","datatype":"string","datavalue":{"value":"101075853","type":"string"}},"type":"statement","rank":"normal"}],"P108":[{"id":"Q42$853B16C8-1AB3-489A-831E-AEAD7E94AB87","mainsnak":{"snaktype":"value","property":"P108","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":9531},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P1417":[{"id":"Q42$23C4CC8F-2AEE-4730-80E1-288C273E8D3B","mainsnak":{"snaktype":"value","property":"P1417","datatype":"string","datavalue":{"value":"5111","type":"string"}},"qualifiers":{"P958":[{"hash":"f7236db820d284e3ae3c9bfb2f12362e9a7f500e","snaktype":"value","property":"P958","datatype":"string","datavalue":{"value":"Douglas 
Adams","type":"string"}}]},"qualifiers-order":["P958"],"type":"statement","rank":"normal"}],"P800":[{"id":"Q42$FA73986E-3D1D-4CAB-B358-424B58544620","mainsnak":{"snaktype":"value","property":"P800","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":25169},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]},{"id":"Q42$61ce65a9-454a-5b97-e014-496299c1c03a","mainsnak":{"snaktype":"value","property":"P800","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":902712},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"9f05dcf7ed170ed9e18a7a17e601bd158e161fb4","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":52},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P998":[{"id":"Q42$BE724F6B-6981-4DE9-B90C-338768A4BFC4","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"Arts/Literature/Authors/A/Adams,_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"17da29e56d69809fde8793aaa4864de2e6bb5780","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"preferred"},{"id":"Q42$5776B538-2441-4B9E-9C39-4E6289396763","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Dansk/Kultur/Litteratur/Forfattere/A/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"eed80ca4e1ffc12b82c55116042dabdb873707ad","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":9035},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$B60CF952-9C65-4875-A4BA-6B8516C81E99","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Fran%C3%A7ais/Arts/Litt%C3%A9rature/Genres/Science-fiction_et_fantastique/Auteurs/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"3be4fb23771c9decf6c908552444e6753215dcf4","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":150},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$A0B48E74-C934-42B9-A583-FB3EAE4BC9BA","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Deutsch/Kultur/Literatur/Autoren_und_Autorinnen/A/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"bfab56097f2ee29b68110953c09618468db6871b","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":188},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$F2632AC4-6F24-49E4-9E4E-B008F26BA8CE","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Italiano/Arte/Letteratura/Autori/A/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"a77ef6d322e3915085c305de616027d3f709c807","snaktype":"value","proper
ty":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":652},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$84B82B5A-8F33-4229-B988-BF960E676875","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Svenska/Kultur/Litteratur/Genre/Science_fiction_och_fantasy/F%C3%B6rfattare/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"feef8b68d719a5caffb99cd28280ed8133f04965","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":9027},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"}],"P1233":[{"id":"Q42$9F55FA72-F9E5-41E4-A771-041EB1D59C28","mainsnak":{"snaktype":"value","property":"P1233","datatype":"string","datavalue":{"value":"122","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1207":[{"id":"Q42$00ddd8cf-48fa-609f-dd4e-977e9672c96f","mainsnak":{"snaktype":"value","property":"P1207","datatype":"string","datavalue":{"value":"n94004172","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1375":[{"id":"Q42$97db6877-4c06-88ce-2db5-aaba53383fd2","mainsnak":{"snaktype":"value","property":"P1375","datatype":"string","datavalue":{"value":"000010283","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1670":[{"id":"Q42$2370b5b3-487b-89dd-ad93-b023a2a86ac4","mainsnak":{"snaktype":"value","property":"P1670","datatype":"string","datavalue":{"value":"0052C2705","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1284":[{"id":"Q42$2EE16C9C-B74B-4322-9542-4A132555B363","mainsnak":{"snaktype":"value","property":"P1284","datatype":"string","datavalue":{"value":"00000020676","type":"string"}},"type":"statement","rank":"normal"}],"P866":[{"id":"Q42$A29644ED-0377-4F88-8BA6-FAAB7DE8C7BA","mainsnak":{"snaktype":"value","property":"P866","datatype":"string","datavalue":{"value":"douglas-adams","type":"string"}},"type":"statement","rank":"normal"}],"P1695":[{"id":"Q42$9B5EED2E-C3F5-4634-8B85-4D4CC6F15C93","mainsnak":{"snaktype":"value","property":"P1695","datatype":"string","datavalue":{"value":"A11573065","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"74459442f89f27373d1716e217c113727fd9201a","snaks":{"P214":[{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}}],"P248":[{"snaktype":"value","property":"P248
","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+2015-03-07T00:00:00Z","timezone":60,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P214","P248","P813"]}]}],"P1816":[{"id":"Q42$A70EF87C-33F4-4366-B0A7-000C5A3A43E5","mainsnak":{"snaktype":"value","property":"P1816","datatype":"string","datavalue":{"value":"mp60152","type":"string"}},"type":"statement","rank":"normal"}],"P1263":[{"id":"Q42$9B26C69E-7B9E-43EB-9B20-AD1305D1EE6B","mainsnak":{"snaktype":"value","property":"P1263","datatype":"string","datavalue":{"value":"731/000023662","type":"string"}},"type":"statement","rank":"normal"}]},"sitelinks":{"arwiki":{"site":"arwiki","title":"\u062f\u0648\u063a\u0644\u0627\u0633 \u0622\u062f\u0645\u0632","badges":[]},"arzwiki":{"site":"arzwiki","title":"\u062f\u0648\u062c\u0644\u0627\u0633 \u0627\u062f\u0627\u0645\u0632","badges":[]},"azwikiquote":{"site":"azwikiquote","title":"Duqlas Noel Adams","badges":[]},"barwiki":{"site":"barwiki","title":"Douglas Adams","badges":[]},"be_x_oldwiki":{"site":"be_x_oldwiki","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0437","badges":[]},"bewiki":{"site":"bewiki","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"bgwiki":{"site":"bgwiki","title":"\u0414\u044a\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"bgwikiquote":{"site":"bgwikiquote","title":"\u0414\u044a\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"bswiki":{"site":"bswiki","title":"Douglas Adams","badges":[]},"bswikiquote":{"site":"bswikiquote","title":"Douglas Adams","badges":[]},"cawiki":{"site":"cawiki","title":"Douglas Adams","badges":[]},"cswiki":{"site":"cswiki","title":"Douglas Adams","badges":[]},"cswikiquote":{"site":"cswikiquote","title":"Douglas Adams","badges":[]},"cywiki":{"site":"cywiki","title":"Douglas Adams","badges":[]},"dawiki":{"site":"dawiki","title":"Douglas Adams","badges":[]},"dewiki":{"site":"dewiki","title":"Douglas Adams","badges":[]},"dewikiquote":{"site":"dewikiquote","title":"Douglas Adams","badges":[]},"elwiki":{"site":"elwiki","title":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u0386\u03bd\u03c4\u03b1\u03bc\u03c2","badges":[]},"elwikiquote":{"site":"elwikiquote","title":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u0386\u03bd\u03c4\u03b1\u03bc\u03c2","badges":[]},"enwiki":{"site":"enwiki","title":"Douglas Adams","badges":[]},"enwikiquote":{"site":"enwikiquote","title":"Douglas Adams","badges":[]},"eowiki":{"site":"eowiki","title":"Douglas Adams","badges":[]},"eowikiquote":{"site":"eowikiquote","title":"Douglas Adams","badges":[]},"eswiki":{"site":"eswiki","title":"Douglas Adams","badges":[]},"eswikiquote":{"site":"eswikiquote","title":"Douglas Adams","badges":[]},"etwiki":{"site":"etwiki","title":"Douglas Adams","badges":[]},"etwikiquote":{"site":"etwikiquote","title":"Douglas Adams","badges":[]},"euwiki":{"site":"euwiki","title":"Douglas Adams","badges":[]},"fawiki":{"site":"fawiki","title":"\u062f\u0627\u06af\u0644\u0627\u0633 \u0622\u062f\u0627\u0645\u0632","badges":[]},"fawikiquote":{"site":"fawikiquote","title":"\u062f\u0627\u06af\u0644\u0627\u0633 \u0622\u062f\u0627\u0645\u0632","badges":[]},"fiwiki":{"site":"fiwiki","title":"Douglas 
Adams","badges":[]},"fiwikiquote":{"site":"fiwikiquote","title":"Douglas Adams","badges":[]},"frwiki":{"site":"frwiki","title":"Douglas Adams","badges":[]},"frwikiquote":{"site":"frwikiquote","title":"Douglas Adams","badges":[]},"gawiki":{"site":"gawiki","title":"Douglas Adams","badges":[]},"glwiki":{"site":"glwiki","title":"Douglas Adams","badges":[]},"glwikiquote":{"site":"glwikiquote","title":"Douglas Adams","badges":[]},"hewiki":{"site":"hewiki","title":"\u05d3\u05d0\u05d2\u05dc\u05e1 \u05d0\u05d3\u05de\u05e1","badges":[]},"hewikiquote":{"site":"hewikiquote","title":"\u05d3\u05d0\u05d2\u05dc\u05e1 \u05d0\u05d3\u05de\u05e1","badges":[]},"hrwiki":{"site":"hrwiki","title":"Douglas Adams","badges":[]},"huwiki":{"site":"huwiki","title":"Douglas Adams","badges":[]},"huwikiquote":{"site":"huwikiquote","title":"Douglas Adams","badges":[]},"hywiki":{"site":"hywiki","title":"\u0534\u0578\u0582\u0563\u056c\u0561\u057d \u0531\u0564\u0561\u0574\u057d","badges":[]},"hywikiquote":{"site":"hywikiquote","title":"\u0534\u0578\u0582\u0563\u056c\u0561\u057d \u0531\u0564\u0561\u0574\u057d","badges":[]},"idwiki":{"site":"idwiki","title":"Douglas Adams","badges":[]},"iowiki":{"site":"iowiki","title":"Douglas Adams","badges":[]},"iswiki":{"site":"iswiki","title":"Douglas Adams","badges":[]},"itwiki":{"site":"itwiki","title":"Douglas Adams","badges":[]},"itwikiquote":{"site":"itwikiquote","title":"Douglas Adams","badges":[]},"jawiki":{"site":"jawiki","title":"\u30c0\u30b0\u30e9\u30b9\u30fb\u30a2\u30c0\u30e0\u30ba","badges":[]},"jvwiki":{"site":"jvwiki","title":"Douglas Adams","badges":[]},"kawiki":{"site":"kawiki","title":"\u10d3\u10d0\u10d2\u10da\u10d0\u10e1 \u10d0\u10d3\u10d0\u10db\u10e1\u10d8","badges":[]},"kowiki":{"site":"kowiki","title":"\ub354\uae00\ub7ec\uc2a4 \uc560\ub364\uc2a4","badges":[]},"lawiki":{"site":"lawiki","title":"Duglassius Adams","badges":[]},"liwikiquote":{"site":"liwikiquote","title":"Douglas Adams","badges":[]},"ltwikiquote":{"site":"ltwikiquote","title":"Douglas Adamsas","badges":[]},"lvwiki":{"site":"lvwiki","title":"Duglass Adamss","badges":[]},"mgwiki":{"site":"mgwiki","title":"Douglas Adams","badges":[]},"mkwiki":{"site":"mkwiki","title":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"mlwiki":{"site":"mlwiki","title":"\u0d21\u0d17\u0d4d\u0d32\u0d38\u0d4d \u0d06\u0d21\u0d02\u0d38\u0d4d","badges":[]},"mrwiki":{"site":"mrwiki","title":"\u0921\u0917\u094d\u0932\u0938 \u0905\u200d\u0945\u0921\u092e\u094d\u0938","badges":[]},"nlwiki":{"site":"nlwiki","title":"Douglas Adams","badges":[]},"nlwikiquote":{"site":"nlwikiquote","title":"Douglas Adams","badges":[]},"nnwiki":{"site":"nnwiki","title":"Douglas Adams","badges":[]},"nowiki":{"site":"nowiki","title":"Douglas Adams","badges":[]},"ocwiki":{"site":"ocwiki","title":"Douglas Adams","badges":[]},"plwiki":{"site":"plwiki","title":"Douglas Adams","badges":[]},"plwikiquote":{"site":"plwikiquote","title":"Douglas Adams","badges":[]},"ptwiki":{"site":"ptwiki","title":"Douglas Adams","badges":[]},"ptwikiquote":{"site":"ptwikiquote","title":"Douglas Adams","badges":[]},"rowiki":{"site":"rowiki","title":"Douglas Adams","badges":[]},"ruwiki":{"site":"ruwiki","title":"\u0410\u0434\u0430\u043c\u0441, \u0414\u0443\u0433\u043b\u0430\u0441","badges":[]},"ruwikiquote":{"site":"ruwikiquote","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u041d\u043e\u044d\u043b\u044c \u0410\u0434\u0430\u043c\u0441","badges":[]},"scowiki":{"site":"scowiki","title":"Douglas Adams","badges":[]},"shwiki":{"site":"shwiki","title":"Douglas 
Adams","badges":[]},"simplewiki":{"site":"simplewiki","title":"Douglas Adams","badges":[]},"simplewikiquote":{"site":"simplewikiquote","title":"Douglas Adams","badges":[]},"skwiki":{"site":"skwiki","title":"Douglas Adams","badges":[]},"skwikiquote":{"site":"skwikiquote","title":"Douglas Adams","badges":[]},"slwiki":{"site":"slwiki","title":"Douglas Adams","badges":[]},"sqwiki":{"site":"sqwiki","title":"Douglas Adams","badges":[]},"srwiki":{"site":"srwiki","title":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"svwiki":{"site":"svwiki","title":"Douglas Adams","badges":[]},"svwikiquote":{"site":"svwikiquote","title":"Douglas Adams","badges":[]},"tawiki":{"site":"tawiki","title":"\u0b9f\u0b95\u0bcd\u0bb3\u0bb8\u0bcd \u0b86\u0b9f\u0bae\u0bcd\u0bb8\u0bcd","badges":[]},"trwiki":{"site":"trwiki","title":"Douglas Adams","badges":[]},"trwikiquote":{"site":"trwikiquote","title":"Douglas Adams","badges":[]},"ukwiki":{"site":"ukwiki","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"urwiki":{"site":"urwiki","title":"\u0688\u06af\u0644\u0633 \u0627\u06cc\u0688\u0645\u0633","badges":[]},"vepwiki":{"site":"vepwiki","title":"Adams Duglas","badges":[]},"viwiki":{"site":"viwiki","title":"Douglas Adams","badges":[]},"warwiki":{"site":"warwiki","title":"Douglas Adams","badges":[]},"zhwiki":{"site":"zhwiki","title":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af","badges":[]},"zhwikiquote":{"site":"zhwikiquote","title":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af","badges":[]}}}},"success":1}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-Q1.json000066400000000000000000001120231444772566300300510ustar00rootroot00000000000000{"entities":{"Q1":{"id":"Q1","type":"item","labels":{"en":{"language":"en","value":"universe"},"fr":{"language":"fr","value":"Univers"},"la":{"language":"la","value":"universum"},"uz":{"language":"uz","value":"Olam"},"ru":{"language":"ru","value":"\u0432\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f"},"pl":{"language":"pl","value":"wszech\u015bwiat"},"nb":{"language":"nb","value":"universet"},"eo":{"language":"eo","value":"universo"},"it":{"language":"it","value":"universo"},"es":{"language":"es","value":"universo"},"de":{"language":"de","value":"Universum"},"ca":{"language":"ca","value":"univers"},"en-gb":{"language":"en-gb","value":"universe"},"de-ch":{"language":"de-ch","value":"Universum"},"fi":{"language":"fi","value":"maailmankaikkeus"},"nn":{"language":"nn","value":"Universet"},"ja":{"language":"ja","value":"\u5b87\u5b99"},"zh-hant":{"language":"zh-hant","value":"\u5b87\u5b99"},"hr":{"language":"hr","value":"Svemir"},"pt":{"language":"pt","value":"universo"},"simple":{"language":"simple","value":"Universe"},"hu":{"language":"hu","value":"vil\u00e1gegyetem"},"nl":{"language":"nl","value":"heelal"},"ro":{"language":"ro","value":"univers"},"sv":{"language":"sv","value":"universum"},"gl":{"language":"gl","value":"universo"},"eu":{"language":"eu","value":"Unibertso"},"mk":{"language":"mk","value":"\u0432\u0441\u0435\u043b\u0435\u043d\u0430"},"da":{"language":"da","value":"Universet"},"br":{"language":"br","value":"Hollved"},"et":{"language":"et","value":"Universum"},"af":{"language":"af","value":"heelal"},"cy":{"language":"cy","value":"Bydysawd"},"io":{"language":"io","value":"Universo"},"ia":{"language":"ia","value":"Universo"},"is":{"language":"is","value":"Alheimurinn"},"tr":{"language":"tr","value":"Evren"},"cs":{"language":"cs","value":"vesm\u00edr"},"sk":{"language":"sk","
value":"Vesm\u00edr"},"uk":{"language":"uk","value":"\u0432\u0441\u0435\u0441\u0432\u0456\u0442"},"an":{"language":"an","value":"Universo"},"az":{"language":"az","value":"Kainat"},"ast":{"language":"ast","value":"Universu"},"gn":{"language":"gn","value":"Arapy"},"bs":{"language":"bs","value":"Svemir"},"sn":{"language":"sn","value":"Rudunhumwe"},"nv":{"language":"nv","value":"Y\u00e1gh\u00e1hook\u00e1\u00e1n"},"dsb":{"language":"dsb","value":"Uniwersum"},"hif":{"language":"hif","value":"Sansaar"},"fo":{"language":"fo","value":"Alheimurin"},"fy":{"language":"fy","value":"Hielal"},"ga":{"language":"ga","value":"An Chruinne"},"hak":{"language":"hak","value":"Y\u00ee-chhiu"},"id":{"language":"id","value":"Alam semesta"},"jv":{"language":"jv","value":"Alam semesta"},"pam":{"language":"pam","value":"Sikluban"},"csb":{"language":"csb","value":"Swiatnica"},"sw":{"language":"sw","value":"Ulimwengu"},"ht":{"language":"ht","value":"Liniv\u00e8"},"ku":{"language":"ku","value":"Gerd\u00fbn"},"lv":{"language":"lv","value":"Visums"},"lt":{"language":"lt","value":"Visata"},"li":{"language":"li","value":"Universum"},"lmo":{"language":"lmo","value":"\u00dcnivers"},"ms":{"language":"ms","value":"Alam semesta"},"mwl":{"language":"mwl","value":"Ouniberso"},"nah":{"language":"nah","value":"Cem\u0101n\u0101huac"},"nds-nl":{"language":"nds-nl","value":"Hielal"},"nap":{"language":"nap","value":"Annevierzo"},"frr":{"language":"frr","value":"\u00c5\u00e5l"},"nrm":{"language":"nrm","value":"Eunivers"},"nov":{"language":"nov","value":"Universe"},"oc":{"language":"oc","value":"Univ\u00e8rs"},"pfl":{"language":"pfl","value":"Weldall"},"pap":{"language":"pap","value":"Universo"},"nds":{"language":"nds","value":"Weltruum"},"qu":{"language":"qu","value":"Ch'askancha"},"stq":{"language":"stq","value":"Al"},"sq":{"language":"sq","value":"Gjith\u00ebsia"},"scn":{"language":"scn","value":"universu"},"sl":{"language":"sl","value":"Vesolje"},"sh":{"language":"sh","value":"Svemir"},"su":{"language":"su","value":"Jagat"},"tl":{"language":"tl","value":"Uniberso"},"war":{"language":"war","value":"Sangkalibutan"},"vi":{"language":"vi","value":"v\u0169 tr\u1ee5"},"bn":{"language":"bn","value":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac"},"ar":{"language":"ar","value":"\u0643\u0648\u0646"},"arc":{"language":"arc","value":"\u072c\u0712\u071d\u0720"},"arz":{"language":"arz","value":"\u0643\u0648\u0646"},"be":{"language":"be","value":"\u0441\u0443\u0441\u0432\u0435\u0442"},"bg":{"language":"bg","value":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430"},"ckb":{"language":"ckb","value":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646"},"cv":{"language":"cv","value":"\u00c7\u0443\u0442 
\u0422\u0115\u043d\u0447\u0435"},"el":{"language":"el","value":"\u03c3\u03cd\u03bc\u03c0\u03b1\u03bd"},"fa":{"language":"fa","value":"\u06af\u06cc\u062a\u06cc"},"gu":{"language":"gu","value":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1"},"he":{"language":"he","value":"\u05d4\u05d9\u05e7\u05d5\u05dd"},"hi":{"language":"hi","value":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921"},"hy":{"language":"hy","value":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584"},"ka":{"language":"ka","value":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd"},"kk":{"language":"kk","value":"\u04d8\u043b\u0435\u043c"},"kn":{"language":"kn","value":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1"},"ko":{"language":"ko","value":"\uc6b0\uc8fc"},"lez":{"language":"lez","value":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440"},"ml":{"language":"ml","value":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02"},"mn":{"language":"mn","value":"\u041e\u0440\u0447\u043b\u043e\u043d"},"mr":{"language":"mr","value":"\u0935\u093f\u0936\u094d\u0935"},"my":{"language":"my","value":"\u1005\u1000\u103c\u101d\u1020\u102c"},"ne":{"language":"ne","value":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921"},"pnb":{"language":"pnb","value":"\u06a9\u0627\u0626\u0646\u0627\u062a"},"rue":{"language":"rue","value":"\u0412\u0435\u0441\u043c\u0456\u0440"},"sr":{"language":"sr","value":"\u0421\u0432\u0435\u043c\u0438\u0440"},"ta":{"language":"ta","value":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd"},"te":{"language":"te","value":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02"},"tg":{"language":"tg","value":"\u041a\u043e\u0438\u043d\u043e\u0442"},"th":{"language":"th","value":"\u0e40\u0e2d\u0e01\u0e20\u0e1e"},"tt":{"language":"tt","value":"\u0413\u0430\u043b\u04d9\u043c"},"ur":{"language":"ur","value":"\u06a9\u0627\u0626\u0646\u0627\u062a"},"xmf":{"language":"xmf","value":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3"},"yi":{"language":"yi","value":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1"},"zh":{"language":"zh","value":"\u5b87\u5b99"},"en-ca":{"language":"en-ca","value":"Universe"},"pt-br":{"language":"pt-br","value":"universo"},"yue":{"language":"yue","value":"\u5b87\u5b99"},"zh-cn":{"language":"zh-cn","value":"\u5b87\u5b99"},"zh-hans":{"language":"zh-hans","value":"\u5b87\u5b99"},"zh-sg":{"language":"zh-sg","value":"\u5b87\u5b99"},"zh-my":{"language":"zh-my","value":"\u5b87\u5b99"},"zh-hk":{"language":"zh-hk","value":"\u5b87\u5b99"},"zh-tw":{"language":"zh-tw","value":"\u5b87\u5b99"},"zh-mo":{"language":"zh-mo","value":"\u5b87\u5b99"},"si":{"language":"si","value":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba"},"ilo":{"language":"ilo","value":"law-ang"},"jbo":{"language":"jbo","value":"la 
munje"},"vep":{"language":"vep","value":"Mir"},"be-tarask":{"language":"be-tarask","value":"\u0441\u0443\u0441\u044c\u0432\u0435\u0442"},"bar":{"language":"bar","value":"W\u00f6dall"},"pms":{"language":"pms","value":"Univers"},"sr-ec":{"language":"sr-ec","value":"\u0421\u0432\u0435\u043c\u0438\u0440"},"sr-el":{"language":"sr-el","value":"Svemir"},"sco":{"language":"sco","value":"universe"},"or":{"language":"or","value":"\u0b2c\u0b4d\u0b30\u0b39\u0b4d\u0b2e\u0b3e\u0b23\u0b4d\u0b21"},"ba":{"language":"ba","value":"\u0492\u0430\u043b\u04d9\u043c"},"sgs":{"language":"sgs","value":"V\u0117sata"},"nan":{"language":"nan","value":"\u00da-ti\u016b"},"ce":{"language":"ce","value":"\u04c0\u0430\u043b\u0430\u043c"},"lzh":{"language":"lzh","value":"\u5b87\u5b99"},"so":{"language":"so","value":"Koon"},"krc":{"language":"krc","value":"\u0411\u043e\u043b\u0443\u043c"},"bxr":{"language":"bxr","value":"\u041e\u0440\u0448\u043e\u043b\u043e\u043d"},"lb":{"language":"lb","value":"Universum"},"lzz":{"language":"lzz","value":"Universi"},"vec":{"language":"vec","value":"univ\u00e8rso"},"cdo":{"language":"cdo","value":"\u1e72\u0304-d\u00eau"},"gd":{"language":"gd","value":"Domhan"},"sd":{"language":"sd","value":"\u06aa\u0627\u0626\u0646\u0627\u062a"},"xh":{"language":"xh","value":"Ihlabathi"},"gsw":{"language":"gsw","value":"Universum"},"pa":{"language":"pa","value":"\u0a2c\u0a4d\u0a30\u0a39\u0a3f\u0a2e\u0a70\u0a21"}},"aliases":{"pl":[{"language":"pl","value":"kosmos"},{"language":"pl","value":"\u015bwiat"},{"language":"pl","value":"natura"},{"language":"pl","value":"uniwersum"}],"en":[{"language":"en","value":"cosmos"},{"language":"en","value":"The Universe"}],"es":[{"language":"es","value":"cosmos"}],"de":[{"language":"de","value":"Weltall"},{"language":"de","value":"All"},{"language":"de","value":"Kosmos"},{"language":"de","value":"Weltraum"}],"fr":[{"language":"fr","value":"Cosmos"}],"eo":[{"language":"eo","value":"Kosmo"}],"it":[{"language":"it","value":"cosmo"},{"language":"it","value":"spazio"}],"nl":[{"language":"nl","value":"universum"},{"language":"nl","value":"kosmos"},{"language":"nl","value":"cosmos"}],"ca":[{"language":"ca","value":"cosmos"}],"fi":[{"language":"fi","value":"universumi"},{"language":"fi","value":"kaikkeus"}],"hu":[{"language":"hu","value":"univerzum"}],"sv":[{"language":"sv","value":"Kosmos"},{"language":"sv","value":"V\u00e4rldsaltet"},{"language":"sv","value":"V\u00e4rldsrymden"}],"nds":[{"language":"nds","value":"Universum"},{"language":"nds","value":"Kosmos"}],"fa":[{"language":"fa","value":"\u062c\u0647\u0627\u0646"},{"language":"fa","value":"\u0639\u0627\u0644\u0645"},{"language":"fa","value":"\u0686\u0631\u062e 
\u06af\u0631\u062f\u0648\u0646"},{"language":"fa","value":"\u06a9\u06cc\u0647\u0627\u0646"},{"language":"fa","value":"\u06a9\u0627\u06cc\u0646\u0627\u062a"},{"language":"fa","value":"\u0647\u0633\u062a\u06cc"}],"ta":[{"language":"ta","value":"\u0baa\u0bbf\u0bb0\u0baa\u0b9e\u0bcd\u0b9a\u0bae\u0bcd"},{"language":"ta","value":"\u0baa\u0bc7\u0bb0\u0ba3\u0bcd\u0b9f\u0bae\u0bcd"},{"language":"ta","value":"\u0baa\u0bb2\u0bcd\u0bb2\u0ba3\u0bcd\u0b9f\u0bae\u0bcd"},{"language":"ta","value":"\u0b85\u0ba3\u0bcd\u0b9f\u0bb5\u0bc6\u0bb3\u0bbf"}],"pt":[{"language":"pt","value":"Universo"},{"language":"pt","value":"cosmos"}],"ml":[{"language":"ml","value":"\u0d32\u0d4b\u0d15\u0d02"},{"language":"ml","value":"\u0d05\u0d23\u0d4d\u0d21\u0d15\u0d1f\u0d3e\u0d39\u0d02"},{"language":"ml","value":"\u0d2c\u0d4d\u0d30\u0d39\u0d4d\u0d2e\u0d3e\u0d23\u0d4d\u0d21\u0d02"}],"ilo":[{"language":"ilo","value":"uniberso"},{"language":"ilo","value":"universo"}],"cs":[{"language":"cs","value":"ve\u0161kerenstvo"},{"language":"cs","value":"univerzum"}],"ru":[{"language":"ru","value":"\u041c\u0438\u0440"},{"language":"ru","value":"\u041a\u043e\u0441\u043c\u043e\u0441"}],"sr":[{"language":"sr","value":"\u0443\u043d\u0438\u0432\u0435\u0440\u0437\u0443\u043c"}],"scn":[{"language":"scn","value":"cosmu"}],"hr":[{"language":"hr","value":"kozmos"},{"language":"hr","value":"univerzum"}],"pt-br":[{"language":"pt-br","value":"Cosmos"}]},"descriptions":{"la":{"language":"la","value":"res quae omnem materiam et spatium continet"},"en":{"language":"en","value":"totality of planets, stars, galaxies, intergalactic space, or all matter or all energy"},"fr":{"language":"fr","value":"ensemble des plan\u00e8tes, des \u00e9toiles, des galaxies, de l'espace intergalactique, ainsi que de toute la mati\u00e8re et de l'\u00e9nergie"},"pl":{"language":"pl","value":"Wszystko, co fizycznie istnieje: ca\u0142a przestrze\u0144, czas, wszystkie formy materii i energii oraz prawa fizyki i sta\u0142e fizyczne okre\u015blaj\u0105ce ich zachowanie."},"es":{"language":"es","value":"totalidad del espacio-tiempo, la materia y la energ\u00eda existentes"},"de":{"language":"de","value":"Gesamtheit der Energie, des Raumes und der Materie"},"it":{"language":"it","value":"insieme di tutto ci\u00f2 che esiste"},"eo":{"language":"eo","value":"la tuto de \u0109io ekzistanta, steloj, spaco, materio, energio ..."},"nb":{"language":"nb","value":"alt som eksisterer av rom, materie og str\u00e5ling"},"nn":{"language":"nn","value":"alt som eksisterer, derfor all fysisk masse og energi, planetar, stjerner, galaksar, og alt i det intergalaktiske rommet"},"en-gb":{"language":"en-gb","value":"totality of planets, stars, galaxies, intergalactic space, and all matter and energy"},"nl":{"language":"nl","value":"alle materie en energie binnen het gehele ruimte-tijdcontinu\u00fcm waarin wij bestaan"},"ko":{"language":"ko","value":"\ubb34\ud55c\ud55c \uc2dc\uac04\uacfc \ub9cc\ubb3c\uc744 \ud3ec\ud568\ud558\uace0 \uc788\ub294 \ub05d\uc5c6\ub294 \uacf5\uac04\uc758 \ucd1d\uccb4"},"ca":{"language":"ca","value":"totalitat de planetes, estrelles, gal\u00e0xies, espai intergal\u00e0ctic i tota la mat\u00e8ria i energia"},"fi":{"language":"fi","value":"avaruuden ja siin\u00e4 olevan aineen ja energian muodostama kokonaisuus"},"ru":{"language":"ru","value":"\u0444\u0443\u043d\u0434\u0430\u043c\u0435\u043d\u0442\u0430\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043d\u044f\u0442\u0438\u0435 \u0432 \u0430\u0441\u0442\u0440\u043e\u043d\u043e\u043c\u0438\u0438 \u0438 
\u0444\u0438\u043b\u043e\u0441\u043e\u0444\u0438\u0438"},"zh-hans":{"language":"zh-hans","value":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u603b\u4f53"},"zh-hant":{"language":"zh-hant","value":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7e3d\u9ad4"},"zh-cn":{"language":"zh-cn","value":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u603b\u4f53"},"zh-sg":{"language":"zh-sg","value":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u603b\u4f53"},"zh-my":{"language":"zh-my","value":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u603b\u4f53"},"zh":{"language":"zh","value":"\u4e00\u5207\u7a7a\u95f4\u3001\u65f6\u95f4\u3001\u7269\u8d28\u548c\u80fd\u91cf\u6784\u6210\u7684\u603b\u4f53"},"zh-hk":{"language":"zh-hk","value":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7e3d\u9ad4"},"zh-tw":{"language":"zh-tw","value":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7e3d\u9ad4"},"zh-mo":{"language":"zh-mo","value":"\u4e00\u5207\u7a7a\u9593\u3001\u6642\u9593\u3001\u7269\u8cea\u548c\u80fd\u91cf\u69cb\u6210\u7684\u7e3d\u9ad4"},"ja":{"language":"ja","value":"\u60d1\u661f\u3001\u6052\u661f\u3001\u9280\u6cb3\u3001\u9280\u6cb3\u9593\u7a7a\u9593\u3001\u5168\u3066\u306e\u7269\u8cea\u3068\u30a8\u30cd\u30eb\u30ae\u30fc\u306e\u7dcf\u4f53"},"tr":{"language":"tr","value":"y\u0131ld\u0131zlar, gezegenler, gaz, toz, galaksileraras\u0131 madde ve k\u0131saca her \u015fey"},"uk":{"language":"uk","value":"\u0441\u0443\u043a\u0443\u043f\u043d\u0456\u0441\u0442\u044c \u0443\u0441\u044c\u043e\u0433\u043e, \u0449\u043e \u0456\u0441\u043d\u0443\u0454: \u0447\u0430\u0441, \u043f\u0440\u043e\u0441\u0442\u0456\u0440, \u043c\u0430\u0442\u0435\u0440\u0456\u044f, \u0435\u043d\u0435\u0440\u0433\u0456\u044f"},"pt-br":{"language":"pt-br","value":"Tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e energia."},"ta":{"language":"ta","value":"\u0bb5\u0bc6\u0bb3\u0bbf \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0b95\u0bbe\u0bb2\u0bae\u0bcd \u0b86\u0b95\u0bbf\u0baf\u0bb5\u0bb1\u0bcd\u0bb1\u0bbf\u0ba9\u0bcd \u0bae\u0bc1\u0bb4\u0bc1\u0bae\u0bc8"},"ro":{"language":"ro","value":"totalitatea planetelor, stelelor, galaxiilor, spa\u0163iului intergalactic \u015fi al materiei \u015fi energiei"},"da":{"language":"da","value":"totaliteten af alt der eksisterer, inklusiv planeter, stjerner, galakser, det intergalaktiske rum, og alt stof og energi"},"fa":{"language":"fa","value":"\u0645\u062c\u0645\u0648\u0639\u0647 \u0633\u06cc\u0627\u0631\u0647\u200c\u0647\u0627\u060c \u0633\u062a\u0627\u0631\u06af\u0627\u0646\u060c \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627\u060c \u0641\u0636\u0627\u06cc \u0645\u06cc\u0627\u0646 \u06a9\u0647\u06a9\u0634\u0627\u0646\u200c\u0647\u0627 \u0648 \u0647\u0645\u0647 \u0645\u0627\u062f\u0647 \u0648 \u0627\u0646\u0631\u0698\u06cc"},"sv":{"language":"sv","value":"det utrymme som per definition inneh\u00e5ller allting, det vill s\u00e4ga all materia, energi, rumtiden, naturlagarna och alla h\u00e4ndelser"},"pt":{"language":"pt","value":"tudo o que existe fisicamente, a totalidade do espa\u00e7o e tempo e todas as formas de mat\u00e9ria e 
energia"},"ml":{"language":"ml","value":"\u0d2d\u0d57\u0d24\u0d3f\u0d15\u0d2e\u0d3e\u0d2f\u0d3f \u0d28\u0d3f\u0d32\u0d28\u0d3f\u0d7d\u0d15\u0d4d\u0d15\u0d41\u0d28\u0d4d\u0d28 \u0d0e\u0d32\u0d4d\u0d32\u0d3e\u0d02 \u0d1a\u0d47\u0d7c\u0d28\u0d4d\u0d28\u0d24\u0d3e\u0d23\u0d4d \u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02"},"ilo":{"language":"ilo","value":"totalidad iti pannakaparsua a mairaman dagiti planeta, dagiti bituen, dagiti ariwanas, dagiti linaon ti intergalaktiko a limbang, ken amin a banag ken enerhia"},"cs":{"language":"cs","value":"ve\u0161kerenstvo"},"tl":{"language":"tl","value":"planeta, mga bituin, mga galaksiya, mga nilalaman ng intergalaktikong kalawakan, at lahat ng materya at enerhiya"},"oc":{"language":"oc","value":"ensemble de tot \u00e7\u00f2 qu'exit\u00eds"},"af":{"language":"af","value":"al die planete, sterre, sterrestelsels en intergalaktiese ruimtes, asook alle energie en materie"},"sr":{"language":"sr","value":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435"},"sr-ec":{"language":"sr-ec","value":"\u0441\u0432\u0435\u0443\u043a\u0443\u043f\u043d\u043e\u0441\u0442 \u043f\u043b\u0430\u043d\u0435\u0442\u0430, \u0437\u0432\u0435\u0437\u0434\u0430, \u0433\u0430\u043b\u0430\u043a\u0441\u0438\u0458\u0430, \u0438\u043d\u0442\u0435\u0440\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u0447\u043a\u043e\u0433 \u043f\u0440\u043e\u0441\u0442\u043e\u0440\u0430, \u0438 \u0441\u0432\u0435 \u043c\u0430\u0442\u0435\u0440\u0438\u0458\u0435 \u0438 \u0435\u043d\u0435\u0440\u0433\u0438\u0458\u0435"},"sr-el":{"language":"sr-el","value":"sveukupnost planeta, zvezda, galaksija, intergalakti\u010dkog prostora, i sve materije i energije"},"th":{"language":"th","value":"\u0e40\u0e2d\u0e01\u0e20\u0e1e"},"br":{"language":"br","value":"hollad ar planedenno\u00f9, stered, st\u00eargoumoulenno\u00f9, egor etrest\u00eargoumoulek ha kement materi hag energiezh"},"hu":{"language":"hu","value":"bolyg\u00f3knak, csillagoknak, galaxisoknak, intergalaktikus t\u00e9rnek, valamint az anyagnak \u00e9s energi\u00e1nak \u00f6sszess\u00e9ge"},"vi":{"language":"vi","value":"to\u00e0n b\u1ed9 h\u1ec7 th\u1ed1ng kh\u00f4ng-th\u1eddi gian ch\u1ee9a v\u1eadt ch\u1ea5t v\u00e0 n\u0103ng l\u01b0\u1ee3ng"},"el":{"language":"el","value":"\u03c4\u03bf \u03c3\u03cd\u03bd\u03bf\u03bb\u03bf \u03c4\u03c9\u03bd \u03c0\u03c1\u03b1\u03b3\u03bc\u03ac\u03c4\u03c9\u03bd \u03c0\u03bf\u03c5 \u03c5\u03c0\u03ac\u03c1\u03c7\u03bf\u03c5\u03bd"},"ar":{"language":"ar","value":"\u0645\u0631\u062c\u0639"},"scn":{"language":"scn","value":"u nsiemi di tuttu chi\u1e0d\u1e0du chi' esisti"},"vec":{"language":"vec","value":"tute le r\u00f2be che existe, anca oltre a'l pianeta T\u00e8ra"},"gl":{"language":"gl","value":"totalidade do espazo e do tempo"},"sco":{"language":"sco","value":"totality o planets, starns, galaxies, intergalactic space, an aw matter an energy"},"hr":{"language":"hr","value":"\u010ditav prostorno i vremenski beskona\u010dan svijet koji nas 
okru\u017euje"}},"claims":{"P1036":[{"id":"Q1$f5d5115d-489a-7654-9a0a-5eea5be80d07","mainsnak":{"snaktype":"value","property":"P1036","datatype":"string","datavalue":{"value":"113","type":"string"}},"type":"statement","rank":"normal"},{"id":"Q1$f3c718d6-46cd-832b-4365-b6d7c571f594","mainsnak":{"snaktype":"value","property":"P1036","datatype":"string","datavalue":{"value":"523.1","type":"string"}},"type":"statement","rank":"normal"}],"P1245":[{"id":"Q1$6aec36ba-4704-214c-d11b-c183e1f061e4","mainsnak":{"snaktype":"value","property":"P1245","datatype":"string","datavalue":{"value":"8506","type":"string"}},"type":"statement","rank":"normal"}],"P793":[{"id":"Q1$e70e289c-471e-36b8-50ff-25612cf24e70","mainsnak":{"snaktype":"value","property":"P793","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":323},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q1$7b881a36-4708-3c1e-f05d-fd4eb0322087","mainsnak":{"snaktype":"value","property":"P793","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":273508},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q1$c959d8cc-46e6-38b6-75ba-5204a015d4e5","mainsnak":{"snaktype":"value","property":"P793","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":837317},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P31":[{"id":"q1$0479EB23-FC5B-4EEC-9529-CEE21D6C6FA9","mainsnak":{"snaktype":"value","property":"P31","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1454986},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P227":[{"id":"q1$4E4479B7-920C-4AB3-A405-5F3A2168DE91","mainsnak":{"snaktype":"value","property":"P227","datatype":"string","datavalue":{"value":"4079154-3","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P373":[{"id":"q1$BD33C4D4-8E79-40FA-BB26-475CA5E732CE","mainsnak":{"snaktype":"value","property":"P373","datatype":"string","datavalue":{"value":"Universe","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"7eb64cf9621d34c54fd4bd040ed4b61a88c4a1a0","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":328},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P508":[{"id":"q1$766D285D-5EA2-49FA-BDDE-915E3851ECFD","mainsnak":{"snaktype":"value","property":"P508","datatype":"string","datavalue":{"value":"7239","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"33ef5efac8ec9d2de05f19b852ff1698d9036c26","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":460907},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P18":[{"id":"q1$fd1de6d2-4522-5d35-5e15-e7e144452ba9","mainsnak":{"snaktype":"value","property":"P18","datatype":"commonsMedia","datavalue":{"value":"Hubble ultra deep 
field.jpg","type":"string"}},"type":"statement","rank":"normal"}],"P910":[{"id":"Q1$41A4AA15-DF3F-49C9-842C-A2AF0BBCAAD0","mainsnak":{"snaktype":"value","property":"P910","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5551050},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P349":[{"id":"Q1$E0551ECA-8ADE-46E0-AAE7-2C4685C91E89","mainsnak":{"snaktype":"value","property":"P349","datatype":"string","datavalue":{"value":"00574074","type":"string"}},"type":"statement","rank":"normal"}],"P361":[{"id":"q1$21f31f42-4f4d-79b0-0380-92039776e884","mainsnak":{"snaktype":"value","property":"P361","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":3327819},"type":"wikibase-entityid"}},"qualifiers":{"P31":[{"hash":"94962579945ddcb356b701e18b46a8ca04361fac","snaktype":"value","property":"P31","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":41719},"type":"wikibase-entityid"}}]},"qualifiers-order":["P31"],"type":"statement","rank":"deprecated"}],"P580":[{"id":"Q1$789eef0c-4108-cdda-1a63-505cdd324564","mainsnak":{"snaktype":"value","property":"P580","datatype":"time","datavalue":{"value":{"time":"-13798000000-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":3,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}},"qualifiers":{"P459":[{"hash":"188fed94b60f7fa40c5a4a5546df5e45b577f7a3","snaktype":"value","property":"P459","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15605},"type":"wikibase-entityid"}},{"hash":"08f7d11dbd1140f69d2b2152d8b9332e6b2360b4","snaktype":"value","property":"P459","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":76250},"type":"wikibase-entityid"}}],"P805":[{"hash":"751608ca7ec900cb6e6e16e5fc5bbf89447f18a7","snaktype":"value","property":"P805","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":500699},"type":"wikibase-entityid"}}]},"qualifiers-order":["P459","P805"],"type":"statement","rank":"normal","references":[{"hash":"79885b9674cf6fdb3134592581a095f0c6c4d9d3","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15217920},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P1051":[{"id":"Q1$D6ECBA3F-C018-4C0C-A7FA-62B858782609","mainsnak":{"snaktype":"value","property":"P1051","datatype":"string","datavalue":{"value":"517","type":"string"}},"type":"statement","rank":"normal"}]},"sitelinks":{"afwiki":{"site":"afwiki","title":"Heelal","badges":[]},"alswiki":{"site":"alswiki","title":"Universum","badges":[]},"anwiki":{"site":"anwiki","title":"Universo","badges":[]},"arcwiki":{"site":"arcwiki","title":"\u072c\u0712\u071d\u0720","badges":[]},"arwiki":{"site":"arwiki","title":"\u0641\u0636\u0627\u0621 
\u0643\u0648\u0646\u064a","badges":[]},"arzwiki":{"site":"arzwiki","title":"\u0643\u0648\u0646","badges":[]},"astwiki":{"site":"astwiki","title":"Universu","badges":[]},"azwiki":{"site":"azwiki","title":"Kainat","badges":[]},"barwiki":{"site":"barwiki","title":"W\u00f6dall","badges":[]},"bat_smgwiki":{"site":"bat_smgwiki","title":"V\u0117sata","badges":[]},"bawiki":{"site":"bawiki","title":"\u0492\u0430\u043b\u04d9\u043c","badges":["Q17437798"]},"be_x_oldwiki":{"site":"be_x_oldwiki","title":"\u0421\u0443\u0441\u044c\u0432\u0435\u0442","badges":[]},"bewiki":{"site":"bewiki","title":"\u0421\u0443\u0441\u0432\u0435\u0442","badges":[]},"bgwiki":{"site":"bgwiki","title":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"bgwikiquote":{"site":"bgwikiquote","title":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430 \u2014 \u041a\u043e\u0441\u043c\u043e\u0441 \u2014 \u0421\u0432\u0435\u0442\u043e\u0432\u0435","badges":[]},"bnwiki":{"site":"bnwiki","title":"\u09ae\u09b9\u09be\u09ac\u09bf\u09b6\u09cd\u09ac","badges":[]},"brwiki":{"site":"brwiki","title":"Hollved","badges":[]},"bswiki":{"site":"bswiki","title":"Svemir","badges":[]},"bswikiquote":{"site":"bswikiquote","title":"Svemir","badges":[]},"bxrwiki":{"site":"bxrwiki","title":"\u041e\u0440\u0448\u043e\u043b\u043e\u043d","badges":[]},"cawiki":{"site":"cawiki","title":"Univers","badges":[]},"cawikiquote":{"site":"cawikiquote","title":"Univers","badges":[]},"cdowiki":{"site":"cdowiki","title":"\u1e72\u0304-d\u00eau","badges":[]},"cewiki":{"site":"cewiki","title":"\u04c0\u0430\u043b\u0430\u043c","badges":[]},"ckbwiki":{"site":"ckbwiki","title":"\u06af\u06d5\u0631\u062f\u0648\u0648\u0646","badges":[]},"csbwiki":{"site":"csbwiki","title":"Swiatnica","badges":[]},"cswiki":{"site":"cswiki","title":"Vesm\u00edr","badges":[]},"cswikiquote":{"site":"cswikiquote","title":"Vesm\u00edr","badges":[]},"cvwiki":{"site":"cvwiki","title":"\u00c7\u0443\u0442 \u0422\u0115\u043d\u0447\u0435","badges":[]},"cywiki":{"site":"cywiki","title":"Bydysawd (seryddiaeth)","badges":[]},"dawiki":{"site":"dawiki","title":"Universet","badges":[]},"dewiki":{"site":"dewiki","title":"Universum","badges":[]},"dewikiquote":{"site":"dewikiquote","title":"Universum","badges":[]},"dsbwiki":{"site":"dsbwiki","title":"Uniwersum","badges":[]},"elwiki":{"site":"elwiki","title":"\u03a3\u03cd\u03bc\u03c0\u03b1\u03bd","badges":[]},"elwikiquote":{"site":"elwikiquote","title":"\u03a3\u03cd\u03bc\u03c0\u03b1\u03bd","badges":[]},"enwiki":{"site":"enwiki","title":"Universe","badges":[]},"enwikiquote":{"site":"enwikiquote","title":"Universe","badges":[]},"eowiki":{"site":"eowiki","title":"Universo","badges":[]},"eowikiquote":{"site":"eowikiquote","title":"Universo","badges":[]},"eswiki":{"site":"eswiki","title":"Universo","badges":[]},"eswikiquote":{"site":"eswikiquote","title":"Universo","badges":[]},"etwiki":{"site":"etwiki","title":"Universum","badges":[]},"euwiki":{"site":"euwiki","title":"Unibertso","badges":[]},"fawiki":{"site":"fawiki","title":"\u06af\u06cc\u062a\u06cc","badges":[]},"fiwiki":{"site":"fiwiki","title":"Maailmankaikkeus","badges":["Q17437796"]},"fowiki":{"site":"fowiki","title":"Alheimurin","badges":[]},"frrwiki":{"site":"frrwiki","title":"\u00c5\u00e5l","badges":[]},"frwiki":{"site":"frwiki","title":"Univers","badges":[]},"frwikiquote":{"site":"frwikiquote","title":"Univers","badges":[]},"fywiki":{"site":"fywiki","title":"Hielal","badges":[]},"gawiki":{"site":"gawiki","title":"An 
Chruinne","badges":[]},"glwiki":{"site":"glwiki","title":"Universo","badges":[]},"gnwiki":{"site":"gnwiki","title":"Arapy","badges":[]},"guwiki":{"site":"guwiki","title":"\u0aac\u0acd\u0ab0\u0ab9\u0acd\u0aae\u0abe\u0a82\u0aa1","badges":[]},"hakwiki":{"site":"hakwiki","title":"Y\u00ee-chhiu","badges":[]},"hewiki":{"site":"hewiki","title":"\u05d4\u05d9\u05e7\u05d5\u05dd","badges":[]},"hifwiki":{"site":"hifwiki","title":"Sansaar","badges":[]},"hiwiki":{"site":"hiwiki","title":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"hrwiki":{"site":"hrwiki","title":"Svemir","badges":[]},"htwiki":{"site":"htwiki","title":"Liniv\u00e8","badges":[]},"huwiki":{"site":"huwiki","title":"Vil\u00e1gegyetem","badges":[]},"hywiki":{"site":"hywiki","title":"\u054f\u056b\u0565\u0566\u0565\u0580\u0584","badges":[]},"iawiki":{"site":"iawiki","title":"Universo","badges":[]},"idwiki":{"site":"idwiki","title":"Alam semesta","badges":[]},"ilowiki":{"site":"ilowiki","title":"Law-ang","badges":[]},"iowiki":{"site":"iowiki","title":"Universo","badges":[]},"iswiki":{"site":"iswiki","title":"Alheimurinn","badges":[]},"itwiki":{"site":"itwiki","title":"Universo","badges":[]},"itwikibooks":{"site":"itwikibooks","title":"Universo","badges":[]},"itwikiquote":{"site":"itwikiquote","title":"Universo","badges":[]},"jawiki":{"site":"jawiki","title":"\u5b87\u5b99","badges":[]},"jvwiki":{"site":"jvwiki","title":"Alam semesta","badges":[]},"kawiki":{"site":"kawiki","title":"\u10e1\u10d0\u10db\u10e7\u10d0\u10e0\u10dd","badges":[]},"kkwiki":{"site":"kkwiki","title":"\u04d8\u043b\u0435\u043c","badges":[]},"knwiki":{"site":"knwiki","title":"\u0cac\u0ccd\u0cb0\u0cb9\u0ccd\u0cae\u0cbe\u0c82\u0ca1","badges":[]},"kowiki":{"site":"kowiki","title":"\uc6b0\uc8fc","badges":[]},"krcwiki":{"site":"krcwiki","title":"\u0411\u043e\u043b\u0443\u043c","badges":[]},"kuwiki":{"site":"kuwiki","title":"Gerd\u00fbn","badges":[]},"lawiki":{"site":"lawiki","title":"Universum","badges":[]},"lezwiki":{"site":"lezwiki","title":"\u0427\u0438\u043b\u0435\u0440-\u0446\u0430\u0432\u0430\u0440","badges":[]},"liwiki":{"site":"liwiki","title":"Universum","badges":[]},"lmowiki":{"site":"lmowiki","title":"\u00dcnivers","badges":[]},"ltwiki":{"site":"ltwiki","title":"Visata","badges":[]},"ltwikiquote":{"site":"ltwikiquote","title":"Visata","badges":[]},"lvwiki":{"site":"lvwiki","title":"Visums","badges":[]},"mkwiki":{"site":"mkwiki","title":"\u0412\u0441\u0435\u043b\u0435\u043d\u0430","badges":[]},"mlwiki":{"site":"mlwiki","title":"\u0d2a\u0d4d\u0d30\u0d2a\u0d1e\u0d4d\u0d1a\u0d02","badges":[]},"mnwiki":{"site":"mnwiki","title":"\u041e\u0440\u0447\u043b\u043e\u043d","badges":[]},"mrwiki":{"site":"mrwiki","title":"\u0935\u093f\u0936\u094d\u0935","badges":[]},"mswiki":{"site":"mswiki","title":"Alam 
semesta","badges":[]},"mwlwiki":{"site":"mwlwiki","title":"Ouniberso","badges":[]},"mywiki":{"site":"mywiki","title":"\u1005\u1000\u103c\u101d\u1020\u102c","badges":[]},"nahwiki":{"site":"nahwiki","title":"Cemanahuac","badges":["Q17437798"]},"napwiki":{"site":"napwiki","title":"Annevierzo","badges":["Q17437796"]},"nds_nlwiki":{"site":"nds_nlwiki","title":"Hielal","badges":[]},"ndswiki":{"site":"ndswiki","title":"Weltruum","badges":[]},"newiki":{"site":"newiki","title":"\u092c\u094d\u0930\u0939\u094d\u092e\u093e\u0923\u094d\u0921","badges":[]},"nlwiki":{"site":"nlwiki","title":"Heelal","badges":[]},"nnwiki":{"site":"nnwiki","title":"Universet","badges":[]},"nnwikiquote":{"site":"nnwikiquote","title":"Universet","badges":[]},"novwiki":{"site":"novwiki","title":"Universe","badges":[]},"nowiki":{"site":"nowiki","title":"Universet","badges":[]},"nrmwiki":{"site":"nrmwiki","title":"Eunivers","badges":[]},"nvwiki":{"site":"nvwiki","title":"Y\u00e1gh\u00e1hook\u00e1\u00e1n","badges":[]},"ocwiki":{"site":"ocwiki","title":"Univ\u00e8rs","badges":[]},"pamwiki":{"site":"pamwiki","title":"Sikluban","badges":[]},"papwiki":{"site":"papwiki","title":"Universo","badges":[]},"pawiki":{"site":"pawiki","title":"\u0a2c\u0a4d\u0a30\u0a39\u0a3f\u0a2e\u0a70\u0a21","badges":[]},"pflwiki":{"site":"pflwiki","title":"Weldall","badges":[]},"plwiki":{"site":"plwiki","title":"Wszech\u015bwiat","badges":[]},"plwikiquote":{"site":"plwikiquote","title":"Wszech\u015bwiat","badges":[]},"pmswiki":{"site":"pmswiki","title":"Univers","badges":[]},"pnbwiki":{"site":"pnbwiki","title":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"ptwiki":{"site":"ptwiki","title":"Universo","badges":[]},"ptwikiquote":{"site":"ptwikiquote","title":"Universo","badges":[]},"quwiki":{"site":"quwiki","title":"Ch'askancha","badges":[]},"rowiki":{"site":"rowiki","title":"Univers","badges":[]},"ruewiki":{"site":"ruewiki","title":"\u0412\u0435\u0441\u043c\u0456\u0440","badges":[]},"ruwiki":{"site":"ruwiki","title":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"ruwikinews":{"site":"ruwikinews","title":"\u041a\u0430\u0442\u0435\u0433\u043e\u0440\u0438\u044f:\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"ruwikiquote":{"site":"ruwikiquote","title":"\u0412\u0441\u0435\u043b\u0435\u043d\u043d\u0430\u044f","badges":[]},"scnwiki":{"site":"scnwiki","title":"Universu","badges":["Q17437796"]},"scowiki":{"site":"scowiki","title":"Universe","badges":[]},"sdwiki":{"site":"sdwiki","title":"\u06aa\u0627\u0626\u0646\u0627\u062a","badges":[]},"shwiki":{"site":"shwiki","title":"Svemir","badges":[]},"simplewiki":{"site":"simplewiki","title":"Universe","badges":[]},"siwiki":{"site":"siwiki","title":"\u0dc0\u0dd2\u0dc1\u0dca\u0dc0\u0dba","badges":[]},"skwiki":{"site":"skwiki","title":"Vesm\u00edr","badges":[]},"skwikiquote":{"site":"skwikiquote","title":"Vesm\u00edr","badges":[]},"slwiki":{"site":"slwiki","title":"Vesolje","badges":[]},"slwikiquote":{"site":"slwikiquote","title":"Vesolje","badges":[]},"snwiki":{"site":"snwiki","title":"Denganyika","badges":[]},"sowiki":{"site":"sowiki","title":"Koon","badges":[]},"sqwiki":{"site":"sqwiki","title":"Gjith\u00ebsia","badges":[]},"srwiki":{"site":"srwiki","title":"\u0421\u0432\u0435\u043c\u0438\u0440","badges":[]},"srwikiquote":{"site":"srwikiquote","title":"\u0421\u0432\u0435\u043c\u0438\u0440","badges":[]},"stqwiki":{"site":"stqwiki","title":"Al","badges":[]},"suwiki":{"site":"suwiki","title":"Jagat","badges":[]},"svwiki":{"site":"svwiki","title":"Universum","badges":[]},"swwiki
":{"site":"swwiki","title":"Ulimwengu","badges":[]},"tawiki":{"site":"tawiki","title":"\u0b85\u0ba3\u0bcd\u0b9f\u0bae\u0bcd","badges":[]},"tewiki":{"site":"tewiki","title":"\u0c35\u0c3f\u0c36\u0c4d\u0c35\u0c02","badges":[]},"tgwiki":{"site":"tgwiki","title":"\u041a\u043e\u0438\u043d\u043e\u0442","badges":[]},"thwiki":{"site":"thwiki","title":"\u0e40\u0e2d\u0e01\u0e20\u0e1e","badges":[]},"tlwiki":{"site":"tlwiki","title":"Sansinukob","badges":[]},"trwiki":{"site":"trwiki","title":"Evren","badges":[]},"ttwiki":{"site":"ttwiki","title":"\u0413\u0430\u043b\u04d9\u043c","badges":[]},"ukwiki":{"site":"ukwiki","title":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","badges":[]},"ukwikiquote":{"site":"ukwikiquote","title":"\u0412\u0441\u0435\u0441\u0432\u0456\u0442","badges":[]},"urwiki":{"site":"urwiki","title":"\u06a9\u0627\u0626\u0646\u0627\u062a","badges":[]},"uzwiki":{"site":"uzwiki","title":"Olam","badges":[]},"vepwiki":{"site":"vepwiki","title":"Mir","badges":[]},"viwiki":{"site":"viwiki","title":"V\u0169 tr\u1ee5","badges":[]},"warwiki":{"site":"warwiki","title":"Sangkalibutan","badges":[]},"xhwiki":{"site":"xhwiki","title":"Ihlabathi","badges":[]},"xmfwiki":{"site":"xmfwiki","title":"\u10dd\u10e5\u10d8\u10d0\u10dc\u10e3","badges":[]},"yiwiki":{"site":"yiwiki","title":"\u05d0\u05d5\u05e0\u05d9\u05d5\u05d5\u05e2\u05e8\u05e1","badges":[]},"zh_classicalwiki":{"site":"zh_classicalwiki","title":"\u5b87\u5b99","badges":[]},"zh_min_nanwiki":{"site":"zh_min_nanwiki","title":"\u00da-ti\u016b","badges":[]},"zh_yuewiki":{"site":"zh_yuewiki","title":"\u5b87\u5b99","badges":[]},"zhwiki":{"site":"zhwiki","title":"\u5b87\u5b99","badges":[]}}}},"success":1}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-Q32063953.json000066400000000000000000004244731444772566300306460ustar00rootroot00000000000000{"entities":{"Q32063953":{"pageid":1374,"ns":0,"title":"Q1040","lastrevid":619336121,"modified":"2018-01-12T15:50:54Z","redirects":{"from":"Q32063953","to":"Q1040"},"type":"item","id":"Q1040","labels":{"en":{"language":"en","value":"Karlsruhe"},"fr":{"language":"fr","value":"Karlsruhe"},"de":{"language":"de","value":"Karlsruhe"},"af":{"language":"af","value":"Karlsruhe"},"ar":{"language":"ar","value":"\u0643\u0627\u0631\u0644\u0633\u0631\u0648\u0647"},"az":{"language":"az","value":"Karlsrue"},"bar":{"language":"bar","value":"Karlsruhe"},"be":{"language":"be","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},"br":{"language":"br","value":"Karlsruhe"},"bg":{"language":"bg","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435"},"ca":{"language":"ca","value":"Karlsruhe"},"cs":{"language":"cs","value":"Karlsruhe"},"cv":{"language":"cv","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},"cy":{"language":"cy","value":"Karlsruhe"},"da":{"language":"da","value":"Karlsruhe"},"el":{"language":"el","value":"\u039a\u03b1\u03c1\u03bb\u03c3\u03c1\u03bf\u03cd\u03b7"},"eo":{"language":"eo","value":"Karlsruhe"},"es":{"language":"es","value":"Karlsruhe"},"et":{"language":"et","value":"Karlsruhe"},"eu":{"language":"eu","value":"Karlsruhe"},"fa":{"language":"fa","value":"\u06a9\u0627\u0631\u0644\u0633\u0631\u0648\u0647\u0647"},"fi":{"language":"fi","value":"Karlsruhe"},"lb":{"language":"lb","value":"Karlsruhe"},"zh":{"language":"zh","value":"\u5361\u5c14\u65af\u9c81\u5384"},"pl":{"language":"pl","value":"Karlsruhe"},"gd":{"language":"gd","value":"Karlsruhe"},"uz":{"language":"uz","value":"Karlsruhe"},"vo":{"language":"vo","value":"Karlsruhe"},"oc":{"language":"oc","value":"Karlsruhe"},"hu"
:{"language":"hu","value":"Karlsruhe"},"sw":{"language":"sw","value":"Karlsruhe"},"sq":{"language":"sq","value":"Karlsruhe"},"pnb":{"language":"pnb","value":"\u06a9\u0627\u0631\u0644\u0632\u0631\u0648\u06c1\u06d2 \u0645\u062d\u0644"},"nl":{"language":"nl","value":"Karlsruhe"},"sv":{"language":"sv","value":"Karlsruhe"},"pt":{"language":"pt","value":"Karlsruhe"},"is":{"language":"is","value":"Karlsruhe"},"ru":{"language":"ru","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},"sr":{"language":"sr","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435"},"tr":{"language":"tr","value":"Karlsruhe"},"mk":{"language":"mk","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435"},"pfl":{"language":"pfl","value":"Kallsruh"},"uk":{"language":"uk","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435"},"nn":{"language":"nn","value":"Karlsruhe"},"io":{"language":"io","value":"Karlsruhe"},"hr":{"language":"hr","value":"Karlsruhe"},"lmo":{"language":"lmo","value":"Karlsruhe"},"kk":{"language":"kk","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},"nan":{"language":"nan","value":"Karlsruhe"},"ko":{"language":"ko","value":"\uce74\ub97c\uc2a4\ub8e8\uc5d0"},"he":{"language":"he","value":"\u05e7\u05e8\u05dc\u05e1\u05e8\u05d5\u05d4\u05d4"},"frr":{"language":"frr","value":"Karlsruhe"},"lv":{"language":"lv","value":"Karlsr\u016be"},"it":{"language":"it","value":"Karlsruhe"},"gl":{"language":"gl","value":"Karlsruhe"},"id":{"language":"id","value":"Karlsruhe"},"ja":{"language":"ja","value":"\u30ab\u30fc\u30eb\u30b9\u30eb\u30fc\u30a8"},"vi":{"language":"vi","value":"Karlsruhe"},"sh":{"language":"sh","value":"Karlsruhe"},"sk":{"language":"sk","value":"Karlsruhe"},"ku":{"language":"ku","value":"Karlsruhe"},"fy":{"language":"fy","value":"Karlsruhe"},"ro":{"language":"ro","value":"Karlsruhe"},"la":{"language":"la","value":"Carolsruha"},"war":{"language":"war","value":"Karlsruhe"},"mr":{"language":"mr","value":"\u0915\u093e\u0930\u094d\u0932\u094d\u0938\u0930\u0942\u0939"},"ka":{"language":"ka","value":"\u10d9\u10d0\u10e0\u10da\u10e1\u10e0\u10e3\u10d4"},"nds":{"language":"nds","value":"Karlsruhe"},"lt":{"language":"lt","value":"Karlsr\u016bj\u0117"},"en-ca":{"language":"en-ca","value":"Karlsruhe"},"en-gb":{"language":"en-gb","value":"Karlsruhe"},"gsw":{"language":"gsw","value":"Karlsrueh"},"be-tarask":{"language":"be-tarask","value":"\u041a\u0430\u0440\u043b\u044c\u0441\u0440\u0443\u044d"},"pt-br":{"language":"pt-br","value":"Karlsruhe"},"stq":{"language":"stq","value":"Karlsruhe"},"th":{"language":"th","value":"\u0e04\u0e32\u0e23\u0e4c\u0e25\u0e2a\u0e23\u0e39\u0e40\u0e2d\u0e2d"},"ta":{"language":"ta","value":"\u0b95\u0bbe\u0bb0\u0bcd\u0bb2\u0bcd\u0b9a\u0bc1\u0bb0\u0bc1\u0bb9\u0bcd"},"sah":{"language":"sah","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},"nb":{"language":"nb","value":"Karlsruhe"},"sco":{"language":"sco","value":"Karlsruhe"},"tw":{"language":"tw","value":"Karlsruhe"},"yi":{"language":"yi","value":"\u05e7\u05d0\u05e8\u05dc\u05e1\u05e8\u05d5\u05e2"},"hy":{"language":"hy","value":"\u053f\u0561\u0580\u056c\u057d\u0580\u0578\u0582\u0565"},"hi":{"language":"hi","value":"\u0915\u093e\u0930\u094d\u0932\u094d\u0938\u0930\u0941\u0939\u0947"},"ie":{"language":"ie","value":"Karlsruhe"},"szl":{"language":"szl","value":"Karlsruhe"},"yue":{"language":"yue","value":"\u5361\u65af\u9b6f\u4e9e"},"mn":{"language":"mn","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},"ms":{"language":"ms","value":"Karlsruhe"},"sl":{"language":"sl","value":"Karlsruhe"},"an":{"language":"an","
value":"Karlsruhe"},"ast":{"language":"ast","value":"Karlsruhe"},"co":{"language":"co","value":"Karlsruhe"},"de-at":{"language":"de-at","value":"Karlsruhe"},"de-ch":{"language":"de-ch","value":"Karlsruhe"},"frp":{"language":"frp","value":"Karlsruhe"},"fur":{"language":"fur","value":"Karlsruhe"},"ga":{"language":"ga","value":"Karlsruhe"},"ia":{"language":"ia","value":"Karlsruhe"},"kg":{"language":"kg","value":"Karlsruhe"},"li":{"language":"li","value":"Karlsruhe"},"lij":{"language":"lij","value":"Karlsruhe"},"mg":{"language":"mg","value":"Karlsruhe"},"min":{"language":"min","value":"Karlsruhe"},"nap":{"language":"nap","value":"Karlsruhe"},"nds-nl":{"language":"nds-nl","value":"Karlsruhe"},"nrm":{"language":"nrm","value":"Karlsruhe"},"pcd":{"language":"pcd","value":"Karlsruhe"},"pms":{"language":"pms","value":"Karlsruhe"},"rm":{"language":"rm","value":"Karlsruhe"},"sc":{"language":"sc","value":"Karlsruhe"},"scn":{"language":"scn","value":"Karlsruhe"},"sr-el":{"language":"sr-el","value":"Karlsruhe"},"vec":{"language":"vec","value":"Karlsruhe"},"vls":{"language":"vls","value":"Karlsruhe"},"wa":{"language":"wa","value":"Karlsruhe"},"wo":{"language":"wo","value":"Karlsruhe"},"zu":{"language":"zu","value":"Karlsruhe"},"hsb":{"language":"hsb","value":"Karlsruhe"},"dsb":{"language":"dsb","value":"Karlsruhe"},"tk":{"language":"tk","value":"Karlsrue"},"tt":{"language":"tt","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},"hak":{"language":"hak","value":"Karlsruhe"},"azb":{"language":"azb","value":"\u06a9\u0627\u0631\u0644\u0633\u0631\u0648\u0647\u0647"},"te":{"language":"te","value":"\u0c15\u0c3e\u0c30\u0c4d\u0c32\u0c4d\u0c38\u0c4d\u0c30\u0c41\u0c39\u0c46"},"bn":{"language":"bn","value":"\u0995\u09be\u09b0\u09cd\u09b2\u09b8\u09b0\u09c1\u09b9\u09c7"},"ur":{"language":"ur","value":"\u06a9\u0631\u0644\u0633\u0631\u0648\u062d\u06cc"},"si":{"language":"si","value":"\u0d9a\u0dca\u0dbd\u0dca\u0dc1\u0dca\u0dbb\u0dd4\u0dc4\u0dd2"},"gu":{"language":"gu","value":"\u0a95\u0abe\u0ab0\u0acd\u0ab2\u0acd\u0ab8\u0ab0\u0ac1\u0ab9"},"kn":{"language":"kn","value":"\u0c95\u0cbe\u0cb0\u0ccd\u0cb2\u0ccd\u0cb8\u0ccd\u0cb0\u0cc1\u0cb9\u0cc6"},"ceb":{"language":"ceb","value":"Karlsruhe"}},"descriptions":{"en":{"language":"en","value":"German city in the state of Baden-W\u00fcrttemberg"},"fr":{"language":"fr","value":"ville du Bade-Wurtemberg"},"de":{"language":"de","value":"Gro\u00dfstadt in Baden-W\u00fcrttemberg"},"ca":{"language":"ca","value":"Ciutat alemanya pertanyent al Land de Baden-W\u00fcrttemberg."},"it":{"language":"it","value":"citt\u00e0 extracircondariale tedesca"},"es":{"language":"es","value":"ciudad y distrito urbano de Alemania"},"ru":{"language":"ru","value":"\u0433\u043e\u0440\u043e\u0434 \u0432 \u0437\u0435\u043c\u043b\u0435 \u0411\u0430\u0434\u0435\u043d-\u0412\u044e\u0440\u0442\u0435\u043c\u0431\u0435\u0440\u0433, \u0413\u0435\u0440\u043c\u0430\u043d\u0438\u044f"},"fa":{"language":"fa","value":"\u06cc\u06a9\u06cc \u0627\u0632 \u0634\u0647\u0631\u0647\u0627\u06cc \u0645\u062d\u0642\u0642 \u067e\u0631\u0648\u0631 \u0622\u0644\u0645\u0627\u0646"},"nl":{"language":"nl","value":"stad"},"pl":{"language":"pl","value":"miasto w Niemczech, w Badenii-Wirtembergii"},"zh-hans":{"language":"zh-hans","value":"\u4f4d\u4e8e\u5fb7\u56fd\u5df4\u767b-\u7b26\u817e\u5821\u5dde\u7684\u57ce\u5e02"},"hu":{"language":"hu","value":"v\u00e1ros Baden-W\u00fcrttemberg tartom\u00e1nyban, N\u00e9metorsz\u00e1g"},"el":{"language":"el","value":"\u03c0\u03cc\u03bb\u03b7 \u03c4\u03b7\u03c2 
\u0393\u03b5\u03c1\u03bc\u03b1\u03bd\u03af\u03b1\u03c2, \u03c3\u03c4\u03b7\u03bd \u0392\u03ac\u03b4\u03b7-\u0392\u03c5\u03c1\u03c4\u03b5\u03bc\u03b2\u03ad\u03c1\u03b3\u03b7"},"sl":{"language":"sl","value":"mesto v Nem\u010diji"},"ne":{"language":"ne","value":"\u092c\u093e\u0921\u0947\u0928-\u0935\u094d\u092f\u0941\u0930\u094d\u091f\u0947\u092c\u0930\u094d\u0917 \u0930\u093e\u091c\u094d\u092f\u0915\u094b \u091c\u0930\u094d\u092e\u0928 \u0938\u0939\u0930"},"he":{"language":"he","value":"\u05e2\u05d9\u05e8 \u05d1\u05d2\u05e8\u05de\u05e0\u05d9\u05d4"},"ja":{"language":"ja","value":"\u30c9\u30a4\u30c4\u306e\u90fd\u5e02"},"eo":{"language":"eo","value":"urbo en Baden-Virtembergo, Germanio"},"sq":{"language":"sq","value":"qytet n\u00eb Gjermani"},"kn":{"language":"kn","value":"\u0cac\u0cbe\u0ca1\u0cc6\u0ca8\u0ccd-\u0cb5\u0cc1\u0cb0\u0ccd\u0c9f\u0cc6\u0c82\u0cac\u0cb0\u0ccd\u0c97\u0ccd \u0cb0\u0cbe\u0c9c\u0ccd\u0caf\u0ca6 \u0c9c\u0cb0\u0ccd\u0cae\u0ca8\u0ccd \u0ca8\u0c97\u0cb0"},"lb":{"language":"lb","value":"Stad a Baden-W\u00fcrttemberg, D\u00e4itschland"},"hsb":{"language":"hsb","value":"m\u011bsto w Badensko-W\u00fcrttembergskej"}},"aliases":{"pl":[{"language":"pl","value":"Carlsruhe"}],"uz":[{"language":"uz","value":"Karlsrue"},{"language":"uz","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"}],"nl":[{"language":"nl","value":"Karelsrust"}],"ru":[{"language":"ru","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435"}],"sr":[{"language":"sr","value":"Karlsruhe"},{"language":"sr","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0445\u0435"}],"mk":[{"language":"mk","value":"\u10d9\u10d0\u10e0\u10da\u10e1\u10e0\u10e3\u10d4"},{"language":"mk","value":"Karlsr\u016bj\u0117"},{"language":"mk","value":"\u0413\u043e\u0440\u0430\u0434 \u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},{"language":"mk","value":"\u5361\u5c14\u65af\u9c81\u5384"},{"language":"mk","value":"Karlsr\u016be"},{"language":"mk","value":"\u30ab\u30fc\u30eb\u30b9\u30eb\u30fc\u30a8"},{"language":"mk","value":"\u0915\u093e\u0930\u094d\u0932\u094d\u0938\u0930\u0942\u0939"},{"language":"mk","value":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d"},{"language":"mk","value":"Karlsruhe"},{"language":"mk","value":"\u039a\u03b1\u03c1\u03bb\u03c3\u03c1\u03bf\u03cd\u03b7"},{"language":"mk","value":"\uce74\ub97c\uc2a4\ub8e8\uc5d0"},{"language":"mk","value":"Karlsrue"},{"language":"mk","value":"\u041a\u0430\u0440\u043b\u044c\u0441\u0440\u0443\u044d"},{"language":"mk","value":"Carolsruha"},{"language":"mk","value":"\u06a9\u0627\u0631\u0644\u0633\u0631\u0648\u0647\u0647"},{"language":"mk","value":"\u0643\u0627\u0631\u0644\u0633\u0631\u0648\u0647"},{"language":"mk","value":"\u05e7\u05e8\u05dc\u05e1\u05e8\u05d5\u05d4\u05d4"}],"pfl":[{"language":"pfl","value":"Karlsruh"},{"language":"pfl","value":"Karlsruhe"}],"ko":[{"language":"ko","value":"\uce7c\uc2a4\ub8e8\ud5e4"},{"language":"ko","value":"\uce7c\uc2a4\ub8e8\uc5d0"}],"he":[{"language":"he","value":"\u05e7\u05d0\u05e8\u05dc\u05e1\u05e8\u05d5\u05d0\u05d4"},{"language":"he","value":"\u05e7\u05d0\u05e8\u05dc\u05e1\u05e8\u05d5\u05d4\u05d4"},{"language":"he","value":"\u05e7\u05e8\u05dc\u05e1\u05e8\u05d5\u05d0"}],"sh":[{"language":"sh","value":"Karlsrue"}],"fy":[{"language":"fy","value":"Karslruhe"},{"language":"fy","value":"Karslr\u00fbhe"}],"mr":[{"language":"mr","value":"\u0915\u093e\u0930\u094d\u0932\u094d\u0938\u0930\u0941\u0939\u0947"}],"lt":[{"language":"lt","value":"Karlsr\u016b\u0117"},{"language":"lt","value":"Karlsruh\u0117"},{"language":"lt","value":"Karlsruhe"},{"language"
:"lt","value":"Karlsruj\u0117"},{"language":"lt","value":"Karlsr\u016bh\u0117"}],"de":[{"language":"de","value":"Carlsruhe"}],"fa":[{"language":"fa","value":"\u06a9\u0627\u0644\u0632\u0631\u0648\u0647\u0647"}]},"claims":{"P898":[{"mainsnak":{"snaktype":"value","property":"P898","hash":"f5ee98099ef2e3ba588cdf64a660857ab1eecfce","datavalue":{"value":"\u02c8ka\u0281ls\u02cc\u0281u\u02d0\u0259","type":"string"},"datatype":"string"},"type":"statement","qualifiers":{"P407":[{"snaktype":"value","property":"P407","hash":"46bfd327b830f66f7061ea92d1be430c135fa91f","datavalue":{"value":{"entity-type":"item","numeric-id":188,"id":"Q188"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"qualifiers-order":["P407"],"id":"Q1040$4dbe1f70-4804-0e65-af42-00cd784208e6","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P898","hash":"a9bf8a5ba503b63486e205af41cb92bcaa6dbae0","datavalue":{"value":"k\u0250rl\u02c8sru\u025b","type":"string"},"datatype":"string"},"type":"statement","qualifiers":{"P407":[{"snaktype":"value","property":"P407","hash":"d291ddb7cd77c94a7bd709a8395934147e0864fc","datavalue":{"value":{"entity-type":"item","numeric-id":7737,"id":"Q7737"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"qualifiers-order":["P407"],"id":"Q1040$6415bf3f-4829-6ec9-b44c-5f225e365960","rank":"normal"}],"P1036":[{"mainsnak":{"snaktype":"value","property":"P1036","hash":"e982ab45f8bd6a4dd644a04fe82d404294afb737","datavalue":{"value":"2--4346436","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$9e12ea56-4d0a-e8fe-f46b-8a818607e045","rank":"normal"}],"P131":[{"mainsnak":{"snaktype":"value","property":"P131","hash":"c8318131d25195c1a1d4190a9ddb710f017bad53","datavalue":{"value":{"entity-type":"item","numeric-id":8165,"id":"Q8165"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$473BE35C-0B3D-447F-82C0-4FD06EA47CA5","rank":"normal"}],"P373":[{"mainsnak":{"snaktype":"value","property":"P373","hash":"bebd0551dde251cf963482d68fcf8cce36ed59e9","datavalue":{"value":"Karlsruhe","type":"string"},"datatype":"string"},"type":"statement","id":"q1040$0ADFD305-E731-46E5-B8CD-09C13C5BD4FD","rank":"normal"}],"P190":[{"mainsnak":{"snaktype":"value","property":"P190","hash":"237d79d24ec7478bb71cf91e673dfadc370ab4fd","datavalue":{"value":{"entity-type":"item","numeric-id":40898,"id":"Q40898"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"f7d7652f298754ea7a5360cf532800f61238ac2f","datavalue":{"value":{"time":"+1955-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q1040$6716a3c9-43b6-6b65-cf44-b8ae02bd884b","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P190","hash":"584c99fe5635685652e22a09115be507f7213372","datavalue":{"value":{"entity-type":"item","numeric-id":41262,"id":"Q41262"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"d70d54479eda7a1fc2b29e1a4bc88cc2fddd09f3","datavalue":{"value":{"time":"+1969-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q1040$32277705-42de-3d8d-f542-29db46e07a7e","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P190"
,"hash":"95d93ffa08dc52d4aa4645a5e5d08cc6dbbc5268","datavalue":{"value":{"entity-type":"item","numeric-id":3646,"id":"Q3646"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"f77e401c3294559f5f96c8fc18f16d1c6aad6e22","datavalue":{"value":{"time":"+1998-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q1040$5541d8fa-4a7f-a7c6-289c-ea6b9748296a","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P190","hash":"611b13e78bcd6bf052b4520539d810fa206415d6","datavalue":{"value":{"entity-type":"item","numeric-id":83404,"id":"Q83404"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"ed730e4c258f94d6bf2b86c50e53de07ad209ae2","datavalue":{"value":{"time":"+1992-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q1040$c7467b86-488b-6a05-8dc5-8088c87242a7","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P190","hash":"2efb15327929c81f336952085fbad88fc5972db9","datavalue":{"value":{"entity-type":"item","numeric-id":2814,"id":"Q2814"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"cb8638f8586a74f9f8254deab9fdde682330a924","datavalue":{"value":{"time":"+1987-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q1040$C3EC2496-E79F-471E-97DD-248193439D69","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P190","hash":"1c6bdcb5c8497f25026c21aa841dea29daff7056","datavalue":{"value":{"entity-type":"item","numeric-id":643919,"id":"Q643919"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"668b394a513775cba820ee71742b7b8e50cd4dcc","datavalue":{"value":{"time":"+1998-10-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":10,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q1040$CBC78461-00AC-4F93-B26A-20F186E5E4E7","rank":"normal"}],"P439":[{"mainsnak":{"snaktype":"value","property":"P439","hash":"115cec4db4b7230d187adac206076114a8844a39","datavalue":{"value":"08212000","type":"string"},"datatype":"external-id"},"type":"statement","id":"q1040$B2B4EEF2-DE28-4111-93F8-0D30D125B99D","rank":"normal","references":[{"hash":"fa278ebfc458360e5aed63d5058cca83c46134f1","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"e4f6d9441d0600513c4533c672b5ab472dc73694","datavalue":{"value":{"entity-type":"item","numeric-id":328,"id":"Q328"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P41":[{"mainsnak":{"snaktype":"value","property":"P41","hash":"fc10149dbaa205f7c1225bf26a8cf8a6495dc115","datavalue":{"value":"Flagge 
Karlsruhe.svg","type":"string"},"datatype":"commonsMedia"},"type":"statement","id":"q1040$1D714C4C-9E6D-4E7C-B044-F699CFD31A88","rank":"normal","references":[{"hash":"c456dc5cd2117249948c288206ff3f8b1bf574f0","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"e17507043402fe54ae6c4d65cc51f46cec987de9","datavalue":{"value":{"entity-type":"item","numeric-id":8449,"id":"Q8449"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P94":[{"mainsnak":{"snaktype":"value","property":"P94","hash":"405e0c9330070d8cc824dc694f87c14cb51bc8ae","datavalue":{"value":"Coat of arms de-bw Karlsruhe.svg","type":"string"},"datatype":"commonsMedia"},"type":"statement","id":"q1040$FE6E04BD-CFB1-4CE2-8235-C93C7B345FD5","rank":"normal","references":[{"hash":"c456dc5cd2117249948c288206ff3f8b1bf574f0","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"e17507043402fe54ae6c4d65cc51f46cec987de9","datavalue":{"value":{"entity-type":"item","numeric-id":8449,"id":"Q8449"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P281":[{"mainsnak":{"snaktype":"value","property":"P281","hash":"7d34de349f1e29bf29974aad6eba0ab5a312666f","datavalue":{"value":"76229","type":"string"},"datatype":"string"},"type":"statement","id":"q1040$8AA74BEE-3A46-4FE5-8378-DC3FBCB39606","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"c4f25f0d156b02b3ecce2c14201d58548d5dee14","datavalue":{"value":"76131","type":"string"},"datatype":"string"},"type":"statement","id":"q1040$0F769BFB-421E-4B48-9AEB-70E136E93CB4","rank":"normal","references":[{"hash":"d5847b9b6032aa8b13dae3c2dfd9ed5d114d21b3","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"5a343e7e758a4282a01316d3e959b6e653b767fc","datavalue":{"value":{"entity-type":"item","numeric-id":11920,"id":"Q11920"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]},{"mainsnak":{"snaktype":"value","property":"P281","hash":"b33da95e0cda95f38fa0013217d277cb95430a48","datavalue":{"value":"76137","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$4459f78e-4192-ecbf-73a8-c2c3d1317e52","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"d2c941f5ec17914a2b91db6e983ab29a709621e0","datavalue":{"value":"76133","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$bc5fec34-4e03-bf5f-40d8-f1126334f747","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"3cbd147d5eae234daff829f82c4a7894ad05366b","datavalue":{"value":"76135","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$12368da8-474a-fc7e-8ae8-da7d44ea6add","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"fd377d7c6acc3450f4bd379c1d2beea3cdc97227","datavalue":{"value":"76139","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$b2a4daa3-4c1c-7ce7-ab8b-8e7dff5377a4","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"1aedb5c1fce0c671cb6c06d69cc1959f233128e5","datavalue":{"value":"76149","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$6f28e58c-458c-4bb4-c89e-736635123491","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"d171f94ee527488650f6d89edd272a9420c34052","datavalue":{"value":"76199","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$db59dd14-4e65-fc84-48ce-55178e439f1f","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"ec7beccb1e4a04b169b01e0ecc2e6c0bcd8748ff
","datavalue":{"value":"76185","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$efe0cc12-46f7-d8c4-223e-3dd2cf1b05dc","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"b60d68c35db525712288de0c75c726670d2c9a37","datavalue":{"value":"76187","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$d7922b57-4f74-d49b-0915-77541692ca8d","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"333ee0af27dcc29c98d47e586ed4a08d02c840e3","datavalue":{"value":"76189","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$e74e341b-4e2d-98c4-6c5f-689fc8e98263","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"8bd03e1a5c5854307a6bff9851a95d64ba3cf68a","datavalue":{"value":"76227","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$1261bbf0-4556-1afe-a451-29965e16d027","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P281","hash":"9910e0707256197c01ddba6acd706258ea5a7e80","datavalue":{"value":"76228","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$9692a5f3-4b21-b5da-4236-01278ff0ad95","rank":"normal"}],"P18":[{"mainsnak":{"snaktype":"value","property":"P18","hash":"497622eb192c676312486cd055a90b013803c715","datavalue":{"value":"Karlsruhe town centre air.jpg","type":"string"},"datatype":"commonsMedia"},"type":"statement","id":"q1040$3CF0D36A-2629-4F84-A2F7-F2BB13BE608D","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P18","hash":"4d3b25f1a6d4adfe1f2b62a0477921dd8343dd70","datavalue":{"value":"Karlsruhe-Schloss-meph666-2005-Apr-22.jpg","type":"string"},"datatype":"commonsMedia"},"type":"statement","id":"Q1040$1151f1f0-498b-baed-aa56-4ff62ee61994","rank":"preferred"}],"P421":[{"mainsnak":{"snaktype":"value","property":"P421","hash":"53ab731bb59fd06a123a2d042d206bd52199e918","datavalue":{"value":{"entity-type":"item","numeric-id":6655,"id":"Q6655"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P1264":[{"snaktype":"value","property":"P1264","hash":"5a71e6a321809311ea1a1998db98d29c34b5226d","datavalue":{"value":{"entity-type":"item","numeric-id":1777301,"id":"Q1777301"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"qualifiers-order":["P1264"],"id":"q1040$648C1873-2589-4F0B-903F-A87E26B8F210","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P421","hash":"2f5d88b20f122f125387179df70f54990074d5a8","datavalue":{"value":{"entity-type":"item","numeric-id":6723,"id":"Q6723"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P1264":[{"snaktype":"value","property":"P1264","hash":"bbce95c43e978fcfb784754a6507030a95cf31f3","datavalue":{"value":{"entity-type":"item","numeric-id":36669,"id":"Q36669"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"qualifiers-order":["P1264"],"id":"Q1040$7C645CC0-AAE5-4331-A473-CF48F7ED2462","rank":"normal"}],"P473":[{"mainsnak":{"snaktype":"value","property":"P473","hash":"f25b0dd537eb300ef901c66688a2a0b62f409fe3","datavalue":{"value":"721","type":"string"},"datatype":"string"},"type":"statement","id":"q1040$49A2C301-CDCB-47B9-8140-AD7124E3571B","rank":"normal"}],"P242":[{"mainsnak":{"snaktype":"value","property":"P242","hash":"a264455f307ede1cf1c8781726118e7d67b8ae66","datavalue":{"value":"Baden-W\u00fcrttemberg KA 
(town).svg","type":"string"},"datatype":"commonsMedia"},"type":"statement","id":"q1040$392A53ED-B173-44C0-87B9-3CECF79B47BA","rank":"normal","references":[{"hash":"d5847b9b6032aa8b13dae3c2dfd9ed5d114d21b3","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"5a343e7e758a4282a01316d3e959b6e653b767fc","datavalue":{"value":{"entity-type":"item","numeric-id":11920,"id":"Q11920"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P625":[{"mainsnak":{"snaktype":"value","property":"P625","hash":"67d91b4777b651a49d287273799e545839beca38","datavalue":{"value":{"latitude":49.013888888889,"longitude":8.4041666666667,"altitude":null,"precision":0.00027777777777778,"globe":"http://www.wikidata.org/entity/Q2"},"type":"globecoordinate"},"datatype":"globe-coordinate"},"type":"statement","id":"q1040$514B7998-911E-4539-A55D-122295BC93B3","rank":"normal","references":[{"hash":"288ab581e7d2d02995a26dfa8b091d96e78457fc","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"6a164248fc96bfa583bbb495cb63ae6401ec203c","datavalue":{"value":{"entity-type":"item","numeric-id":206855,"id":"Q206855"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]},{"mainsnak":{"snaktype":"value","property":"P625","hash":"847a79a4805d47bdd8f350fb15d61f6f348a79c5","datavalue":{"value":{"latitude":49.00937,"longitude":8.40444,"altitude":null,"precision":1.0e-5,"globe":"http://www.wikidata.org/entity/Q2"},"type":"globecoordinate"},"datatype":"globe-coordinate"},"type":"statement","id":"Q1040$720571B4-2BF0-4C82-B8A8-537B8D18BBF9","rank":"normal"}],"P138":[{"mainsnak":{"snaktype":"value","property":"P138","hash":"b01452aa8981bca9f72a93cca5e127cc1eb8cc5a","datavalue":{"value":{"entity-type":"item","numeric-id":61083,"id":"Q61083"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"q1040$1196987a-410f-dd21-3120-aa349b86577d","rank":"normal"}],"P856":[{"mainsnak":{"snaktype":"value","property":"P856","hash":"8e76e82686c63a2bf3d8808c7c59558d5d80bd79","datavalue":{"value":"https://www.karlsruhe.de/","type":"string"},"datatype":"url"},"type":"statement","qualifiers":{"P407":[{"snaktype":"value","property":"P407","hash":"46bfd327b830f66f7061ea92d1be430c135fa91f","datavalue":{"value":{"entity-type":"item","numeric-id":188,"id":"Q188"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}],"P813":[{"snaktype":"value","property":"P813","hash":"0a4658c4948753a9397bfc6e63b1db12a7b6ef33","datavalue":{"value":{"time":"+2017-05-18T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P407","P813"],"id":"Q1040$0630ACD2-4A49-4ED3-9014-C053877838AE","rank":"normal"}],"P948":[{"mainsnak":{"snaktype":"value","property":"P948","hash":"7795e9ed583131635f1ddb52484d732f414a098c","datavalue":{"value":"Karlsruhe 
banner.jpg","type":"string"},"datatype":"commonsMedia"},"type":"statement","id":"Q1040$23A284EC-E6C4-438A-9618-597C25193B92","rank":"normal"}],"P31":[{"mainsnak":{"snaktype":"value","property":"P31","hash":"78dd245533fad75400b2678da46c8d12ca8a3020","datavalue":{"value":{"entity-type":"item","numeric-id":2327515,"id":"Q2327515"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$bd05aa70-415a-6b80-bb0b-d7fdea9a6f3f","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P31","hash":"1af62c39c83b583e317a43f838058583f06f38aa","datavalue":{"value":{"entity-type":"item","numeric-id":1549591,"id":"Q1549591"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"963e83f6767b00a7ddcfd1c6df9a7332812adc01","datavalue":{"value":{"time":"+1901-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q1040$aabaf39f-46cc-6c55-2450-1086f9fff2ef","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P31","hash":"4e6beb54b615d48c5239f20931f0d68409834ac2","datavalue":{"value":{"entity-type":"item","numeric-id":262166,"id":"Q262166"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$EE5BC03B-0E1F-422C-B99D-4B71B38D9B8B","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P31","hash":"b6683417a21589f263d22172790792752c60785e","datavalue":{"value":{"entity-type":"item","numeric-id":253030,"id":"Q253030"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P642":[{"snaktype":"value","property":"P642","hash":"e7a622cf70d54d88673529963073cc9c12abac99","datavalue":{"value":{"entity-type":"item","numeric-id":448731,"id":"Q448731"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"qualifiers-order":["P642"],"id":"Q1040$9d608d41-45cb-f8c6-4fca-289f1e4c5885","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P31","hash":"1d2d976b06b7bb208fadcff54975cc04295b93c0","datavalue":{"value":{"entity-type":"item","numeric-id":515,"id":"Q515"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$274714A0-74F1-4240-ABA3-34DA3A829789","rank":"normal","references":[{"hash":"2e2dae857a990c95a2d97844b1c430ce524e781e","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"8d91599ef83d939fb9fc533162e61ff49927e0bb","datavalue":{"value":"https://doksite.de/map/?loc=Karlsruhe","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P31","hash":"1acb1b305c510103d3fc0a3f811370b26d022f82","datavalue":{"value":{"entity-type":"item","numeric-id":22865,"id":"Q22865"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$88445f13-46c8-1ea4-8bd9-c410deb13362","rank":"normal","references":[{"hash":"fe6c848c588f5d85b7e55efa1ae4a571ace956bd","snaks":{"P1629":[{"snaktype":"value","property":"P1629","hash":"44041b8a42d046308dc5cf2407817b3aa2845d6b","datavalue":{"value":{"entity-type":"item","numeric-id":2333952,"id":"Q2333952"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P1629"]}]}],"P910":[{"mainsnak":{"snaktype":"value","property":"P910","hash":"df9fe972a5c6f55aa047437e20838065fd4d9dce","datavalue":{"value":{"entity-type":"item","numeric-id":7323072,"id":"Q7323072"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"typ
e":"statement","id":"Q1040$71040020-93D6-42B7-826E-1FE1A0CC18C5","rank":"normal"}],"P982":[{"mainsnak":{"snaktype":"value","property":"P982","hash":"b2663f18cf380aa9c6edabcb1c66b3f90881788e","datavalue":{"value":"8b0c3b31-c1fd-48f4-b13d-86a8c8477e3e","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$5CEEB7CC-FF07-4270-9E26-9466242852F1","rank":"normal","references":[{"hash":"706208b3024200fd0a39ad499808dd0d98d74065","snaks":{"P248":[{"snaktype":"value","property":"P248","hash":"623cc8f0e2f65afe4d66b91962d354a2f3aa9a27","datavalue":{"value":{"entity-type":"item","numeric-id":14005,"id":"Q14005"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P248"]}]}],"P646":[{"mainsnak":{"snaktype":"value","property":"P646","hash":"ac67591076e05320199265acb534367996000153","datavalue":{"value":"/m/0qb1z","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$6C801C86-3FDD-47AA-9A7A-92817112039A","rank":"normal","references":[{"hash":"2b00cb481cddcac7623114367489b5c194901c4a","snaks":{"P248":[{"snaktype":"value","property":"P248","hash":"a94b740202b097dd33355e0e6c00e54b9395e5e0","datavalue":{"value":{"entity-type":"item","numeric-id":15241312,"id":"Q15241312"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}],"P577":[{"snaktype":"value","property":"P577","hash":"fde79ecb015112d2f29229ccc1ec514ed3e71fa2","datavalue":{"value":{"time":"+2013-10-28T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"snaks-order":["P248","P577"]}]}],"P395":[{"mainsnak":{"snaktype":"value","property":"P395","hash":"06b243ec35aa62735f00a278decc81eb3e0c286a","datavalue":{"value":"KA","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$8ED6ABCE-C62B-4840-B1DF-63EB32A9BB65","rank":"normal","references":[{"hash":"9a24f7c0208b05d6be97077d855671d1dfdbc0dd","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"d38375ffe6fe142663ff55cd783aa4df4301d83d","datavalue":{"value":{"entity-type":"item","numeric-id":48183,"id":"Q48183"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P227":[{"mainsnak":{"snaktype":"value","property":"P227","hash":"6b8fee99ba2fe54bb9a78413661d7beddfbe2306","datavalue":{"value":"4029713-5","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$1FADB83F-DDF0-48AB-AF14-75FD2D5323EA","rank":"normal","references":[{"hash":"9a24f7c0208b05d6be97077d855671d1dfdbc0dd","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"d38375ffe6fe142663ff55cd783aa4df4301d83d","datavalue":{"value":{"entity-type":"item","numeric-id":48183,"id":"Q48183"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P6":[{"mainsnak":{"snaktype":"value","property":"P6","hash":"9d848fd46fafe1033229a1e8e782a45f8490dbd1","datavalue":{"value":{"entity-type":"item","numeric-id":107280,"id":"Q107280"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"4cc7e0ac8dc3736a5532569328b04b1e8868527b","datavalue":{"value":{"time":"+1998-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P582":[{"snaktype":"value","property":"P582","hash":"60e2cec551b847edb9e09dc2a8d73357f95a09f7","datavalue":{"value":{"time":"+2013-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"pr
ecision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$d9682582-4eac-8df1-ddef-74753661dbe4","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P6","hash":"bb6d2b51df97cc98756327a9ed3d176c746cabd6","datavalue":{"value":{"entity-type":"item","numeric-id":1466862,"id":"Q1466862"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"cfd2f4123560f47337671855cef2c1d993492fc0","datavalue":{"value":{"time":"+1986-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P582":[{"snaktype":"value","property":"P582","hash":"a10ccc5bca13ba5cd317ca152b4246243c7e4762","datavalue":{"value":{"time":"+1998-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$8e546f8f-4182-4e66-e1ad-2be238379dd8","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P6","hash":"9d98839429e3bd8e8d7f1f0f340bc37e55b79562","datavalue":{"value":{"entity-type":"item","numeric-id":2038537,"id":"Q2038537"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"d986f978ba3b2f4c5c5ebd353ab0522fbadfbf94","datavalue":{"value":{"time":"+1970-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P582":[{"snaktype":"value","property":"P582","hash":"f60ee2d7e1ddeffadcaf37a3910ea836d8b5dc9c","datavalue":{"value":{"time":"+1986-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$7ccaf131-46be-3ac0-747f-0fdfedcf842d","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P6","hash":"8228a24d54bab67b60e672812a97f862b7fed9b7","datavalue":{"value":{"entity-type":"item","numeric-id":1561590,"id":"Q1561590"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"e7d377bd4aba01989d215b24a7aad0a268c24878","datavalue":{"value":{"time":"+1952-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P582":[{"snaktype":"value","property":"P582","hash":"548f09995740af9d24e00e5ee47d1abd3aaeb495","datavalue":{"value":{"time":"+1970-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$f78c5470-4272-289f-4b40-7b1d790cce08","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P6","hash":"d34102b5d860f01128529f2ae5605842ef109309","datavalue":{"value":{"entity-type":"item","numeric-id":1462576,"id":"Q1462576"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"d95ee5d64c6f16e180dbc6d166cddc82f2af8f9c","datavalue":{"value":{"time":"+1947-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http:/
/www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P582":[{"snaktype":"value","property":"P582","hash":"c97c08884409a71d338d77222ebf977d31f2c0bc","datavalue":{"value":{"time":"+1952-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$97efe8ca-4c1b-b320-2975-475650ed8360","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P6","hash":"b289e84acf28c4986bcaf1f0e514f5349cf43e18","datavalue":{"value":{"entity-type":"item","numeric-id":1270890,"id":"Q1270890"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"07e674d184213eb3c2a7bc4d91a7a554193ab80e","datavalue":{"value":{"time":"+1718-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P582":[{"snaktype":"value","property":"P582","hash":"0a90e9b891ee605c5bb2dad7587005ba3cc19e29","datavalue":{"value":{"time":"+1720-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$42ea4206-4c60-f563-ee3a-aac66f48bfdf","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P6","hash":"35173b8486e49d98fcd3f01f5912f554fecd0982","datavalue":{"value":{"entity-type":"item","numeric-id":1443774,"id":"Q1443774"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"a919bb896faea5469482d24e926c19587c22ea2e","datavalue":{"value":{"time":"+2013-03-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"Q1040$9496068a-4225-22c3-7f25-653f1aa3a284","rank":"preferred"}],"P605":[{"mainsnak":{"snaktype":"value","property":"P605","hash":"79f99e93bec87a2fccca82455d3ef719b29c1876","datavalue":{"value":"DE122","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$10f1b9f9-42c7-6b52-bef1-092a2c4ce917","rank":"normal"}],"P17":[{"mainsnak":{"snaktype":"value","property":"P17","hash":"ef7247b704f4ec0a7df39e83306bb388fea17d5f","datavalue":{"value":{"entity-type":"item","numeric-id":183,"id":"Q183"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"13eff71ba054ad192e12acddfd4ee6725bc28ba9","datavalue":{"value":{"time":"+1990-10-03T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580"],"id":"q1040$C609BA74-8433-47FE-8177-6519269875BD","rank":"preferred"},{"mainsnak":{"snaktype":"value","property":"P17","hash":"3912570fabcfa6f9f41e1c53907ee233225c7211","datavalue":{"value":{"entity-type":"item","numeric-id":713750,"id":"Q713750"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"2ed93c5224a3e784bd15d1eb7c47b4e8ad916a12","datavalue":{"value":{"time":"+1949-05-23T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"
time"}],"P582":[{"snaktype":"value","property":"P582","hash":"57c79b0a475f1529c43a42ad1c80dea59b72bc1a","datavalue":{"value":{"time":"+1990-10-02T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$fea532ba-48ed-a78b-57d4-aa021edb992d","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P17","hash":"f07e6efb1952c0a8e1aeab32299b781d428cf237","datavalue":{"value":{"entity-type":"item","numeric-id":43287,"id":"Q43287"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P580":[{"snaktype":"value","property":"P580","hash":"041bf5977a997ea973304c8848fd973c48051a6b","datavalue":{"value":{"time":"+1870-01-18T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P582":[{"snaktype":"value","property":"P582","hash":"06d1d8a9173fc86b8c5eba5320ca4e4baf10ae46","datavalue":{"value":{"time":"+1919-11-09T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P580","P582"],"id":"Q1040$7eb1a9bb-4517-785e-0f96-1fffc00d3082","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P17","hash":"ef7247b704f4ec0a7df39e83306bb388fea17d5f","datavalue":{"value":{"entity-type":"item","numeric-id":183,"id":"Q183"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$94A57F6E-527D-41E6-8751-20A787A2BC48","rank":"normal"}],"P1082":[{"mainsnak":{"snaktype":"value","property":"P1082","hash":"5bfa9b7ab94c5362ba624dabb927ab9f07678e46","datavalue":{"value":{"amount":"+296033","unit":"1","upperBound":"+296033","lowerBound":"+296033"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"d071256bb4b9260491239bfad2cc561ad8bf870c","datavalue":{"value":{"time":"+2012-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$1a44ad74-499f-26cc-3f87-4f8c858e59d1","rank":"normal","references":[{"hash":"f17e98a5210f8d8063390bb6f665c8eb28c0bafc","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"678e0daea1577cef950d463661288d520bce8d64","datavalue":{"value":{"entity-type":"item","numeric-id":764739,"id":"Q764739"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"f0ba3286637e66fe8eb08c16240ff600c5ae5277","datavalue":{"value":{"amount":"+294761","unit":"1","upperBound":"+294761","lowerBound":"+294761"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"d079326e3ce3c56de50885e3305a2d316cc91861","datavalue":{"value":{"time":"+2010-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$f0dc7b52-49d7-df74-c64e-7956da403f45","rank":"normal","references":[{"hash":"f17e98a5210f8d8063390bb6f665c8eb28c0bafc","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"678e0daea1577cef950d463661288d520bce8d64","datavalue":{"value":{"entity-type":"item","numer
ic-id":764739,"id":"Q764739"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"fa913b2c79e98325624521cd7cb795bb11a46745","datavalue":{"value":{"amount":"+291959","unit":"1","upperBound":"+291959","lowerBound":"+291959"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"c7ff7c0c56ed9a06045dfdea5c9d98d0494714d0","datavalue":{"value":{"time":"+2009-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$8ffe6404-4c06-cfea-1bac-dbe4966e45e9","rank":"normal","references":[{"hash":"f17e98a5210f8d8063390bb6f665c8eb28c0bafc","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"678e0daea1577cef950d463661288d520bce8d64","datavalue":{"value":{"entity-type":"item","numeric-id":764739,"id":"Q764739"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"96230ddcae5d3743776ea6eacb409017c445df07","datavalue":{"value":{"amount":"+299103","unit":"1","upperBound":"+299103","lowerBound":"+299103"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"1568911fac5843944dbf80f171ebea12f51090e5","datavalue":{"value":{"time":"+2013-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$bcfaa882-4a9f-0b14-9a5b-9cc537d1df54","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"c5e8857100bd6af9895f082c572fa00d231a6f82","datavalue":{"value":{"amount":"+300711","unit":"1","upperBound":"+300711","lowerBound":"+300711"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"73d6a2774ad3dff7dc2d301d2e1193b716d635fb","datavalue":{"value":{"time":"+2011-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$b099f3d0-47fe-d7f1-a918-10971713b04b","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"9335dcda794c56ee8db3e748f9aa484c94ce97fb","datavalue":{"value":{"amount":"+278558","unit":"1","upperBound":"+278558","lowerBound":"+278558"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"2abb6a19fe3eb0b1aeeb3564ed0e868376f04473","datavalue":{"value":{"time":"+2000-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$7f379782-495c-f2b2-59a2-fa5d8cf7842e","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"7852226caa8e0501eda9feac84eb0e86fb86e96d","datavalue":{"value":{"amount":"+275061","unit":"1","upperBound":"+275061","lowerBound":"+275061"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"62a8da970c5a0271b1d724dc95a3a9814de621af","datavalue":{"value":{"time":"+1990-12-31T00:00:00Z","timezone":0,"before":0,"after":0
,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$3e2949e4-41ec-542f-9ec5-8adcc5898c35","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"b91acad9b5e687b2c88c054f8fe1391bca451ba9","datavalue":{"value":{"amount":"+265077","unit":"1","upperBound":"+265077","lowerBound":"+265077"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"cf188bacd7a69f7b252a0b560b344821369faca4","datavalue":{"value":{"time":"+1961-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$1591C226-9F2B-4D6E-AACA-95BCC26EA22F","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"83e6253cb08072c833d8cafc9f15a729097b3021","datavalue":{"value":{"amount":"+272113","unit":"1","upperBound":"+272113","lowerBound":"+272113"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"551a2e35b409501954dd3d799032e9a722a89cee","datavalue":{"value":{"time":"+1962-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$F96E9696-B793-4503-86C4-E3CF6837370D","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"798c6df5c2f7a2d0cd75f2fc151539048295fc03","datavalue":{"value":{"amount":"+274628","unit":"1","upperBound":"+274628","lowerBound":"+274628"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"53a46a3790dcd42563d6d608efb7c1c74e18f00b","datavalue":{"value":{"time":"+1963-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$9771C206-553A-4EF8-A564-49629FA9C2A0","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"6487e8a04453149772680446b1becc669ad50a19","datavalue":{"value":{"amount":"+277823","unit":"1","upperBound":"+277823","lowerBound":"+277823"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P
585","hash":"5a483b958833d009f585d47002c09aa5d27c4fe8","datavalue":{"value":{"time":"+1964-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$41587444-653C-4425-8529-34172AA762FD","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"92fe502211d1f4ae80ec61dbd5f642c2ad50911e","datavalue":{"value":{"amount":"+280001","unit":"1","upperBound":"+280001","lowerBound":"+280001"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"90900fad3e35a8dc48def62576233086e867b9be","datavalue":{"value":{"time":"+1965-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$9903D0C8-58B4-4169-92EF-B1B308A6DF80","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"6beeb978a198222857efe7e9ec36e819fd4761d1","datavalue":{"value":{"amount":"+280968","unit":"1","upperBound":"+280968","lowerBound":"+280968"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"c780df5a2df81db054baa9127d659f572edec932","datavalue":{"value":{"time":"+1966-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$E63F7D06-EEB1-4D12-9301-C5EEC3692B43","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"6221eefe0ed5323a23b25f55aec6328633202124","datavalue":{"value":{"amount":"+281093","unit":"1","upperBound":"+281093","lowerBound":"+281093"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"349c727ba8f774d5e5eb112af2878787c9d92cfe","datavalue":{"value":{"time":"+1967-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$FEE4E828-F8D1-4B01-9323-28E6A3FDE70C","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datava
lue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"d10d43b0b310c4d7934fcd7c5dcfa0b784bb53c9","datavalue":{"value":{"amount":"+284060","unit":"1","upperBound":"+284060","lowerBound":"+284060"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"32ee33241fcab0a1e3f1287b9f23598bf659f8bc","datavalue":{"value":{"time":"+1968-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$D8AEA5DD-F331-4710-B5EC-07294465BC3A","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"9fd0e6d1828884a77d424cf282c89422fb3739a8","datavalue":{"value":{"amount":"+286085","unit":"1","upperBound":"+286085","lowerBound":"+286085"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"0d45bd9da6d647247858a5728a018c88a70e97d0","datavalue":{"value":{"time":"+1969-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$7B6A206C-78D4-4A2B-91C3-425C33BDAE57","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"25c4e6ffba859017e89d062db58fc8ea35bd5992","datavalue":{"value":{"amount":"+287452","unit":"1","upperBound":"+287452","lowerBound":"+287452"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"1f0aba630e6875311dde9e6abe1d4f2a7044baaf","datavalue":{"value":{"time":"+1970-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$0A2BA5C9-9667-4328-AE94-A297FC2DA2E3","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"4e9cea2e464c792a5ee850cd9df26e3f6c1405c7","datavalue":{"value":{"amount":"+287918","unit":"1","upperBound":"+287918","lowerBound":"+287918"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"1352bdeb838297fa29fe
19919ed606650db92b6f","datavalue":{"value":{"time":"+1971-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$D791D68E-1A7A-4DA5-A022-9CCF9D83E3C4","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"fcecffa7ba94e59dc96e1edaa2e2363b196e0a95","datavalue":{"value":{"amount":"+287539","unit":"1","upperBound":"+287539","lowerBound":"+287539"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"6f192c28c2d81c283fe0407115207dd895a0e636","datavalue":{"value":{"time":"+1972-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$CB3A8016-157B-4423-83E3-50E87F27A4CE","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"93ae6f20b12fc3480c92b2e603e5b92cf327f92f","datavalue":{"value":{"amount":"+285822","unit":"1","upperBound":"+285822","lowerBound":"+285822"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"0d07f2b99c3af88b009e6b1ee95c7729631be56c","datavalue":{"value":{"time":"+1973-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$D41FB742-128D-461A-B843-0DBA97CE359A","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"c5980febb74555e7284f6afc925eb4ed3a461ea1","datavalue":{"value":{"amount":"+283543","unit":"1","upperBound":"+283543","lowerBound":"+283543"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"c0037c072228c3fcb30ee9513ab584c56a6aeb06","datavalue":{"value":{"time":"+1974-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$89EC5033-BF63-4583-BF72-B3E60EAE04D5","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statist
ik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"bf6ce449fa88671e202505647165ab6b2243edb2","datavalue":{"value":{"amount":"+280448","unit":"1","upperBound":"+280448","lowerBound":"+280448"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"8d820f875d7ded06e0ac38d88b44df5cd4b81d24","datavalue":{"value":{"time":"+1975-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$D925C50E-1356-4F33-A586-6AEE0E72D9B0","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"02f8f047da3c227a8772bbe97943cf5f7fa9626a","datavalue":{"value":{"amount":"+276620","unit":"1","upperBound":"+276620","lowerBound":"+276620"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"6012a7d5f709c5c3e8680b0765aba4077bed659b","datavalue":{"value":{"time":"+1976-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$481FCA66-34D7-41D8-9640-B3BED262B0F5","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"a60c6d3bfced83f4c25f379f42f2c73faaff0bef","datavalue":{"value":{"amount":"+275828","unit":"1","upperBound":"+275828","lowerBound":"+275828"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"010bb7560bca2a5faa415eaed2d11fa4ca8e2f8e","datavalue":{"value":{"time":"+1977-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$68449CB7-B6A9-404C-A637-19E1C079BC99","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"763b2ca4156fe2ac318e97b9839e2f371a953b6a","datavalue":{"value":{"amount":"+274058","unit":"1","upperBound":"+274058","lowerBound":"+274058"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"d14d3dec049991718d24d45da626779c143e2b54","datavalue"
:{"value":{"time":"+1978-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$2B20157C-463B-470A-AFDF-E8B4728FACAF","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"b78b743bdca4149d2be20fce4c8925fa2cb869a6","datavalue":{"value":{"amount":"+271417","unit":"1","upperBound":"+271417","lowerBound":"+271417"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"21ce2394cef40d7e380a249ee1911d6efa38d1af","datavalue":{"value":{"time":"+1979-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$B49D440B-29C0-4AAC-B1C2-06153F891264","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"d8d08026e7aba029343fc7db78921dcd484978e1","datavalue":{"value":{"amount":"+271892","unit":"1","upperBound":"+271892","lowerBound":"+271892"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"9643cd75daf109f2125771d1eee67f2935e0e6e0","datavalue":{"value":{"time":"+1980-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$D7A04894-26B2-402A-9BDD-9913FCF1AA0E","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"364a221c71787745135a002808754400b4200f30","datavalue":{"value":{"amount":"+271877","unit":"1","upperBound":"+271877","lowerBound":"+271877"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"8b746f6e4a9bbf9e6cdc090817168af32f970465","datavalue":{"value":{"time":"+1981-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$9A354C97-D51C-433B-AB7E-152FDB88817F","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkG
ebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"017f1aec39806e4423cd61736dc7235c0597561b","datavalue":{"value":{"amount":"+271236","unit":"1","upperBound":"+271236","lowerBound":"+271236"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"24472fb4f33126703ee38dcc218c661b07535e1a","datavalue":{"value":{"time":"+1982-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$C4D3D931-1230-4C83-9761-C3806618D219","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"f22885dbe8a8533d88d1a8ab10a04e80eb47f13e","datavalue":{"value":{"amount":"+269389","unit":"1","upperBound":"+269389","lowerBound":"+269389"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"1f4575b36bd16a12b6ce37bd18576d2809be2317","datavalue":{"value":{"time":"+1983-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$A3A9B164-0BA6-4B37-8D15-84AB5CEF72CA","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"da68b66b687338cd35ab189b7fa0619c9b2f4277","datavalue":{"value":{"amount":"+269638","unit":"1","upperBound":"+269638","lowerBound":"+269638"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"5e32e62c61c51f9f6a5e8643f35af966de57859d","datavalue":{"value":{"time":"+1984-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$5FAF836B-D883-4A3E-B8FF-FA781929EBDA","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"4fa6a91f52201e522e08f7c4e5fed4947ff6df7d","datavalue":{"value":{"amount":"+268211","unit":"1","upperBound":"+268211","lowerBound":"+268211"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"3e98ddc7e45e738ffb8c443426322278b8d3dbc8","datavalue":{"value":{"time":"+1985-00-00T00
:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$45388259-1DB2-4395-AB35-76BC6B9FC0A5","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"868eb3e73692eb61b5d63d569edf99654998012d","datavalue":{"value":{"amount":"+268309","unit":"1","upperBound":"+268309","lowerBound":"+268309"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"fdf9823c2c78f35be683dbabbdc70ae7ae1ea797","datavalue":{"value":{"time":"+1986-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$AB9D1129-CAB3-4ECD-93E5-A76FB652DD83","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"238365c1460efc12ccc345f0545192f32908ebc1","datavalue":{"value":{"amount":"+260591","unit":"1","upperBound":"+260591","lowerBound":"+260591"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"06b481dc59b4e55d0f90a9b81d4f1c42e338c90d","datavalue":{"value":{"time":"+1987-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$AE0D960A-CDAD-43A5-B906-067524C9C6A0","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"f39cdba8c6400a8901d424d31b00839b27f49a57","datavalue":{"value":{"amount":"+265100","unit":"1","upperBound":"+265100","lowerBound":"+265100"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"d2e609f126c696b4a21ddf130156a8b2d7f914ed","datavalue":{"value":{"time":"+1988-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$F860ECD0-C4C4-495A-9091-E96D3626F018","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R
=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"1714fb9e97caf9e5b2d2a511a141e4492701a7f1","datavalue":{"value":{"amount":"+270659","unit":"1","upperBound":"+270659","lowerBound":"+270659"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"bf75cabfd52bf42741e5e375fe2cd2ba6a18693a","datavalue":{"value":{"time":"+1989-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$E4F8F65F-DC11-49FE-AC23-B7626E1C0EE5","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"48981c764ad0788857b9ddff9150ee4fe5cae8d7","datavalue":{"value":{"amount":"+278579","unit":"1","upperBound":"+278579","lowerBound":"+278579"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"91e44407bbaf560304e82582230228a3da9501d8","datavalue":{"value":{"time":"+1991-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$291CE2D3-A5AE-4F7A-AD8C-34723F716389","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"9d8c9f695bffb27105d7326effabd5dea65853b4","datavalue":{"value":{"amount":"+279329","unit":"1","upperBound":"+279329","lowerBound":"+279329"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"4552704ecdc9a6e9719c3d80f8b8c3dbfa5c063e","datavalue":{"value":{"time":"+1992-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$51C0CDE5-9728-4232-A730-E83EF0C12E53","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"166e526e9994a6d60e67535244be104dbdaa027a","datavalue":{"value":{"amount":"+277998","unit":"1","upperBound":"+277998","lowerBound":"+277998"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"90c1ec53fb2353d85282cca17898f95b43cc4d63","datavalue":{"value":{"time":"+1993-00-00T00:00:00Z","timezone":0,"before":0,
"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$DADD6003-D1DA-4F28-A9C5-27A6C3B8114D","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"d4f65fc5d23251a46c8e741d2347b7c9908f87ac","datavalue":{"value":{"amount":"+277011","unit":"1","upperBound":"+277011","lowerBound":"+277011"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"b7e7105ce8e4bb89a98287b17c8b17d5ac4b5e57","datavalue":{"value":{"time":"+1994-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$6B605702-96EB-4F16-AC85-D7F178BD7FF6","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"2b73aaff32c8f57d38e2704ec604d37b6c7f4406","datavalue":{"value":{"amount":"+275690","unit":"1","upperBound":"+275690","lowerBound":"+275690"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"c09061faa6b6430b874257ac50be630b716a5d4a","datavalue":{"value":{"time":"+1995-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$5D6AB906-3A7C-41D1-AEBE-BE9613288837","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"35e05e15b31898e3cab433daae525c56453cad82","datavalue":{"value":{"amount":"+277191","unit":"1","upperBound":"+277191","lowerBound":"+277191"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"79b724689063bbad8fa6bb1f75c7b0134c3bd386","datavalue":{"value":{"time":"+1996-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$3A4C6D92-C911-42BC-9EB8-FE3F83789BE8","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"data
type":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"7966b7f4c07bd58d127d2084cfd5ed30755d3788","datavalue":{"value":{"amount":"+276571","unit":"1","upperBound":"+276571","lowerBound":"+276571"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"9b8d0cec6ae0bc77abcf6acaac222401c81f350f","datavalue":{"value":{"time":"+1997-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$5DE4B4B3-8E58-4654-B301-60E7687F3243","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"ec338c62190a804d7b5900902b2d58b5cfb09508","datavalue":{"value":{"amount":"+276536","unit":"1","upperBound":"+276536","lowerBound":"+276536"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"54689e87cdcaf5a15252503de7640bab54d4911a","datavalue":{"value":{"time":"+1998-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$715C0492-B8F3-4356-BC1F-A8992551233F","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"75ae33eb7182a857ce905b21d9666069af48bfeb","datavalue":{"value":{"amount":"+277204","unit":"1","upperBound":"+277204","lowerBound":"+277204"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"2c64374fe06f9ab82c26fc46f639526f59a8bcda","datavalue":{"value":{"time":"+1999-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$CC3A1BB2-1D04-48BD-97B6-8302600EDB60","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"0522ee3ec61e911b592111bdcf6868ccbbeb38d0","datavalue":{"value":{"amount":"+279578","unit":"1","upperBound":"+279578","lowerBound":"+279578"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"eb0ebdca65f8a80fa9de530d9f3efd91003c1544","datavalue":{"value":{"time":"+2001-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendar
model":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$43A48C63-F341-4C0F-A537-7FCACD9BD5ED","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"a0110aa34fc9f06713f1efdf1505fb2de76b4ce4","datavalue":{"value":{"amount":"+281334","unit":"1","upperBound":"+281334","lowerBound":"+281334"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"9451da042cf866fedbd9efd17f0324c590c5c5dd","datavalue":{"value":{"time":"+2002-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$DF92F985-31B0-4D46-8307-624667F6A15E","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"f6225f8e23af561329339f3936f9edb9fec61fa6","datavalue":{"value":{"amount":"+282595","unit":"1","upperBound":"+282595","lowerBound":"+282595"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"2c189c7c872983bd10a28e2acfc03f5c3738d76d","datavalue":{"value":{"time":"+2003-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$C2C6C358-B1E4-45B5-B67E-1022721B7A75","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"9a4f0190f1250b7c1c204f215103bc8832e19229","datavalue":{"value":{"amount":"+284163","unit":"1","upperBound":"+284163","lowerBound":"+284163"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"ba4e903e603892e17bd9796e98b823725cab176a","datavalue":{"value":{"time":"+2004-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$BB1E1B59-3172-427B-9DB3-F0787D143BAE","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P8
54"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"e8f86ef031d8bbfc53782774359501ff0f6dbae1","datavalue":{"value":{"amount":"+285263","unit":"1","upperBound":"+285263","lowerBound":"+285263"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"26896c2e3b989e92a54f934df3cb9368a0ce7adb","datavalue":{"value":{"time":"+2005-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$7034BE53-93C9-4308-8ED6-100677EBEAAE","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"10ed8f016fd5d20d148096e8c6f39b2651866e73","datavalue":{"value":{"amount":"+286327","unit":"1","upperBound":"+286327","lowerBound":"+286327"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"a910853afc29ee98ebd8b5f67a02ff08aea299d2","datavalue":{"value":{"time":"+2006-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$F4FBCA5D-FF9D-4F9F-BFEF-DA2E78C2D77E","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"4d1d7b6af63130c11ae47313fe22d94b038a97dd","datavalue":{"value":{"amount":"+288917","unit":"1","upperBound":"+288917","lowerBound":"+288917"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"4b8b0672c38a4a213d3ddf5553b775988e65ace9","datavalue":{"value":{"time":"+2007-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$54453069-2AC3-4FAC-9BA4-C8C96C89DA18","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"c5c39181ef4cb99d7728e5c3ebaf0d67e8ace942","datavalue":{"value":{"amount":"+290736","unit":"1","upperBound":"+290736","lowerBound":"+290736"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"43bdb90ab9b607c864931ed584115ff944210c6b","datavalue":{"value":{"time":"+2008-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/e
ntity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$F538F75C-43A2-4253-A33C-EBA3A0AA884F","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"b5d8380e19729fc1719d8a76bb829caf90c80eb2","datavalue":{"value":{"amount":"+300051","unit":"1","upperBound":"+300051","lowerBound":"+300051"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"049bd876bdfea826eaceb6f6056701b74c6d3f08","datavalue":{"value":{"time":"+2014-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$2F54EBD2-EBC8-4647-AB8C-1E9BAD9C9948","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"bc64572af9acf3a1a7db9feb490e85bcd01bad41","datavalue":{"value":{"amount":"+307263","unit":"1","upperBound":"+307264","lowerBound":"+307262"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"dec5f9078af35f77b6f7ea179623d4552bbb6ce1","datavalue":{"value":{"time":"+2015-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$4a58beff-4d91-d11d-f332-b9f6322922a1","rank":"normal","references":[{"hash":"fdcb09a81461631a5d5003ab606e83f951df7168","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"6174c53318b25abeb5ae0c8c003eb77f95f7bb4c","datavalue":{"value":"http://web3.karlsruhe.de/Stadtentwicklung/statistik/pdf/2015/2015-bevoelkerung-jahresbericht.pdf","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P1082","hash":"7909c226a364523635045b02a8db3adc749b1a35","datavalue":{"value":{"amount":"+307755","unit":"1"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"dec5f9078af35f77b6f7ea179623d4552bbb6ce1","datavalue":{"value":{"time":"+2015-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$7abc455d-4385-b41c-a688-bb1941ab38dc","rank":"preferred","references":[{"hash":"fe6c848c588f5d85b7e55efa1ae4a571ace956bd","snaks":{"P1629":[{"snaktype":"value","property":"P1629","hash":"44041b8a42d046308dc5cf2407817b3aa2845d6b","datavalue":{"value":{"entity-type":"item","numeric-id":2333952,"id":"Q2333952"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P1629"]}]}],"P1464":[{"mainsnak":{"snaktype":"value","property":"P1464","hash":"3f05b4
36914f350b27179292bd9daf5bce6a2c9d","datavalue":{"value":{"entity-type":"item","numeric-id":15069210,"id":"Q15069210"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$82436D65-D8D4-4AE4-88EF-F9B6C67721D8","rank":"normal","references":[{"hash":"d5847b9b6032aa8b13dae3c2dfd9ed5d114d21b3","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"5a343e7e758a4282a01316d3e959b6e653b767fc","datavalue":{"value":{"entity-type":"item","numeric-id":11920,"id":"Q11920"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P1465":[{"mainsnak":{"snaktype":"value","property":"P1465","hash":"d38ccdee21c148b8b60b33a7a212695720b5125e","datavalue":{"value":{"entity-type":"item","numeric-id":6563111,"id":"Q6563111"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$F5BDD5B5-AD8D-4081-AEE6-BAD6BB7DD2E2","rank":"normal","references":[{"hash":"d5847b9b6032aa8b13dae3c2dfd9ed5d114d21b3","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"5a343e7e758a4282a01316d3e959b6e653b767fc","datavalue":{"value":{"entity-type":"item","numeric-id":11920,"id":"Q11920"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P1566":[{"mainsnak":{"snaktype":"value","property":"P1566","hash":"580d2902c29ae4ce261aed9ede6d0194584c8139","datavalue":{"value":"2892794","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$DC29920C-6091-4B4D-8EC0-128C00360810","rank":"normal","references":[{"hash":"64133510dcdf15e7943de41e4835c673fc5d6fe4","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"3439bea208036ec33ec3fba8245410df3efb8044","datavalue":{"value":{"entity-type":"item","numeric-id":830106,"id":"Q830106"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P1792":[{"mainsnak":{"snaktype":"value","property":"P1792","hash":"1fad3c63dd92b3854dcc01469de00c1678fdea38","datavalue":{"value":{"entity-type":"item","numeric-id":6564747,"id":"Q6564747"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$64C41E49-1064-4616-98A6-7B1817C4DDD7","rank":"normal","references":[{"hash":"60cce5d0acf0cc060796196012001192a048d885","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"390e3b73e7cd926978cab220968d0e3451cfb814","datavalue":{"value":{"entity-type":"item","numeric-id":199698,"id":"Q199698"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P1313":[{"mainsnak":{"snaktype":"value","property":"P1313","hash":"0dcd42f1455ddd71978b95ab3050d55b6b1d7f25","datavalue":{"value":{"entity-type":"item","numeric-id":11902879,"id":"Q11902879"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$92f66f66-4ea4-d301-30e0-7a4da888bc15","rank":"normal"}],"P361":[{"mainsnak":{"snaktype":"value","property":"P361","hash":"46231ba420eab9574af3ba181943d35b07db17fb","datavalue":{"value":{"entity-type":"item","numeric-id":8165,"id":"Q8165"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$ebfcde48-4649-1cb5-7473-90c010d6c4e4","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P361","hash":"46630e5801262bf58c2c9d0c81f25a07f6a50f62","datavalue":{"value":{"entity-type":"item","numeric-id":606944,"id":"Q606944"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$7594b4ea-424a-ba55-872a-59467f979e89","rank":"normal"}],"P706":[{"mainsnak":{"snaktype":"value","proper
ty":"P706","hash":"e4308697136f9a15c6947e96a97e7db2b226bc4a","datavalue":{"value":{"entity-type":"item","numeric-id":2381438,"id":"Q2381438"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$fee3f5ee-474f-3c19-a484-e9c2b6400ecc","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P706","hash":"a576188611f8d87d2e1762f0c9d32c60f6d8f24c","datavalue":{"value":{"entity-type":"item","numeric-id":22964,"id":"Q22964"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$4d4bbe15-40bf-007e-af46-61d184688e1d","rank":"normal"}],"P1621":[{"mainsnak":{"snaktype":"value","property":"P1621","hash":"361a930c2c67d816aa7c3676a9a27219087eac5a","datavalue":{"value":"Karlsruhe subdivisions.svg","type":"string"},"datatype":"commonsMedia"},"type":"statement","id":"Q1040$152fdf54-480e-9540-28c0-c0bbdf177dec","rank":"normal"}],"P47":[{"mainsnak":{"snaktype":"value","property":"P47","hash":"1bd1a7a56031ae72abb2f527744de51529c24987","datavalue":{"value":{"entity-type":"item","numeric-id":8178,"id":"Q8178"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$c432eec5-43f4-7551-fe09-085e7c025030","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P47","hash":"c90a82c37ccb57e3eddc396e59c0cfd19262776d","datavalue":{"value":{"entity-type":"item","numeric-id":8543,"id":"Q8543"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$b31bb6b0-4434-a1e4-038c-58ad578deef5","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P47","hash":"5ad821ba2b6de1e0a2070bb2af9f46ebbe2ba260","datavalue":{"value":{"entity-type":"item","numeric-id":14893,"id":"Q14893"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$29EA0B97-113A-4184-90C4-13F96A4C5382","rank":"normal"}],"P206":[{"mainsnak":{"snaktype":"value","property":"P206","hash":"c221322b1ffd514b0e7d4db4b14f02d2e59537a4","datavalue":{"value":{"entity-type":"item","numeric-id":584,"id":"Q584"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$2acafd3e-4ee5-4a6d-8ef3-7d12a0ed3905","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P206","hash":"cc05aa3f32614920e30ad0ec75d386f2e360d87a","datavalue":{"value":{"entity-type":"item","numeric-id":562295,"id":"Q562295"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$d2fd9bf9-43f0-0320-ea76-be6472f2bed3","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P206","hash":"90a0efeb2052a50801b99eecea78c4cdbc0cb969","datavalue":{"value":{"entity-type":"item","numeric-id":678064,"id":"Q678064"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$f4a6d655-4761-81b9-ec86-170709338867","rank":"normal"}],"P1376":[{"mainsnak":{"snaktype":"value","property":"P1376","hash":"89451fdbc4a650120c3f7351ed4d6d5863fe6095","datavalue":{"value":{"entity-type":"item","numeric-id":8165,"id":"Q8165"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$8DBD4729-A7D5-43B8-8954-C0B8F4AB1EE4","rank":"normal","references":[{"hash":"ba80e422149aa5a5960bd1f5d046302a640d9ae5","snaks":{"P248":[{"snaktype":"value","property":"P248","hash":"87ab1752eab4772dd71566f74ef682e1c60fc5c3","datavalue":{"value":{"entity-type":"item","numeric-id":8165,"id":"Q8165"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P248"]}]}],"P214":[{"mainsnak":{"snaktype":"value","property":"P214","hash":"7d4320363ed90b
bdb7488f2c513c68834ab684c5","datavalue":{"value":"241322215","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$863E9DB8-263A-4AF8-9C5A-965D4C7DB3ED","rank":"normal","references":[{"hash":"9a24f7c0208b05d6be97077d855671d1dfdbc0dd","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"d38375ffe6fe142663ff55cd783aa4df4301d83d","datavalue":{"value":{"entity-type":"item","numeric-id":48183,"id":"Q48183"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P244":[{"mainsnak":{"snaktype":"value","property":"P244","hash":"a4b1446da3a2e5f08de6716d4f36c732e2318f1c","datavalue":{"value":"n79013825","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$99DE97FE-D344-4FC9-A622-D9E724712E05","rank":"normal","references":[{"hash":"9a24f7c0208b05d6be97077d855671d1dfdbc0dd","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"d38375ffe6fe142663ff55cd783aa4df4301d83d","datavalue":{"value":{"entity-type":"item","numeric-id":48183,"id":"Q48183"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P268":[{"mainsnak":{"snaktype":"value","property":"P268","hash":"1ede8c7f6d19cbfbcdd26a0ea0f5426c3d6d71ee","datavalue":{"value":"11945506b","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$1CE6A2A5-4D46-417E-943E-15EB86020758","rank":"normal","references":[{"hash":"5f8cc956bc8d53371d304dd28abf0929735480d7","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"3992f452ebf9d3867991297d2c2c5af7a8858fbf","datavalue":{"value":{"entity-type":"item","numeric-id":20666306,"id":"Q20666306"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}],"P813":[{"snaktype":"value","property":"P813","hash":"97aefaac0484eca89d5fee539362dc40c533824e","datavalue":{"value":{"time":"+2015-08-26T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P248":[{"snaktype":"value","property":"P248","hash":"da30562523b94bc9c043e8ecdf983c520d76fa31","datavalue":{"value":{"entity-type":"item","numeric-id":20666306,"id":"Q20666306"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143","P813","P248"]}]}],"P2163":[{"mainsnak":{"snaktype":"value","property":"P2163","hash":"05ffe4215c08949a2fcf2484ada986568839706b","datavalue":{"value":"1204412","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$7f2e5471-450f-78e4-8bab-2025b90c5c15","rank":"normal"}],"P1375":[{"mainsnak":{"snaktype":"value","property":"P1375","hash":"b57ab231f3dbdaf69149153287126112c1eda835","datavalue":{"value":"000629019","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$691e754e-4cde-7ebc-c41f-7ec8c4cda952","rank":"normal"}],"P935":[{"mainsnak":{"snaktype":"value","property":"P935","hash":"26f1b759f09ecbca8a444dcd41274f99dc5f85b5","datavalue":{"value":"Karlsruhe","type":"string"},"datatype":"string"},"type":"statement","id":"Q1040$BDEE431E-AC8E-4967-8E63-873C0A4A0D16","rank":"normal"}],"P1343":[{"mainsnak":{"snaktype":"value","property":"P1343","hash":"62c309f8296338bd9647a7e758857137f3370310","datavalue":{"value":{"entity-type":"item","numeric-id":2657718,"id":"Q2657718"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","qualifiers":{"P478":[{"snaktype":"value","property":"P478","hash":"ab49dc08d24a45b6e4ca6b5613d63a1756aa3d7f","datavalue":{"value":"5","type":"string"},"datatype":"string"}],"P304":[{"snaktype":"value","prope
rty":"P304","hash":"33226c49cc2fa02a98f79822420c4107271c58c7","datavalue":{"value":"326","type":"string"},"datatype":"string"}]},"qualifiers-order":["P478","P304"],"id":"Q1040$943C2882-843E-4E11-AB8C-1DD3F135B19B","rank":"normal","references":[{"hash":"8bc722e6797143f2ba971ee5fedaeb0a331268e6","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"4b9ead388e47f64ea2ecb2b82c5d4f516003a8d5","datavalue":{"value":{"entity-type":"item","numeric-id":1975217,"id":"Q1975217"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P1997":[{"mainsnak":{"snaktype":"value","property":"P1997","hash":"2a01f5fdae954be98bb4fae5d3d3d1ee46fba149","datavalue":{"value":"106073139432990","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$1370eebc-481a-d0cf-de74-46ddda4b20a0","rank":"normal"}],"P2503":[{"mainsnak":{"snaktype":"value","property":"P2503","hash":"e4171a6298dc70dde4d4050e15652728867b1582","datavalue":{"value":"KARUH1JN49EA","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$B8241530-993A-48BA-BDED-C8A8896B9FC1","rank":"normal"}],"P949":[{"mainsnak":{"snaktype":"value","property":"P949","hash":"a0eba65197051c1696417f63888790a769b5c27a","datavalue":{"value":"000974395","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$3F9D41AC-B044-471F-AFA1-450CCCAEF9AE","rank":"normal","references":[{"hash":"a70d0f501acf63bc781259cead9f73790879c83e","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"b14f8dad3de12f83b158a718b20772dc6c65fa0d","datavalue":{"value":{"entity-type":"item","numeric-id":54919,"id":"Q54919"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}],"P813":[{"snaktype":"value","property":"P813","hash":"d4f3d264b7badd664450b52ec0157875b216257f","datavalue":{"value":{"time":"+2016-04-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}],"P854":[{"snaktype":"value","property":"P854","hash":"24fd49a5596c5e386f3a4d764838d741ca59b195","datavalue":{"value":"http://www.viaf.org/viaf/241322215/","type":"string"},"datatype":"url"}]},"snaks-order":["P143","P813","P854"]}]}],"P1667":[{"mainsnak":{"snaktype":"value","property":"P1667","hash":"0950626e980b5d179522082d265cf6d785b9e2a3","datavalue":{"value":"7153374","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$1CDD2FC3-A8C0-4781-9F4B-193B4851DE5D","rank":"normal"}],"P2046":[{"mainsnak":{"snaktype":"value","property":"P2046","hash":"6332c6d75c916eed13117565c4e7e7620fa75a9b","datavalue":{"value":{"amount":"+173.45","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"cf188bacd7a69f7b252a0b560b344821369faca4","datavalue":{"value":{"time":"+1961-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$738F163F-ECE9-4C6D-A625-A76DE5D22FDB","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","ha
sh":"c2da4f4b1ce66358ee7dca0ffb51db29102867b7","datavalue":{"value":{"amount":"+173.46","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"53a46a3790dcd42563d6d608efb7c1c74e18f00b","datavalue":{"value":{"time":"+1963-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$EFEE4512-3E62-45E2-81CB-D5A8BFC60644","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"d196eccb4ee75b059793d10b8b71383ad768ab18","datavalue":{"value":{"amount":"+173.47","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"c780df5a2df81db054baa9127d659f572edec932","datavalue":{"value":{"time":"+1966-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$768B5ED0-60D3-4568-964C-4E4267CB6DF3","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"e4dfbbab45d9498e600efd2407c87b387b34f161","datavalue":{"value":{"amount":"+173.48","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"349c727ba8f774d5e5eb112af2878787c9d92cfe","datavalue":{"value":{"time":"+1967-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$CDAF46AF-8EDC-4732-A893-0410400D2DDA","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"aa3c33781c19efd2d283bc9f244fc863fe48f506","datavalue":{"value":{"amount":"+173.49","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"32ee33241fcab0a1e3f1287b9f23598bf659f8bc","datavalue":{"value":{"time":"+1968-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$0AA4A
5AF-B61D-4AA8-A678-CCCFF2D64E25","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"e4dfbbab45d9498e600efd2407c87b387b34f161","datavalue":{"value":{"amount":"+173.48","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"0d45bd9da6d647247858a5728a018c88a70e97d0","datavalue":{"value":{"time":"+1969-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$6913BEF9-9BD5-41B8-B20D-92602AA40E1C","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"d196eccb4ee75b059793d10b8b71383ad768ab18","datavalue":{"value":{"amount":"+173.47","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"6f192c28c2d81c283fe0407115207dd895a0e636","datavalue":{"value":{"time":"+1972-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$1EFDFAA6-EF53-4D85-A31F-18938FA1AF0F","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"c2da4f4b1ce66358ee7dca0ffb51db29102867b7","datavalue":{"value":{"amount":"+173.46","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"8d820f875d7ded06e0ac38d88b44df5cd4b81d24","datavalue":{"value":{"time":"+1975-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$5CFEABE3-AB3E-4F4C-9EE1-C6530FAA6049","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"6332c6d75c916eed13117565c4e7e7620fa75a9b","datavalue":{"value":{"amount":"+173.45","unit":"
http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"1f4575b36bd16a12b6ce37bd18576d2809be2317","datavalue":{"value":{"time":"+1983-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$F555ACA6-B76D-4CF0-B300-B8CD0FAF621F","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"0b6e6f6757dd7214adfe3bab0439868615bd5aba","datavalue":{"value":{"amount":"+173.44","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"a209a4e82da2d45e0d86182642ab75890aacd0c1","datavalue":{"value":{"time":"+1990-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$F4381E57-CF15-4F9A-A654-C8FB007976E5","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"d196eccb4ee75b059793d10b8b71383ad768ab18","datavalue":{"value":{"amount":"+173.47","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"b7e7105ce8e4bb89a98287b17c8b17d5ac4b5e57","datavalue":{"value":{"time":"+1994-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$EC7F2754-08B5-47BE-9F41-D74A164BA9FE","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"c2da4f4b1ce66358ee7dca0ffb51db29102867b7","datavalue":{"value":{"amount":"+173.46","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"9b8d0cec6ae0bc77abcf6acaac222401c81f350f","datavalue":{"value":{"time":"+1997-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$76E17D6B-BF24-4620-A756-55BC6F0EB955","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974
b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"aa3c33781c19efd2d283bc9f244fc863fe48f506","datavalue":{"value":{"amount":"+173.49","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"54689e87cdcaf5a15252503de7640bab54d4911a","datavalue":{"value":{"time":"+1998-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$DA947CB0-1CD7-47E7-8496-53FFB176B4E0","rank":"normal","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"c2da4f4b1ce66358ee7dca0ffb51db29102867b7","datavalue":{"value":{"amount":"+173.46","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"e701093471a4f4e2e08f4a95fb504eb6270222cf","datavalue":{"value":{"time":"+2000-00-00T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$37F84617-D331-4CC7-960B-213412EE6CA5","rank":"preferred","references":[{"hash":"8ddef26df0f55a9c2aed9974b5803573ea665cb5","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"9b356795d1f5c89035115a2c13fea99900f04762","datavalue":{"value":"http://www.statistik.baden-wuerttemberg.de/BevoelkGebiet/Bevoelkerung/01515020.tab?R=GS212000","type":"string"},"datatype":"url"}]},"snaks-order":["P854"]}]},{"mainsnak":{"snaktype":"value","property":"P2046","hash":"c2da4f4b1ce66358ee7dca0ffb51db29102867b7","datavalue":{"value":{"amount":"+173.46","unit":"http://www.wikidata.org/entity/Q712226"},"type":"quantity"},"datatype":"quantity"},"type":"statement","qualifiers":{"P585":[{"snaktype":"value","property":"P585","hash":"dec5f9078af35f77b6f7ea179623d4552bbb6ce1","datavalue":{"value":{"time":"+2015-12-31T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"},"datatype":"time"}]},"qualifiers-order":["P585"],"id":"Q1040$72bd062a-4474-3c7d-192b-2a03ab2c3e83","rank":"preferred","references":[{"hash":"fe6c848c588f5d85b7e55efa1ae4a571ace956bd","snaks":{"P1629":[{"snaktype":"value","property":"P1629","hash":"44041b8a42d046308dc5cf2407817b3aa2845d6b","datavalue":{"value":{"entity-type":"item","numeric-id":2333952,"id":"Q2333952"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P1629"]}]}],"P402":[{"mainsnak":{"snaktype":"value","property":"P402","hash":"7b234582f24bdba72597c2b1533e68f336766356","datavalue":{"value":"62518","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$f563a613-4837-8330-115d-ec6a1789701b","
rank":"normal","references":[{"hash":"ee5f9c964b309c1c15d9c51bfcbc41b0cfea0462","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"5e54893ebccb71115114c9a33bd783bd38a5dafa","datavalue":{"value":{"entity-type":"item","numeric-id":936,"id":"Q936"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P2044":[{"mainsnak":{"snaktype":"value","property":"P2044","hash":"42b292106ff3105d2325f9893f00ce657e4deecc","datavalue":{"value":{"amount":"+115","unit":"http://www.wikidata.org/entity/Q11573","upperBound":"+116","lowerBound":"+114"},"type":"quantity"},"datatype":"quantity"},"type":"statement","id":"Q1040$f6384aea-4e30-9746-5805-a35f825b13e3","rank":"normal"},{"mainsnak":{"snaktype":"value","property":"P2044","hash":"5fa6030c1aa328bcd517892fa6244a501f2c276b","datavalue":{"value":{"amount":"+118","unit":"http://www.wikidata.org/entity/Q11573"},"type":"quantity"},"datatype":"quantity"},"type":"statement","id":"Q1040$B41A3BE5-8A5E-4AB7-BBE9-982DD4CBE6EB","rank":"normal"}],"P2872":[{"mainsnak":{"snaktype":"value","property":"P2872","hash":"2c7e9de0657d01d2f3e8a151db09a762dc2b10c8","datavalue":{"value":{"entity-type":"item","numeric-id":27960284,"id":"Q27960284"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$70fb0029-45cc-b587-9d07-2927850833a2","rank":"normal"}],"P1813":[{"mainsnak":{"snaktype":"value","property":"P1813","hash":"47e2227b062d5975f3e2dbcff7a5c37a35896b9a","datavalue":{"value":{"text":"Ka","language":"de"},"type":"monolingualtext"},"datatype":"monolingualtext"},"type":"statement","qualifiers":{"P31":[{"snaktype":"value","property":"P31","hash":"79ef508913fd30ffb0f22091b9792d4cce9e5b64","datavalue":{"value":{"entity-type":"item","numeric-id":28758335,"id":"Q28758335"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"qualifiers-order":["P31"],"id":"Q1040$D9D00818-982B-468C-954C-EDE3B16B3F44","rank":"normal","references":[{"hash":"863da0b98b7f3598aaad0af6a9294642a730e2ab","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"8036fdb134c6dc18fac8eeb153d05a4cc3c80411","datavalue":{"value":{"entity-type":"item","numeric-id":28657655,"id":"Q28657655"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}],"P485":[{"mainsnak":{"snaktype":"value","property":"P485","hash":"bd02ae94447a9150541458d1ba67c4f6131a0daa","datavalue":{"value":{"entity-type":"item","numeric-id":21040660,"id":"Q21040660"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$2A442D92-C147-46D8-86C3-9819CA8CB9B7","rank":"normal"}],"P1791":[{"mainsnak":{"snaktype":"value","property":"P1791","hash":"b6d4431201dca5ff69d13e82300b5649429c2da2","datavalue":{"value":{"entity-type":"item","numeric-id":29017256,"id":"Q29017256"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$ea7c6ec7-409b-d05e-e6ad-e6442061ad34","rank":"normal"}],"P463":[{"mainsnak":{"snaktype":"value","property":"P463","hash":"30a2fb9bd69b7a8ba94278464ea2209821824df3","datavalue":{"value":{"entity-type":"item","numeric-id":1780328,"id":"Q1780328"},"type":"wikibase-entityid"},"datatype":"wikibase-item"},"type":"statement","id":"Q1040$66577DC0-78C2-45A7-9BFA-B2E7C9C1F2AD","rank":"normal","references":[{"hash":"a8141f0dc21b09dab7a5f5bb112af1f32d9f6af0","snaks":{"P854":[{"snaktype":"value","property":"P854","hash":"041fa153b8aaed63269b54d5e43b02a98a892a11","datavalue":{"value":"http://www.kivbf.de/site/kivde/node/21213/Lde/index.html","type":"string"},"datatype":"u
rl"}]},"snaks-order":["P854"]}]}],"P3417":[{"mainsnak":{"snaktype":"value","property":"P3417","hash":"a636bb538163e7c5d490972777714ca0404c4c96","datavalue":{"value":"Karlsruhe-Germany","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$495248c8-43d9-b259-5469-dc1cb679f83e","rank":"normal"}],"P1225":[{"mainsnak":{"snaktype":"value","property":"P1225","hash":"1e89c0e724990722679e76029446a5824c6578d9","datavalue":{"value":"10044932","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$50CDCE2A-2818-463F-ABF6-14563BC67DE0","rank":"normal"}],"P902":[{"mainsnak":{"snaktype":"value","property":"P902","hash":"973f341673c23bf0d923718fc198e2288f030766","datavalue":{"value":"6613","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$CD3959D8-5D52-492B-BB39-E8F4F4FE6864","rank":"normal"}],"P440":[{"mainsnak":{"snaktype":"value","property":"P440","hash":"b96dacf55af9384c17370e31825c447f4f1f5578","datavalue":{"value":"08212","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$3736F093-F8D2-4874-A634-582A85D105C3","rank":"normal"}],"P2581":[{"mainsnak":{"snaktype":"value","property":"P2581","hash":"8041e8105701b434b6f0c015f1d5e9b839aab876","datavalue":{"value":"00169414n","type":"string"},"datatype":"external-id"},"type":"statement","id":"Q1040$F774809C-7A39-43E9-9D1C-EC9D401C1CA8","rank":"normal","references":[{"hash":"bd0ee81488cd8c23d4b655e417aac78dca370283","snaks":{"P143":[{"snaktype":"value","property":"P143","hash":"a0b4f55dbedbadaf83e7d2461b47b7aeaba86359","datavalue":{"value":{"entity-type":"item","numeric-id":4837690,"id":"Q4837690"},"type":"wikibase-entityid"},"datatype":"wikibase-item"}]},"snaks-order":["P143"]}]}]},"sitelinks":{"afwiki":{"site":"afwiki","title":"Karlsruhe","badges":[]},"alswiki":{"site":"alswiki","title":"Karlsruhe","badges":[]},"arwiki":{"site":"arwiki","title":"\u0643\u0627\u0631\u0644\u0633\u0631\u0648\u0647","badges":[]},"azbwiki":{"site":"azbwiki","title":"\u06a9\u0627\u0631\u0644\u0633\u0631\u0648\u0647\u0647","badges":[]},"azwiki":{"site":"azwiki","title":"Karlsrue","badges":[]},"barwiki":{"site":"barwiki","title":"Karlsruhe","badges":[]},"be_x_oldwiki":{"site":"be_x_oldwiki","title":"\u041a\u0430\u0440\u043b\u044c\u0441\u0440\u0443\u044d","badges":[]},"bewiki":{"site":"bewiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d","badges":[]},"bgwiki":{"site":"bgwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435","badges":[]},"brwiki":{"site":"brwiki","title":"Karlsruhe","badges":[]},"cawiki":{"site":"cawiki","title":"Karlsruhe","badges":[]},"cebwiki":{"site":"cebwiki","title":"Karlsruhe (kapital sa distrito nga 
gobyerno)","badges":[]},"commonswiki":{"site":"commonswiki","title":"Karlsruhe","badges":[]},"cowiki":{"site":"cowiki","title":"Karlsruhe","badges":[]},"cswiki":{"site":"cswiki","title":"Karlsruhe","badges":[]},"cvwiki":{"site":"cvwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d","badges":[]},"cywiki":{"site":"cywiki","title":"Karlsruhe","badges":[]},"dawiki":{"site":"dawiki","title":"Karlsruhe","badges":[]},"dewiki":{"site":"dewiki","title":"Karlsruhe","badges":["Q17437798"]},"dewikinews":{"site":"dewikinews","title":"Kategorie:Karlsruhe","badges":[]},"dewikiquote":{"site":"dewikiquote","title":"Karlsruhe","badges":[]},"dewikisource":{"site":"dewikisource","title":"Karlsruhe","badges":[]},"dewikivoyage":{"site":"dewikivoyage","title":"Karlsruhe","badges":["Q17559452"]},"dsbwiki":{"site":"dsbwiki","title":"Karlsruhe","badges":[]},"elwiki":{"site":"elwiki","title":"\u039a\u03b1\u03c1\u03bb\u03c3\u03c1\u03bf\u03cd\u03b7","badges":[]},"enwiki":{"site":"enwiki","title":"Karlsruhe","badges":[]},"enwikivoyage":{"site":"enwikivoyage","title":"Karlsruhe","badges":[]},"eowiki":{"site":"eowiki","title":"Karlsruhe","badges":[]},"eswiki":{"site":"eswiki","title":"Karlsruhe","badges":[]},"etwiki":{"site":"etwiki","title":"Karlsruhe","badges":[]},"euwiki":{"site":"euwiki","title":"Karlsruhe","badges":[]},"fawiki":{"site":"fawiki","title":"\u06a9\u0627\u0631\u0644\u0633\u0631\u0648\u0647\u0647","badges":[]},"fiwiki":{"site":"fiwiki","title":"Karlsruhe","badges":[]},"frrwiki":{"site":"frrwiki","title":"Karlsruhe","badges":[]},"frwiki":{"site":"frwiki","title":"Karlsruhe","badges":[]},"frwikivoyage":{"site":"frwikivoyage","title":"Karlsruhe","badges":[]},"fywiki":{"site":"fywiki","title":"Karlsruhe (st\u00ead)","badges":[]},"gdwiki":{"site":"gdwiki","title":"Karlsruhe","badges":[]},"glwiki":{"site":"glwiki","title":"Karlsruhe","badges":[]},"hakwiki":{"site":"hakwiki","title":"Karlsruhe","badges":[]},"hewiki":{"site":"hewiki","title":"\u05e7\u05e8\u05dc\u05e1\u05e8\u05d5\u05d4\u05d4","badges":[]},"hiwiki":{"site":"hiwiki","title":"\u0915\u093e\u0930\u094d\u0932\u094d\u0938\u0930\u0941\u0939\u0947","badges":[]},"hrwiki":{"site":"hrwiki","title":"Karlsruhe","badges":[]},"hsbwiki":{"site":"hsbwiki","title":"Karlsruhe","badges":[]},"huwiki":{"site":"huwiki","title":"Karlsruhe","badges":[]},"hywiki":{"site":"hywiki","title":"\u053f\u0561\u0580\u056c\u057d\u0580\u0578\u0582\u0565","badges":[]},"idwiki":{"site":"idwiki","title":"Karlsruhe","badges":[]},"iewiki":{"site":"iewiki","title":"Karlsruhe","badges":[]},"iowiki":{"site":"iowiki","title":"Karlsruhe","badges":[]},"iswiki":{"site":"iswiki","title":"Karlsruhe","badges":[]},"itwiki":{"site":"itwiki","title":"Karlsruhe","badges":[]},"itwikivoyage":{"site":"itwikivoyage","title":"Karlsruhe","badges":[]},"jawiki":{"site":"jawiki","title":"\u30ab\u30fc\u30eb\u30b9\u30eb\u30fc\u30a8","badges":[]},"kawiki":{"site":"kawiki","title":"\u10d9\u10d0\u10e0\u10da\u10e1\u10e0\u10e3\u10d4","badges":[]},"kowiki":{"site":"kowiki","title":"\uce74\ub97c\uc2a4\ub8e8\uc5d0","badges":[]},"kuwiki":{"site":"kuwiki","title":"Karlsruhe","badges":[]},"lawiki":{"site":"lawiki","title":"Carolsruha","badges":[]},"lbwiki":{"site":"lbwiki","title":"Karlsruhe","badges":[]},"lmowiki":{"site":"lmowiki","title":"Karlsruhe","badges":[]},"ltwiki":{"site":"ltwiki","title":"Karlsr\u016bj\u0117","badges":[]},"lvwiki":{"site":"lvwiki","title":"Karlsr\u016be","badges":[]},"mkwiki":{"site":"mkwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435","badges":[]},"mnwiki":{"site":"mnwi
ki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d","badges":[]},"mrwiki":{"site":"mrwiki","title":"\u0915\u093e\u0930\u094d\u0932\u094d\u0938\u0930\u0942\u0939","badges":[]},"mswiki":{"site":"mswiki","title":"Karlsruhe","badges":[]},"ndswiki":{"site":"ndswiki","title":"Karlsruhe","badges":[]},"nlwiki":{"site":"nlwiki","title":"Karlsruhe (stad)","badges":[]},"nlwikinews":{"site":"nlwikinews","title":"Categorie:Karlsruhe","badges":[]},"nnwiki":{"site":"nnwiki","title":"Karlsruhe","badges":[]},"nowiki":{"site":"nowiki","title":"Karlsruhe","badges":[]},"ocwiki":{"site":"ocwiki","title":"Karlsruhe","badges":[]},"pflwiki":{"site":"pflwiki","title":"Kallsruh","badges":[]},"plwiki":{"site":"plwiki","title":"Karlsruhe","badges":[]},"plwikivoyage":{"site":"plwikivoyage","title":"Karlsruhe","badges":[]},"pnbwiki":{"site":"pnbwiki","title":"\u06a9\u0627\u0631\u0644\u0632\u0631\u0648\u06c1\u06d2 \u0645\u062d\u0644","badges":[]},"ptwiki":{"site":"ptwiki","title":"Karlsruhe","badges":[]},"rowiki":{"site":"rowiki","title":"Karlsruhe","badges":[]},"rowikivoyage":{"site":"rowikivoyage","title":"Karlsruhe","badges":[]},"ruwiki":{"site":"ruwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d","badges":[]},"ruwikivoyage":{"site":"ruwikivoyage","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d","badges":[]},"sahwiki":{"site":"sahwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d","badges":[]},"scowiki":{"site":"scowiki","title":"Karlsruhe","badges":[]},"shwiki":{"site":"shwiki","title":"Karlsruhe","badges":[]},"simplewiki":{"site":"simplewiki","title":"Karlsruhe","badges":[]},"skwiki":{"site":"skwiki","title":"Karlsruhe (mesto)","badges":[]},"sqwiki":{"site":"sqwiki","title":"Karlsruhe","badges":[]},"srwiki":{"site":"srwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435","badges":[]},"stqwiki":{"site":"stqwiki","title":"Karlsruhe","badges":[]},"svwiki":{"site":"svwiki","title":"Karlsruhe","badges":[]},"svwikivoyage":{"site":"svwikivoyage","title":"Karlsruhe","badges":[]},"swwiki":{"site":"swwiki","title":"Karlsruhe","badges":[]},"szlwiki":{"site":"szlwiki","title":"Karlsruhe","badges":[]},"thwiki":{"site":"thwiki","title":"\u0e04\u0e32\u0e23\u0e4c\u0e25\u0e2a\u0e23\u0e39\u0e40\u0e2d\u0e2d","badges":[]},"tkwiki":{"site":"tkwiki","title":"Karlsrue","badges":[]},"trwiki":{"site":"trwiki","title":"Karlsruhe","badges":[]},"ttwiki":{"site":"ttwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u044d","badges":[]},"twwiki":{"site":"twwiki","title":"Karlsruhe","badges":[]},"ukwiki":{"site":"ukwiki","title":"\u041a\u0430\u0440\u043b\u0441\u0440\u0443\u0435","badges":[]},"uzwiki":{"site":"uzwiki","title":"Karlsruhe","badges":[]},"viwiki":{"site":"viwiki","title":"Karlsruhe","badges":[]},"vowiki":{"site":"vowiki","title":"Karlsruhe","badges":[]},"warwiki":{"site":"warwiki","title":"Karlsruhe","badges":[]},"yiwiki":{"site":"yiwiki","title":"\u05e7\u05d0\u05e8\u05dc\u05e1\u05e8\u05d5\u05e2","badges":[]},"zh_min_nanwiki":{"site":"zh_min_nanwiki","title":"Karlsruhe","badges":[]},"zh_yuewiki":{"site":"zh_yuewiki","title":"\u5361\u723e\u65af\u9b6f\u4e9e","badges":[]},"zhwiki":{"site":"zhwiki","title":"\u5361\u5c14\u65af\u9c81\u5384","badges":[]}}}},"success":1}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-Q6-Q42-P31.json000066400000000000000000002724431444772566300310000ustar00rootroot00000000000000{"entities":{"Q6":{"id":"Q6","missing":""},"Q42":{"pageid":138,"ns":0,"title":"Q42","lastrevid":196015688,"modified":"2015-02-13T00:11:48Z","id"
:"Q42","type":"item","aliases":{"en":[{"language":"en","value":"Douglas No\u00ebl Adams"},{"language":"en","value":"Douglas Noel Adams"}],"ru":[{"language":"ru","value":"\u0410\u0434\u0430\u043c\u0441, \u0414\u0443\u0433\u043b\u0430\u0441"}],"nb":[{"language":"nb","value":"Douglas No\u00ebl Adams"}],"fr":[{"language":"fr","value":"Douglas Noel Adams"}],"de":[{"language":"de","value":"Douglas No\u00ebl Adams"}],"pt-br":[{"language":"pt-br","value":"Douglas No\u00ebl Adams"},{"language":"pt-br","value":"Douglas Noel Adams"}],"be-tarask":[{"language":"be-tarask","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"}],"zh":[{"language":"zh","value":"\u4e9e\u7576\u65af"}],"es":[{"language":"es","value":"Douglas Noel Adams"}],"it":[{"language":"it","value":"Douglas Noel Adams"}],"cs":[{"language":"cs","value":"Douglas No\u00ebl Adams"},{"language":"cs","value":"Douglas Noel Adams"}],"hy":[{"language":"hy","value":"\u0531\u0564\u0561\u0574\u057d, \u0534\u0578\u0582\u0563\u056c\u0561\u057d"}],"el":[{"language":"el","value":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u039d\u03cc\u03b5\u03bb \u0386\u03bd\u03c4\u03b1\u03bc\u03c2"}]},"labels":{"fr":{"language":"fr","value":"Douglas Adams"},"ru":{"language":"ru","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"pl":{"language":"pl","value":"Douglas Adams"},"it":{"language":"it","value":"Douglas Adams"},"en-gb":{"language":"en-gb","value":"Douglas Adams"},"nb":{"language":"nb","value":"Douglas Adams"},"es":{"language":"es","value":"Douglas Adams"},"en-ca":{"language":"en-ca","value":"Douglas Adams"},"hr":{"language":"hr","value":"Douglas Adams"},"pt":{"language":"pt","value":"Douglas Adams"},"ko":{"language":"ko","value":"\ub354\uae00\ub7ec\uc2a4 \uc560\ub364\uc2a4"},"nl":{"language":"nl","value":"Douglas Adams"},"el":{"language":"el","value":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u0386\u03bd\u03c4\u03b1\u03bc\u03c2"},"ar":{"language":"ar","value":"\u062f\u0648\u063a\u0644\u0627\u0633 \u0622\u062f\u0645\u0632"},"arz":{"language":"arz","value":"\u062f\u0648\u062c\u0644\u0627\u0633 \u0627\u062f\u0627\u0645\u0632"},"bar":{"language":"bar","value":"Douglas Adams"},"be":{"language":"be","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"bg":{"language":"bg","value":"\u0414\u044a\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"bs":{"language":"bs","value":"Douglas Adams"},"ca":{"language":"ca","value":"Douglas Adams"},"cs":{"language":"cs","value":"Douglas Adams"},"cy":{"language":"cy","value":"Douglas Adams"},"da":{"language":"da","value":"Douglas Adams"},"eo":{"language":"eo","value":"Douglas ADAMS"},"et":{"language":"et","value":"Douglas Adams"},"fa":{"language":"fa","value":"\u062f\u0627\u06af\u0644\u0627\u0633 \u0622\u062f\u0627\u0645\u0632"},"fi":{"language":"fi","value":"Douglas Adams"},"ga":{"language":"ga","value":"Douglas Adams"},"gl":{"language":"gl","value":"Douglas Adams"},"he":{"language":"he","value":"\u05d3\u05d0\u05d2\u05dc\u05e1 \u05d0\u05d3\u05d0\u05de\u05e1"},"hu":{"language":"hu","value":"Douglas Adams"},"id":{"language":"id","value":"Douglas Adams"},"io":{"language":"io","value":"Douglas Adams"},"is":{"language":"is","value":"Douglas Adams"},"ja":{"language":"ja","value":"\u30c0\u30b0\u30e9\u30b9\u30fb\u30a2\u30c0\u30e0\u30ba"},"jv":{"language":"jv","value":"Douglas Adams"},"ka":{"language":"ka","value":"\u10d3\u10d0\u10d2\u10da\u10d0\u10e1 \u10d0\u10d3\u10d0\u10db\u10e1\u10d8"},"la":{"language":"la","value":"Duglassius 
Adams"},"lv":{"language":"lv","value":"Duglass Adamss"},"mk":{"language":"mk","value":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"mr":{"language":"mr","value":"\u0921\u0917\u094d\u0932\u0938 \u0905\u0945\u0921\u092e\u094d\u0938"},"nn":{"language":"nn","value":"Douglas Adams"},"ro":{"language":"ro","value":"Douglas Adams"},"sco":{"language":"sco","value":"Douglas Adams"},"sh":{"language":"sh","value":"Douglas Adams"},"sk":{"language":"sk","value":"Douglas Adams"},"sl":{"language":"sl","value":"Douglas Adams"},"sq":{"language":"sq","value":"Douglas Adams"},"sr":{"language":"sr","value":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"sv":{"language":"sv","value":"Douglas Adams"},"ta":{"language":"ta","value":"\u0b9f\u0b95\u0bcd\u0bb3\u0bb8\u0bcd \u0b86\u0b9f\u0bae\u0bcd\u0bb8\u0bcd"},"tr":{"language":"tr","value":"Douglas Adams"},"uk":{"language":"uk","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"vi":{"language":"vi","value":"Douglas Adams"},"zh":{"language":"zh","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-cn":{"language":"zh-cn","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-hans":{"language":"zh-hans","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-hant":{"language":"zh-hant","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"de-ch":{"language":"de-ch","value":"Douglas Adams"},"pt-br":{"language":"pt-br","value":"Douglas Adams"},"zh-sg":{"language":"zh-sg","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-my":{"language":"zh-my","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af"},"zh-hk":{"language":"zh-hk","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"zh-tw":{"language":"zh-tw","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"zh-mo":{"language":"zh-mo","value":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af"},"war":{"language":"war","value":"Douglas Adams"},"be-tarask":{"language":"be-tarask","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0437"},"be-x-old":{"language":"be-x-old","value":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"vep":{"language":"vep","value":"Adams Duglas"},"ur":{"language":"ur","value":"\u0688\u06af\u0644\u0633 \u0627\u06cc\u0688\u0645"},"oc":{"language":"oc","value":"Douglas Adams"},"af":{"language":"af","value":"Douglas Adams"},"an":{"language":"an","value":"Douglas Adams"},"br":{"language":"br","value":"Douglas Adams"},"eu":{"language":"eu","value":"Douglas Adams"},"lb":{"language":"lb","value":"Douglas Adams"},"lmo":{"language":"lmo","value":"Douglas Adams"},"lt":{"language":"lt","value":"Douglas Adams"},"nds":{"language":"nds","value":"Douglas Adams"},"nds-nl":{"language":"nds-nl","value":"Douglas Adams"},"nl-informal":{"language":"nl-informal","value":"Douglas Adams"},"pms":{"language":"pms","value":"Douglas Adams"},"vec":{"language":"vec","value":"Douglas Adams"},"wa":{"language":"wa","value":"Douglas Adams"},"sr-ec":{"language":"sr-ec","value":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441"},"sr-el":{"language":"sr-el","value":"Daglas Adams"},"de":{"language":"de","value":"Douglas Adams"},"en":{"language":"en","value":"Douglas Adams"},"ckb":{"language":"ckb","value":"\u062f\u06d5\u06af\u0644\u0627\u0633 \u0626\u0627\u062f\u0645\u0632"},"fo":{"language":"fo","value":"Douglas Adams"},"kl":{"language":"kl","value":"Douglas Adams"},"gsw":{"language":"gsw","value":"Douglas 
Adams"},"te":{"language":"te","value":"\u0c21\u0c17\u0c4d\u0c32\u0c38\u0c4d \u0c06\u0c21\u0c2e\u0c4d\u0c38\u0c4d"},"si":{"language":"si","value":"\u0da9\u0d9c\u0dca\u0dbd\u0dc3\u0dca \u0d87\u0da9\u0db8\u0dca\u0dc3\u0dca"},"bn":{"language":"bn","value":"\u09a1\u0997\u09b2\u09be\u09b8"},"hi":{"language":"hi","value":"\u0921\u0917\u094d\u0932\u0938 \u0905\u200d\u0921\u092e\u094d\u0938"},"rwr":{"language":"rwr","value":"\u0921\u0917\u094d\u0932\u0938 \u0905\u200d\u0921\u092e\u094d\u0938"},"mg":{"language":"mg","value":"Douglas Adams"},"ml":{"language":"ml","value":"\u0d21\u0d17\u0d4d\u0d32\u0d38\u0d4d \u0d06\u0d21\u0d02\u0d38\u0d4d"},"gu":{"language":"gu","value":"\u0aa1\u0a97\u0acd\u0ab2\u0abe\u0ab8 \u0a8f\u0aa1\u0aae\u0acd\u0ab8"},"hy":{"language":"hy","value":"\u0534\u0578\u0582\u0563\u056c\u0561\u057d \u0531\u0564\u0561\u0574\u057d"},"ast":{"language":"ast","value":"Douglas Adams"},"co":{"language":"co","value":"Douglas Adams"},"de-at":{"language":"de-at","value":"Douglas Adams"},"frp":{"language":"frp","value":"Douglas Adams"},"fur":{"language":"fur","value":"Douglas Adams"},"gd":{"language":"gd","value":"Douglas Adams"},"ia":{"language":"ia","value":"Douglas Adams"},"ie":{"language":"ie","value":"Douglas Adams"},"kg":{"language":"kg","value":"Douglas Adams"},"li":{"language":"li","value":"Douglas Adams"},"lij":{"language":"lij","value":"Douglas Adams"},"min":{"language":"min","value":"Douglas Adams"},"ms":{"language":"ms","value":"Douglas Adams"},"nap":{"language":"nap","value":"Douglas Adams"},"nrm":{"language":"nrm","value":"Douglas Adams"},"pcd":{"language":"pcd","value":"Douglas Adams"},"rm":{"language":"rm","value":"Douglas Adams"},"sc":{"language":"sc","value":"Douglas Adams"},"scn":{"language":"scn","value":"Douglas Adams"},"sw":{"language":"sw","value":"Douglas Adams"},"vls":{"language":"vls","value":"Douglas Adams"},"vo":{"language":"vo","value":"Douglas Adams"},"wo":{"language":"wo","value":"Douglas Adams"},"zu":{"language":"zu","value":"Douglas Adams"},"az":{"language":"az","value":"Duqlas Noel Adams"},"ak":{"language":"ak","value":"Doglas Adams"}},"descriptions":{"en":{"language":"en","value":"English writer and humorist"},"fr":{"language":"fr","value":"\u00e9crivain anglais de science-fiction"},"en-gb":{"language":"en-gb","value":"English writer and humourist"},"nb":{"language":"nb","value":"engelsk science fiction-forfatter og humorist"},"it":{"language":"it","value":"scrittore inglese"},"de":{"language":"de","value":"britischer Schriftsteller"},"es":{"language":"es","value":"escritor y guionista radiof\u00f3nico brit\u00e1nico"},"ru":{"language":"ru","value":"\u0430\u043d\u0433\u043b\u0438\u0439\u0441\u043a\u0438\u0439 \u043f\u0438\u0441\u0430\u0442\u0435\u043b\u044c, \u0434\u0440\u0430\u043c\u0430\u0442\u0443\u0440\u0433 \u0438 \u0441\u0446\u0435\u043d\u0430\u0440\u0438\u0441\u0442, \u0430\u0432\u0442\u043e\u0440 \u0441\u0435\u0440\u0438\u0438 \u043a\u043d\u0438\u0433 \u00ab\u0410\u0432\u0442\u043e\u0441\u0442\u043e\u043f\u043e\u043c \u043f\u043e 
\u0433\u0430\u043b\u0430\u043a\u0442\u0438\u043a\u0435\u00bb."},"zh-hans":{"language":"zh-hans","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-hant":{"language":"zh-hant","value":"\u82f1\u570b\u4f5c\u5bb6"},"zh-cn":{"language":"zh-cn","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-sg":{"language":"zh-sg","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-my":{"language":"zh-my","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh":{"language":"zh","value":"\u82f1\u56fd\u4f5c\u5bb6"},"zh-hk":{"language":"zh-hk","value":"\u82f1\u570b\u4f5c\u5bb6"},"zh-tw":{"language":"zh-tw","value":"\u82f1\u570b\u4f5c\u5bb6"},"zh-mo":{"language":"zh-mo","value":"\u82f1\u570b\u4f5c\u5bb6"},"ca":{"language":"ca","value":"escriptor angl\u00e8s"},"fi":{"language":"fi","value":"englantilainen kirjailija ja humoristi"},"cs":{"language":"cs","value":"anglick\u00fd spisovatel, humorista a dramatik"},"sv":{"language":"sv","value":"brittisk f\u00f6rfattare och humorist"},"pt-br":{"language":"pt-br","value":"escritor e humorista ingl\u00eas"},"ta":{"language":"ta","value":"\u0b86\u0b99\u0bcd\u0b95\u0bbf\u0bb2 \u0b8e\u0bb4\u0bc1\u0ba4\u0bcd\u0ba4\u0bbe\u0bb3\u0bb0\u0bcd \u0bae\u0bb1\u0bcd\u0bb1\u0bc1\u0bae\u0bcd \u0ba8\u0b95\u0bc8\u0b9a\u0bcd\u0b9a\u0bc1\u0bb5\u0bc8\u0baf\u0bbe\u0bb3\u0bb0\u0bcd"},"sl":{"language":"sl","value":"angle\u0161ki pisatelj, humorist in dramatik"},"da":{"language":"da","value":"forfatter"},"nl":{"language":"nl","value":"Engels sciencefictionschrijver"},"pt":{"language":"pt","value":"escritor e roteirista ingl\u00eas"},"pl":{"language":"pl","value":"brytyjski pisarz"},"lv":{"language":"lv","value":"ang\u013cu zin\u0101tnisk\u0101s fantastikas rakstnieks un humorists"},"simple":{"language":"simple","value":"writer and humorist from England"},"sr":{"language":"sr","value":"\u0435\u043d\u0433\u043b\u0435\u0441\u043a\u0438 \u043f\u0438\u0441\u0430\u0446 \u043d\u0430\u0443\u0447\u043d\u0435 \u0444\u0430\u043d\u0442\u0430\u0441\u0442\u0438\u043a\u0435 \u0438 \u0445\u0443\u043c\u043e\u0440\u0438\u0441\u0442\u0430"},"sr-ec":{"language":"sr-ec","value":"\u0435\u043d\u0433\u043b\u0435\u0441\u043a\u0438 \u043f\u0438\u0441\u0430\u0446 \u043d\u0430\u0443\u0447\u043d\u0435 \u0444\u0430\u043d\u0442\u0430\u0441\u0442\u0438\u043a\u0435 \u0438 \u0445\u0443\u043c\u043e\u0440\u0438\u0441\u0442\u0430"},"sr-el":{"language":"sr-el","value":"engleski pisac nau\u010dne fantastike i humorista"},"eo":{"language":"eo","value":"angla a\u016dtoro de sciencfikcio-romanoj kaj humoristo"},"bar":{"language":"bar","value":"a englischer Science-Fiction-Schriftsteller"},"br":{"language":"br","value":"skrivagner saoznek"},"ja":{"language":"ja","value":"\u30a4\u30f3\u30b0\u30e9\u30f3\u30c9\u306e\u4f5c\u5bb6"},"nn":{"language":"nn","value":"engelsk sciencefictionforfattar og humorist"},"tr":{"language":"tr","value":"\u0130ngiliz bilim kurgu ve mizah yazar\u0131"},"si":{"language":"si","value":"\u0d89\u0d82\u0d9c\u0dca\u200d\u0dbb\u0dd3\u0dc3\u0dd2 \u0d9a\u0dc0\u0dd2\u0dba\u0dd9\u0d9a\u0dca"},"vi":{"language":"vi","value":"Nh\u00e0 v\u0103n v\u00e0 nh\u00e0 so\u1ea1n h\u00e0i k\u1ecbch ng\u01b0\u1eddi Anh"},"cy":{"language":"cy","value":"awdur a dychanwr Seisnig"},"gu":{"language":"gu","value":"\u0a85\u0a82\u0a97\u0acd\u0ab0\u0ac7\u0a9c\u0ac0 \u0ab2\u0ac7\u0a96\u0a95 \u0a85\u0aa8\u0ac7 \u0ab9\u0abe\u0ab8\u0acd\u0aaf\u0a95\u0abe\u0ab0"},"uk":{"language":"uk","value":"\u0431\u0440\u0438\u0442\u0430\u043d\u0441\u044c\u043a\u0438\u0439 \u043a\u043e\u043c\u0456\u0447\u043d\u0438\u0439 \u0440\u0430\u0434\u0456\u043e\u0434\u0440\u0430\u043c\u0430\u0442\u0443\u0440\u0433, 
\u043f\u0438\u0441\u044c\u043c\u0435\u043d\u043d\u0438\u043a"},"ro":{"language":"ro","value":"scriitor, dramaturg englez"},"hu":{"language":"hu","value":"angol \u00edr\u00f3"},"fa":{"language":"fa","value":"\u0641\u06cc\u0644\u0645\u0646\u0627\u0645\u0647\u200c\u0646\u0648\u06cc\u0633 \u0648 \u0646\u0648\u06cc\u0633\u0646\u062f\u0647 \u0628\u0631\u06cc\u062a\u0627\u0646\u06cc\u0627\u06cc\u06cc"},"af":{"language":"af","value":"Engelse skrywer en humoris"},"mk":{"language":"mk","value":"\u0430\u043d\u0433\u043b\u0438\u0441\u043a\u0438 \u043f\u0438\u0441\u0430\u0442\u0435\u043b"},"el":{"language":"el","value":"\u0386\u03b3\u03b3\u03bb\u03bf\u03c2 \u03c3\u03c5\u03b3\u03b3\u03c1\u03b1\u03c6\u03ad\u03b1\u03c2"},"hy":{"language":"hy","value":"\u0561\u0576\u0563\u056c\u056b\u0561\u0581\u056b \u0563\u0580\u0578\u0572, \u0564\u0580\u0561\u0574\u0561\u057f\u0578\u0582\u0580\u0563, \u057d\u0581\u0565\u0576\u0561\u0580\u056b\u057d\u057f, \u00ab\u0531\u057e\u057f\u0578\u057d\u057f\u0578\u057a\u0578\u057e \u0566\u0562\u0578\u057d\u0561\u0577\u0580\u057b\u056b\u056f\u056b \u0574\u056b\u057b\u0563\u0561\u056c\u0561\u056f\u057f\u056b\u056f\u0561\u056f\u0561\u0576 \u0578\u0582\u0572\u0565\u0581\u0578\u0582\u0575\u0581\u00bb \u057e\u0565\u057a\u0565\u0580\u056b \u0577\u0561\u0580\u0584"}},"claims":{"P31":[{"id":"Q42$F078E5B3-F9A8-480E-B7AC-D97778CBBEF9","mainsnak":{"snaktype":"value","property":"P31","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"792979348becb9132618e89ee55b5fa4a5004282","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}],"P214":[{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P214","P813"]}]}],"P1368":[{"id":"Q42$11725e9f-4f81-e0fd-b00a-b885fe7a75ac","mainsnak":{"snaktype":"value","property":"P1368","datatype":"string","datavalue":{"value":"000057405","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1477":[{"id":"Q42$45220d20-40d2-299e-f4cc-f6cce89f2f42","mainsnak":{"snaktype":"value","property":"P1477","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas No\u00ebl Adams","language":"en"},"type":"monolingualtext"}},"type":"statement","rank":"normal","references":[{"hash":"e5b51fa989eda0343986f0b592fde988b3d66f5f","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Obituary: Douglas 
Adams","language":"en"},"type":"monolingualtext"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11148},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002001-05-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.theguardian.com/news/2001/may/15/guardianobituaries.books","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P50":[{"snaktype":"value","property":"P50","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18145749},"type":"wikibase-entityid"}}]},"snaks-order":["P1476","P123","P577","P364","P854","P813","P50"]}]}],"P1015":[{"id":"Q42$6583fdb7-4ffa-9fe1-4288-1a1cbb2950d0","mainsnak":{"snaktype":"value","property":"P1015","datatype":"string","datavalue":{"value":"x90196888","type":"string"}},"type":"statement","rank":"normal"}],"P735":[{"id":"Q42$1d7d0ea9-412f-8b5b-ba8d-405ab9ecf026","mainsnak":{"snaktype":"value","property":"P735","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":463035},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P734":[{"id":"Q42$24df999a-4629-c679-e1f0-199bcefabbf3","mainsnak":{"snaktype":"value","property":"P734","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":351735},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P21":[{"id":"q42$39F4DE4F-C277-449C-9F99-512350971B5B","mainsnak":{"snaktype":"value","property":"P21","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":6581097},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"792979348becb9132618e89ee55b5fa4a5004282","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}],"P214":[{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P214","P813"]}]}],"P18":[{"id":"q42$43D37345-54ED-4FF2-A226-EC26A356E38D","mainsnak":{"snaktype":"value","property":"P18","datatype":"commonsMedia","datavalue":{"value":"Douglas adams portrait 
cropped.jpg","type":"string"}},"type":"statement","rank":"normal"}],"P19":[{"id":"q42$3D284234-52BC-4DA3-83A3-7C39F84BA518","mainsnak":{"snaktype":"value","property":"P19","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":350},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]},{"hash":"ff3ff2646d7b04b3f9c756d928674cf646611ddd","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.theguardian.com/news/2001/may/15/guardianobituaries.books","type":"string"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002001-05-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1433":[{"snaktype":"value","property":"P1433","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11148},"type":"wikibase-entityid"}}],"P50":[{"snaktype":"value","property":"P50","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18145749},"type":"wikibase-entityid"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Obituary: Douglas Adams","language":"en"},"type":"monolingualtext"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"snaks-order":["P854","P577","P813","P1433","P50","P1476","P407"]},{"hash":"3252cbd7da4157b7a8ee2c244269904bb7c0c3dd","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Hitch Hiker's Guide author Douglas Adams dies aged 
49","language":"en"},"type":"monolingualtext"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002001-05-13T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":192621},"type":"wikibase-entityid"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.telegraph.co.uk/news/uknews/1330072/Hitch-Hikers-Guide-author-Douglas-Adams-dies-aged-49.html","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002015-01-03T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P1476","P577","P123","P407","P854","P813"]}]}],"P27":[{"id":"q42$DE2A0C89-6199-44D0-B727-D7A4BE031A2B","mainsnak":{"snaktype":"value","property":"P27","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":145},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"792979348becb9132618e89ee55b5fa4a5004282","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}],"P214":[{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P214","P813"]}]}],"P103":[{"id":"q42$D9E6DEFB-472B-44F6-A8E2-E2B90700C74A","mainsnak":{"snaktype":"value","property":"P103","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"05479b1ec63c2230bb1f6340bfd909abe679aae9","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":193563},"type":"wikibase-entityid"}}],"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P268","P813"]}]}],"P119":[{"id":"q42$881F40DC-0AFE-4FEB-B882-79600D234273","mainsnak":{"snaktype":"value","property":"P119","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":533697},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"280eb23b01b60c6fe82056319e3a7349b59cb906","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}],"P535":[{"snaktype":"value","property":"P535","datatype":"string","datavalue":{"value":
"22814","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P535","P813"]},{"hash":"efc8cf9ecfe4d3f72336553c10b698fb7a03211e","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://highgatecemetery.org/visit/who","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":533697},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Who\u2019s here","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]}]}],"P20":[{"id":"q42$C0DE2013-54C0-48F9-AD90-8A235248D8C7","mainsnak":{"snaktype":"value","property":"P20","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":159288},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]},{"hash":"e38dadec9d6b784aab71b64e334557250d40c256","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P140":[{"id":"q42$8419C20C-8EF8-4EC0-80D6-AF1CA55E7557","mainsnak":{"snaktype":"value","property":"P140","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":7066},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"201520d4aa7855e84587a77e295a84b8278e28a1","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.douglasadams.eu/en_adams_athee.php","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas Adams and God. 
Portrait of a radical atheist","language":"en"},"type":"monolingualtext"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"snaks-order":["P854","P813","P1476","P407"]},{"hash":"bc8854326642498c296cdb0afabd986e06536fd7","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nichirenbuddhist.org/Religion/Atheists/DouglasAdams/Interview-American-Atheists.html","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15290435},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002002-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas Adams' Interview with American Atheists","language":"en"},"type":"monolingualtext"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"snaks-order":["P854","P123","P577","P813","P1476","P407"]}]}],"P106":[{"id":"q42$E13E619F-63EF-4B72-99D9-7A45C7C6AD34","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":28389},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"05479b1ec63c2230bb1f6340bfd909abe679aae9","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":193563},"type":"wikibase-entityid"}}],"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P268","P813"]}]},{"id":"Q42$D6E21D67-05D6-4A0B-8458-0744FCEED13D","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":6625963},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q42$7eb8aaef-4ddf-8b87-bd02-406f91a296bd","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":4853732},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"q42$CBDC4890-D5A2-469C-AEBB-EFB682B891E7","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18844224},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"05479b1ec63c2230bb1f6340bfd909abe679aae9","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":193563},"type":"wikibase-entityid
"}}],"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P268","P813"]}]},{"id":"Q42$58F0D772-9CE4-46AC-BF0D-FBBBAFA09603","mainsnak":{"snaktype":"value","property":"P106","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":36180},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P244":[{"id":"q42$2D472379-EC67-4C71-9700-0F9D551BF5E6","mainsnak":{"snaktype":"value","property":"P244","datatype":"string","datavalue":{"value":"n80076765","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"3e9859118d01bc62b5dbe8939be812333eb7c594","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1551807},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P947":[{"id":"Q42$cf5f61ec-440d-60d4-7847-e95f75171f2f","mainsnak":{"snaktype":"value","property":"P947","datatype":"string","datavalue":{"value":"000002833","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"980624fa9331261f9383f286b4056619228b626f","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1048694},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P214":[{"id":"q42$488251B2-6732-4D49-85B0-6101803C97AB","mainsnak":{"snaktype":"value","property":"P214","datatype":"string","datavalue":{"value":"113230702","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"3e9859118d01bc62b5dbe8939be812333eb7c594","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1551807},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P345":[{"id":"q42$231549F5-0296-4D87-993D-6CBE3F24C0D2","mainsnak":{"snaktype":"value","property":"P345","datatype":"string","datavalue":{"value":"nm0010930","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P373":[{"id":"q42$7EC4631F-FB22-4768-9B75-61875CD6C854","mainsnak":{"snaktype":"value","property":"P373","datatype":"string","datavalue":{"value":"Douglas Adams","type":"string"}},"type":"statement","rank":"normal"}],"P349":[{"id":"q42$31B1BC2A-D09F-4151-AD2B-5CEA229B9058","mainsnak":{"snaktype":"value","property":"P349","datatype":"string","datavalue":{"value":"00430962","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P213":[{"id":"q42$1CF5840B-A274-402B-9556-F202C2F9B831","mainsnak":{"snaktype":"value","property":"P213","datatype":"string","datavalue":{"value":"0000 0000 8045 
6315","type":"string"}},"type":"statement","rank":"normal"}],"P434":[{"id":"q42$fc61f952-4071-7cc1-c20a-dc7a90ad6515","mainsnak":{"snaktype":"value","property":"P434","datatype":"string","datavalue":{"value":"e9ed318d-8cc5-4cf8-ab77-505e39ab6ea4","type":"string"}},"type":"statement","rank":"normal"}],"P269":[{"id":"q42$D0E17F5E-4302-43F8-926B-5FE7AA8A4380","mainsnak":{"snaktype":"value","property":"P269","datatype":"string","datavalue":{"value":"026677636","type":"string"}},"type":"statement","rank":"normal"}],"P268":[{"id":"q42$BB4B67FE-FECA-4469-9DEE-3E8F03AC9F1D","mainsnak":{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"f70116eac7f49194478b3025330bfd8dcffa3c69","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":8447},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P227":[{"id":"q42$8AA8CCC1-86CE-4C66-88FC-267621A81EA0","mainsnak":{"snaktype":"value","property":"P227","datatype":"string","datavalue":{"value":"119033364","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"3e9859118d01bc62b5dbe8939be812333eb7c594","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1551807},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P535":[{"id":"q42$0DD4F039-6CDC-40C9-871B-63CDE4A47032","mainsnak":{"snaktype":"value","property":"P535","datatype":"string","datavalue":{"value":"22814","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"3d49e63975e13090ebe487f750eb8e4dd6304748","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas Noel 
Adams","language":"en"},"type":"monolingualtext"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.findagrave.com/cgi-bin/fg.cgi?page=gr&GRid=22814","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002001-06-25T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P1476","P854","P123","P577","P813"]}]}],"P509":[{"id":"q42$E651BD8A-EA3E-478A-8558-C956EE60B29F","mainsnak":{"snaktype":"value","property":"P509","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":12152},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"280eb23b01b60c6fe82056319e3a7349b59cb906","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":63056},"type":"wikibase-entityid"}}],"P535":[{"snaktype":"value","property":"P535","datatype":"string","datavalue":{"value":"22814","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P535","P813"]},{"hash":"d82842ab8605e069bb11dc20e2a20f186c52612e","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.historyorb.com/people/douglas-adams","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15290366},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Famous People - Douglas Adams","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]},{"hash":"4541b8ccffc595a5aa34cae7b32ec534a2684c5f","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Obituary: Douglas 
Adams","language":"en"},"type":"monolingualtext"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":11148},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002001-05-15T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.theguardian.com/news/2001/may/15/guardianobituaries.books","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002014-01-03T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P50":[{"snaktype":"value","property":"P50","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":18145749},"type":"wikibase-entityid"}}]},"snaks-order":["P1476","P123","P577","P407","P854","P813","P50"]},{"hash":"f62e6b5fec90e28807b9f78e7446ca7710ff08c5","snaks":{"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Hitch Hiker's Guide author Douglas Adams dies aged 49","language":"en"},"type":"monolingualtext"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":192621},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002001-05-13T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P407":[{"snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.telegraph.co.uk/news/uknews/1330072/Hitch-Hikers-Guide-author-Douglas-Adams-dies-aged-49.html","type":"string"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002014-01-03T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P1476","P123","P577","P407","P854","P813"]}]}],"P569":[{"id":"q42$D8404CDA-25E4-4334-AF13-A3290BCD9C0F","mainsnak":{"snaktype":"value","property":"P569","datatype":"time","datavalue":{"value":{"time":"+00000001952-03-11T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}},"type":"statement","rank":"normal","references":[{"hash":"3d86e0972a03346a098a5703df2232b01c418ded","snaks":{"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15222191},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"h
ttp://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P268","P248","P813"]},{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P570":[{"id":"q42$65EA9C32-B26C-469B-84FE-FC612B71D159","mainsnak":{"snaktype":"value","property":"P570","datatype":"time","datavalue":{"value":{"time":"+00000002001-05-11T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}},"type":"statement","rank":"normal","references":[{"hash":"3d86e0972a03346a098a5703df2232b01c418ded","snaks":{"P268":[{"snaktype":"value","property":"P268","datatype":"string","datavalue":{"value":"11888092r","type":"string"}}],"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15222191},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P268","P248","P813"]},{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P691":[{"id":"q42$704392C4-6E77-4E25-855F-7CF2D198DD6A","mainsnak":{"snaktype":"value","property":"P691","datatype":"string","datavalue":{"value":"jn19990000029","type":"string"}},"type":"statement","rank":"normal"}],"P9":[{"id":"q42$76d70dc8-4646-cc84-b66c-be9ed1c469e2","mainsnak":{"snaktype":"value","property":"P9","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623673},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"f135b2c65278386de09c49c7a0df1379a4f33715","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P1476":[{"snaktype":"value","property":"P1476","datatype":"monolingualtext","datavalue":{"value":{"text":"Douglas 
Adams","language":"en"},"type":"monolingualtext"}}]},"snaks-order":["P854","P364","P123","P813","P1476"]}]}],"P22":[{"id":"q42$9ac7fb72-4402-8d72-f588-a170ca5e715c","mainsnak":{"snaktype":"value","property":"P22","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623675},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"39f28d946862969dc1ab62b6c13d5404228d4303","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]}]}],"P25":[{"id":"q42$cf4cccbe-470e-e627-86a3-70ef115f601c","mainsnak":{"snaktype":"value","property":"P25","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623678},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"39f28d946862969dc1ab62b6c13d5404228d4303","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas 
Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]}]}],"P26":[{"id":"q42$b88670f8-456b-3ecb-cf3d-2bca2cf7371e","mainsnak":{"snaktype":"value","property":"P26","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623681},"type":"wikibase-entityid"}},"qualifiers":{"P580":[{"hash":"7bb58b362182d2e38c56a6ff80615bee21bd569f","snaktype":"value","property":"P580","datatype":"time","datavalue":{"value":{"time":"+00000001991-11-25T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P582":[{"hash":"a7686feea23c232690b4c31d4576b74913d7f9c1","snaktype":"value","property":"P582","datatype":"time","datavalue":{"value":{"time":"+00000002001-05-11T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"qualifiers-order":["P580","P582"],"type":"statement","rank":"normal","references":[{"hash":"39f28d946862969dc1ab62b6c13d5404228d4303","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]}]}],"P40":[{"id":"q42$70b600fa-4c0a-b3e6-9e19-1486e71c99fb","mainsnak":{"snaktype":"value","property":"P40","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":14623683},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"39f28d946862969dc1ab62b6c13d5404228d4303","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas 
Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]}]}],"P409":[{"id":"q42$506fc7c8-439d-b77f-5041-8ca85659ad57","mainsnak":{"snaktype":"value","property":"P409","datatype":"string","datavalue":{"value":"35163268","type":"string"}},"type":"statement","rank":"normal"}],"P910":[{"id":"Q42$3B111597-2138-4517-85AD-FD0056D3DEB0","mainsnak":{"snaktype":"value","property":"P910","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":8935487},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P906":[{"id":"Q42$D92DF8AE-786C-4C3E-8A33-BABD8CB06D31","mainsnak":{"snaktype":"value","property":"P906","datatype":"string","datavalue":{"value":"230807","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"6db5f234c81ddf3171f0971c57e1ac2c834b2796","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1798125},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P950":[{"id":"Q42$856BE41B-546B-4381-B671-07DC17E1F677","mainsnak":{"snaktype":"value","property":"P950","datatype":"string","datavalue":{"value":"XX1149955","type":"string"}},"type":"statement","rank":"normal"}],"P1006":[{"id":"Q42$B7643D02-6EF0-4932-A36A-3A2D4DA3F578","mainsnak":{"snaktype":"value","property":"P1006","datatype":"string","datavalue":{"value":"068744307","type":"string"}},"type":"statement","rank":"normal"}],"P1005":[{"id":"Q42$35342507-3E6E-4F3C-9BB6-F05C9F7DBD95","mainsnak":{"snaktype":"value","property":"P1005","datatype":"string","datavalue":{"value":"68537","type":"string"}},"type":"statement","rank":"normal"}],"P949":[{"id":"Q42$2D50AE02-2BD8-4F82-9DFD-B3166DEFDEC1","mainsnak":{"snaktype":"value","property":"P949","datatype":"string","datavalue":{"value":"000163846","type":"string"}},"type":"statement","rank":"normal"}],"P396":[{"id":"Q42$b4c088b8-4bd9-c037-6b4e-7a0be3730947","mainsnak":{"snaktype":"value","property":"P396","datatype":"string","datavalue":{"value":"IT\\ICCU\\RAVV\\034417","type":"string"}},"type":"statement","rank":"normal"}],"P646":[{"id":"Q42$48D9C731-BDA8-45D6-B593-437CD10A51B4","mainsnak":{"snaktype":"value","property":"P646","datatype":"string","datavalue":{"value":"/m/0282x","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"b923b0d68beb300866b87ead39f61e63ec30d8af","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":15241312},"type":"wikibase-entityid"}}],"P577":[{"snaktype":"value","property":"P577","datatype":"time","datavalue":{"value":{"time":"+00000002013-10-28T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P248","P577"]}]}],"P69":[{"id":"q42$0E9C4724-C954-4698-84A7-5CE0D296A6F2","mainsnak":{"snaktype":"value","property":"P69","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":35794},"type":"wikibase-entityid"}},"qualifiers":{"P582":[{"hash":"fe48905ec11215e5a6d03539b
37d44c73e4eab39","snaktype":"value","property":"P582","datatype":"time","datavalue":{"value":{"time":"+00000001974-01-01T00:00:00Z","timezone":0,"before":0,"after":0,"precision":9,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}],"P812":[{"hash":"81b44430e63da20d9bffc9bad4b244a1a6d30e93","snaktype":"value","property":"P812","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":186579},"type":"wikibase-entityid"}}],"P512":[{"hash":"158d7693369e716aaae6bef281ee0921a2fc5bb2","snaktype":"value","property":"P512","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1765120},"type":"wikibase-entityid"}}]},"qualifiers-order":["P582","P812","P512"],"type":"statement","rank":"normal","references":[{"hash":"39f28d946862969dc1ab62b6c13d5404228d4303","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"http://www.nndb.com/people/731/000023662/","type":"string"}}],"P364":[{"snaktype":"value","property":"P364","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}],"P357":[{"snaktype":"value","property":"P357","datatype":"string","datavalue":{"value":"Douglas Adams","type":"string"}}],"P123":[{"snaktype":"value","property":"P123","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1373513},"type":"wikibase-entityid"}}],"P813":[{"snaktype":"value","property":"P813","datatype":"time","datavalue":{"value":{"time":"+00000002013-12-07T00:00:00Z","timezone":0,"before":0,"after":0,"precision":11,"calendarmodel":"http://www.wikidata.org/entity/Q1985727"},"type":"time"}}]},"snaks-order":["P854","P364","P357","P123","P813"]},{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]},{"id":"Q42$7BC309BF-DC35-468D-A7D3-3785922B0B1F","mainsnak":{"snaktype":"value","property":"P69","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":691283},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"},{"id":"Q42$9a22ca61-4543-4039-67e8-b8b472c629de","mainsnak":{"snaktype":"value","property":"P69","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":4961791},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P1273":[{"id":"Q42$4A2873C0-D848-4F3D-8066-38204E50414C","mainsnak":{"snaktype":"value","property":"P1273","datatype":"string","datavalue":{"value":"a10667040","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"1fe0761d4c6964bd0083fc8af5f2a4d18d707aa6","snaks":{"P854":[{"snaktype":"value","property":"P854","datatype":"url","datavalue":{"value":"https://viaf.org/viaf/113230702/","type":"string"}}]},"snaks-order":["P854"]}]}],"P1415":[{"id":"Q42$F4EC4761-2DCC-4106-8156-D5D36B5FA29A","mainsnak":{"snaktype":"value","property":"P1415","datatype":"string","datavalue":{"value":"101075853","type":"string"}},"type":"statement","rank":"normal"}],"P108":[{"id":"Q42$853B16C8-1AB3-489A-831E-AEAD7E94AB87","mainsnak":{"snaktype":"value","property":"P108","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":9531},"type":"wikibase-entityid"}},"type":"statement","rank":"normal"}],"P1417":[{"id":"Q42$23C4CC8F-2AEE-4730-80E1-288C273E8D3B","mainsnak":{"snaktype":"value","property":"P1417","datat
ype":"string","datavalue":{"value":"5111","type":"string"}},"qualifiers":{"P958":[{"hash":"f7236db820d284e3ae3c9bfb2f12362e9a7f500e","snaktype":"value","property":"P958","datatype":"string","datavalue":{"value":"Douglas Adams","type":"string"}}]},"qualifiers-order":["P958"],"type":"statement","rank":"normal"}],"P800":[{"id":"Q42$FA73986E-3D1D-4CAB-B358-424B58544620","mainsnak":{"snaktype":"value","property":"P800","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":25169},"type":"wikibase-entityid"}},"type":"statement","rank":"normal","references":[{"hash":"8f8bb308b61e4e0cff924b9eb7d783d003fc3ce7","snaks":{"P248":[{"snaktype":"value","property":"P248","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":5375741},"type":"wikibase-entityid"}}]},"snaks-order":["P248"]}]}],"P998":[{"id":"Q42$BE724F6B-6981-4DE9-B90C-338768A4BFC4","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"Arts/Literature/Authors/A/Adams,_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"17da29e56d69809fde8793aaa4864de2e6bb5780","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":1860},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"preferred"},{"id":"Q42$5776B538-2441-4B9E-9C39-4E6289396763","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Dansk/Kultur/Litteratur/Forfattere/A/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"eed80ca4e1ffc12b82c55116042dabdb873707ad","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":9035},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$B60CF952-9C65-4875-A4BA-6B8516C81E99","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Fran%C3%A7ais/Arts/Litt%C3%A9rature/Genres/Science-fiction_et_fantastique/Auteurs/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"3be4fb23771c9decf6c908552444e6753215dcf4","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":150},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$A0B48E74-C934-42B9-A583-FB3EAE4BC9BA","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Deutsch/Kultur/Literatur/Autoren_und_Autorinnen/A/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"bfab56097f2ee29b68110953c09618468db6871b","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":188},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$F2632AC4-6F24-49E4-9E4E-B008F26BA8CE","mainsnak":{"snaktype":"value","property":"P998","datatype":"string","datavalue":{"value":"World/Italiano/Arte/Letteratura/Autori/A/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"a77ef6d322e3915085c305de616027d3f709c807","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":652},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"},{"id":"Q42$84B82B5A-8F33-4229-B988-BF960E676875","mainsnak":{"snaktype":"value","property":"P998","data
type":"string","datavalue":{"value":"World/Svenska/Kultur/Litteratur/Genre/Science_fiction_och_fantasy/F%C3%B6rfattare/Adams%2C_Douglas","type":"string"}},"qualifiers":{"P407":[{"hash":"feef8b68d719a5caffb99cd28280ed8133f04965","snaktype":"value","property":"P407","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":9027},"type":"wikibase-entityid"}}]},"qualifiers-order":["P407"],"type":"statement","rank":"normal"}],"P1233":[{"id":"Q42$9F55FA72-F9E5-41E4-A771-041EB1D59C28","mainsnak":{"snaktype":"value","property":"P1233","datatype":"string","datavalue":{"value":"122","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"004ec6fbee857649acdbdbad4f97b2c8571df97b","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":48183},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1207":[{"id":"Q42$00ddd8cf-48fa-609f-dd4e-977e9672c96f","mainsnak":{"snaktype":"value","property":"P1207","datatype":"string","datavalue":{"value":"n94004172","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1375":[{"id":"Q42$97db6877-4c06-88ce-2db5-aaba53383fd2","mainsnak":{"snaktype":"value","property":"P1375","datatype":"string","datavalue":{"value":"000010283","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1670":[{"id":"Q42$2370b5b3-487b-89dd-ad93-b023a2a86ac4","mainsnak":{"snaktype":"value","property":"P1670","datatype":"string","datavalue":{"value":"0052C2705","type":"string"}},"type":"statement","rank":"normal","references":[{"hash":"a51d6594fee36c7452eaed2db35a4833613a7078","snaks":{"P143":[{"snaktype":"value","property":"P143","datatype":"wikibase-item","datavalue":{"value":{"entity-type":"item","numeric-id":54919},"type":"wikibase-entityid"}}]},"snaks-order":["P143"]}]}],"P1284":[{"id":"Q42$2EE16C9C-B74B-4322-9542-4A132555B363","mainsnak":{"snaktype":"value","property":"P1284","datatype":"string","datavalue":{"value":"00000020676","type":"string"}},"type":"statement","rank":"normal"}],"P866":[{"id":"Q42$A29644ED-0377-4F88-8BA6-FAAB7DE8C7BA","mainsnak":{"snaktype":"value","property":"P866","datatype":"string","datavalue":{"value":"douglas-adams","type":"string"}},"type":"statement","rank":"normal"}]},"sitelinks":{"arwiki":{"site":"arwiki","title":"\u062f\u0648\u063a\u0644\u0627\u0633 \u0622\u062f\u0645\u0632","badges":[]},"arzwiki":{"site":"arzwiki","title":"\u062f\u0648\u062c\u0644\u0627\u0633 \u0627\u062f\u0627\u0645\u0632","badges":[]},"azwikiquote":{"site":"azwikiquote","title":"Duqlas Noel Adams","badges":[]},"barwiki":{"site":"barwiki","title":"Douglas Adams","badges":[]},"be_x_oldwiki":{"site":"be_x_oldwiki","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0437","badges":[]},"bewiki":{"site":"bewiki","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"bgwiki":{"site":"bgwiki","title":"\u0414\u044a\u0433\u043b\u0430\u0441 
\u0410\u0434\u0430\u043c\u0441","badges":[]},"bgwikiquote":{"site":"bgwikiquote","title":"\u0414\u044a\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"bswiki":{"site":"bswiki","title":"Douglas Adams","badges":[]},"bswikiquote":{"site":"bswikiquote","title":"Douglas Adams","badges":[]},"cawiki":{"site":"cawiki","title":"Douglas Adams","badges":[]},"cswiki":{"site":"cswiki","title":"Douglas Adams","badges":[]},"cswikiquote":{"site":"cswikiquote","title":"Douglas Adams","badges":[]},"cywiki":{"site":"cywiki","title":"Douglas Adams","badges":[]},"dawiki":{"site":"dawiki","title":"Douglas Adams","badges":[]},"dewiki":{"site":"dewiki","title":"Douglas Adams","badges":[]},"dewikiquote":{"site":"dewikiquote","title":"Douglas Adams","badges":[]},"elwiki":{"site":"elwiki","title":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u0386\u03bd\u03c4\u03b1\u03bc\u03c2","badges":[]},"elwikiquote":{"site":"elwikiquote","title":"\u039d\u03c4\u03ac\u03b3\u03ba\u03bb\u03b1\u03c2 \u0386\u03bd\u03c4\u03b1\u03bc\u03c2","badges":[]},"enwiki":{"site":"enwiki","title":"Douglas Adams","badges":[]},"enwikiquote":{"site":"enwikiquote","title":"Douglas Adams","badges":[]},"eowiki":{"site":"eowiki","title":"Douglas Adams","badges":[]},"eowikiquote":{"site":"eowikiquote","title":"Douglas Adams","badges":[]},"eswiki":{"site":"eswiki","title":"Douglas Adams","badges":[]},"eswikiquote":{"site":"eswikiquote","title":"Douglas Adams","badges":[]},"etwiki":{"site":"etwiki","title":"Douglas Adams","badges":[]},"etwikiquote":{"site":"etwikiquote","title":"Douglas Adams","badges":[]},"euwiki":{"site":"euwiki","title":"Douglas Adams","badges":[]},"fawiki":{"site":"fawiki","title":"\u062f\u0627\u06af\u0644\u0627\u0633 \u0622\u062f\u0627\u0645\u0632","badges":[]},"fawikiquote":{"site":"fawikiquote","title":"\u062f\u0627\u06af\u0644\u0627\u0633 \u0622\u062f\u0627\u0645\u0632","badges":[]},"fiwiki":{"site":"fiwiki","title":"Douglas Adams","badges":[]},"fiwikiquote":{"site":"fiwikiquote","title":"Douglas Adams","badges":[]},"frwiki":{"site":"frwiki","title":"Douglas Adams","badges":[]},"frwikiquote":{"site":"frwikiquote","title":"Douglas Adams","badges":[]},"gawiki":{"site":"gawiki","title":"Douglas Adams","badges":[]},"glwiki":{"site":"glwiki","title":"Douglas Adams","badges":[]},"glwikiquote":{"site":"glwikiquote","title":"Douglas Adams","badges":[]},"hewiki":{"site":"hewiki","title":"\u05d3\u05d0\u05d2\u05dc\u05e1 \u05d0\u05d3\u05de\u05e1","badges":[]},"hewikiquote":{"site":"hewikiquote","title":"\u05d3\u05d0\u05d2\u05dc\u05e1 \u05d0\u05d3\u05de\u05e1","badges":[]},"hrwiki":{"site":"hrwiki","title":"Douglas Adams","badges":[]},"huwiki":{"site":"huwiki","title":"Douglas Adams","badges":[]},"huwikiquote":{"site":"huwikiquote","title":"Douglas Adams","badges":[]},"hywiki":{"site":"hywiki","title":"\u0534\u0578\u0582\u0563\u056c\u0561\u057d \u0531\u0564\u0561\u0574\u057d","badges":[]},"hywikiquote":{"site":"hywikiquote","title":"\u0534\u0578\u0582\u0563\u056c\u0561\u057d \u0531\u0564\u0561\u0574\u057d","badges":[]},"idwiki":{"site":"idwiki","title":"Douglas Adams","badges":[]},"iowiki":{"site":"iowiki","title":"Douglas Adams","badges":[]},"iswiki":{"site":"iswiki","title":"Douglas Adams","badges":[]},"itwiki":{"site":"itwiki","title":"Douglas Adams","badges":[]},"itwikiquote":{"site":"itwikiquote","title":"Douglas Adams","badges":[]},"jawiki":{"site":"jawiki","title":"\u30c0\u30b0\u30e9\u30b9\u30fb\u30a2\u30c0\u30e0\u30ba","badges":[]},"jvwiki":{"site":"jvwiki","title":"Douglas 
Adams","badges":[]},"kawiki":{"site":"kawiki","title":"\u10d3\u10d0\u10d2\u10da\u10d0\u10e1 \u10d0\u10d3\u10d0\u10db\u10e1\u10d8","badges":[]},"kowiki":{"site":"kowiki","title":"\ub354\uae00\ub7ec\uc2a4 \uc560\ub364\uc2a4","badges":[]},"lawiki":{"site":"lawiki","title":"Duglassius Adams","badges":[]},"liwikiquote":{"site":"liwikiquote","title":"Douglas Adams","badges":[]},"ltwikiquote":{"site":"ltwikiquote","title":"Douglas Adamsas","badges":[]},"lvwiki":{"site":"lvwiki","title":"Duglass Adamss","badges":[]},"mgwiki":{"site":"mgwiki","title":"Douglas Adams","badges":[]},"mkwiki":{"site":"mkwiki","title":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"mlwiki":{"site":"mlwiki","title":"\u0d21\u0d17\u0d4d\u0d32\u0d38\u0d4d \u0d06\u0d21\u0d02\u0d38\u0d4d","badges":[]},"mrwiki":{"site":"mrwiki","title":"\u0921\u0917\u094d\u0932\u0938 \u0905\u200d\u0945\u0921\u092e\u094d\u0938","badges":[]},"nlwiki":{"site":"nlwiki","title":"Douglas Adams","badges":[]},"nlwikiquote":{"site":"nlwikiquote","title":"Douglas Adams","badges":[]},"nnwiki":{"site":"nnwiki","title":"Douglas Adams","badges":[]},"nowiki":{"site":"nowiki","title":"Douglas Adams","badges":[]},"ocwiki":{"site":"ocwiki","title":"Douglas Adams","badges":[]},"plwiki":{"site":"plwiki","title":"Douglas Adams","badges":[]},"plwikiquote":{"site":"plwikiquote","title":"Douglas Adams","badges":[]},"ptwiki":{"site":"ptwiki","title":"Douglas Adams","badges":[]},"ptwikiquote":{"site":"ptwikiquote","title":"Douglas Adams","badges":[]},"rowiki":{"site":"rowiki","title":"Douglas Adams","badges":[]},"ruwiki":{"site":"ruwiki","title":"\u0410\u0434\u0430\u043c\u0441, \u0414\u0443\u0433\u043b\u0430\u0441","badges":[]},"ruwikiquote":{"site":"ruwikiquote","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u041d\u043e\u044d\u043b\u044c \u0410\u0434\u0430\u043c\u0441","badges":[]},"scowiki":{"site":"scowiki","title":"Douglas Adams","badges":[]},"shwiki":{"site":"shwiki","title":"Douglas Adams","badges":[]},"simplewiki":{"site":"simplewiki","title":"Douglas Adams","badges":[]},"simplewikiquote":{"site":"simplewikiquote","title":"Douglas Adams","badges":[]},"skwiki":{"site":"skwiki","title":"Douglas Adams","badges":[]},"skwikiquote":{"site":"skwikiquote","title":"Douglas Adams","badges":[]},"slwiki":{"site":"slwiki","title":"Douglas Adams","badges":[]},"sqwiki":{"site":"sqwiki","title":"Douglas Adams","badges":[]},"srwiki":{"site":"srwiki","title":"\u0414\u0430\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"svwiki":{"site":"svwiki","title":"Douglas Adams","badges":[]},"svwikiquote":{"site":"svwikiquote","title":"Douglas Adams","badges":[]},"tawiki":{"site":"tawiki","title":"\u0b9f\u0b95\u0bcd\u0bb3\u0bb8\u0bcd \u0b86\u0b9f\u0bae\u0bcd\u0bb8\u0bcd","badges":[]},"trwiki":{"site":"trwiki","title":"Douglas Adams","badges":[]},"trwikiquote":{"site":"trwikiquote","title":"Douglas Adams","badges":[]},"ukwiki":{"site":"ukwiki","title":"\u0414\u0443\u0433\u043b\u0430\u0441 \u0410\u0434\u0430\u043c\u0441","badges":[]},"urwiki":{"site":"urwiki","title":"\u0688\u06af\u0644\u0633 \u0627\u06cc\u0688\u0645\u0633","badges":[]},"vepwiki":{"site":"vepwiki","title":"Adams Duglas","badges":[]},"viwiki":{"site":"viwiki","title":"Douglas Adams","badges":[]},"warwiki":{"site":"warwiki","title":"Douglas 
Adams","badges":[]},"zhwiki":{"site":"zhwiki","title":"\u9053\u683c\u62c9\u65af\u00b7\u4e9a\u5f53\u65af","badges":[]},"zhwikiquote":{"site":"zhwikiquote","title":"\u9053\u683c\u62c9\u65af\u00b7\u4e9e\u7576\u65af","badges":[]}}},"P31":{"pageid":3918489,"ns":120,"title":"Property:P31","lastrevid":199399943,"modified":"2015-02-24T17:23:05Z","id":"P31","type":"property","aliases":{"fr":[{"language":"fr","value":"est un"},{"language":"fr","value":"est une"},{"language":"fr","value":"rdf:type"}],"de":[{"language":"de","value":"ist Instanz von"},{"language":"de","value":"Instanz von"},{"language":"de","value":"ist eine Instanz von"},{"language":"de","value":"war ein(e)"}],"he":[{"language":"he","value":"\u05d4\u05d9\u05d0"},{"language":"he","value":"\u05d3\u05d5\u05d2\u05de\u05d4 \u05e9\u05dc"},{"language":"he","value":"\u05de\u05e7\u05e8\u05d4 \u05e9\u05dc"}],"en":[{"language":"en","value":"is a"},{"language":"en","value":"is an"},{"language":"en","value":"rdf:type"}],"nb":[{"language":"nb","value":"element av"},{"language":"nb","value":"eksempel p\u00e5"},{"language":"nb","value":"utgave av"},{"language":"nb","value":"eksemplar av"},{"language":"nb","value":"instans av"},{"language":"nb","value":"er"}],"ru":[{"language":"ru","value":"\u043f\u0440\u0435\u0434\u0441\u0442\u0430\u0432\u043b\u044f\u0435\u0442 \u0441\u043e\u0431\u043e\u0439"},{"language":"ru","value":"\u044f\u0432\u043b\u044f\u0435\u0442\u0441\u044f"},{"language":"ru","value":"\u0447\u0430\u0441\u0442\u043d\u044b\u0439 \u0441\u043b\u0443\u0447\u0430\u0439 \u0434\u043b\u044f"},{"language":"ru","value":"\u044f\u0432\u043b\u044f\u0435\u0442\u0441\u044f \u043e\u0434\u043d\u0438\u043c \u0438\u0437 \u044d\u043a\u0437\u0435\u043c\u043f\u043b\u044f\u0440\u043e\u0432"},{"language":"ru","value":"\u043f\u0440\u0438\u043d\u0430\u0434\u043b\u0435\u0436\u0438\u0442 \u043a\u043b\u0430\u0441\u0441\u0443"},{"language":"ru","value":"\u044f\u0432\u043b\u044f\u0435\u0442\u0441\u044f \u043e\u0434\u043d\u0438\u043c \u0438\u0437"},{"language":"ru","value":"\u044d\u043a\u0437\u0435\u043c\u043f\u043b\u044f\u0440 \u043e\u0442"},{"language":"ru","value":"\u043a\u043b\u0430\u0441\u0441 \u043e\u0431\u044a\u0435\u043a\u0442\u0430:"},{"language":"ru","value":"\u0432\u0438\u0434 \u0441\u0443\u0449\u043d\u043e\u0441\u0442\u0438"},{"language":"ru","value":"\u044d\u0442\u043e"}],"vi":[{"language":"vi","value":"l\u00e0 c\u00e1i"}],"ca":[{"language":"ca","value":"ONU \u00e9s"},{"language":"ca","value":"Es Una"},{"language":"ca","value":"es Instancia de"}],"th":[{"language":"th","value":"\u0e04\u0e37\u0e2d"}],"uk":[{"language":"uk","value":"\u0454"},{"language":"uk","value":"\u0446\u0435"}],"ilo":[{"language":"ilo","value":"ket maysa a"},{"language":"ilo","value":"ket maysa nga"}],"fa":[{"language":"fa","value":"\u0647\u0633\u062a \u06cc\u06a9"},{"language":"fa","value":"\u0627\u0633\u062a \u06cc\u06a9:"},{"language":"fa","value":"\u06cc\u06a9 \u0645\u062b\u0627\u0644 \u0627\u0632"},{"language":"fa","value":"\u0646\u0645\u0648\u0646\u0647 \u0627\u06cc \u0627\u0632"},{"language":"fa","value":"\u0645\u062b\u0627\u0644\u06cc \u0627\u0632"},{"language":"fa","value":"\u0627\u0633\u062a \u06cc\u06a9"},{"language":"fa","value":"\u0632\u06cc\u0631\u0645\u062c\u0645\u0648\u0639\u0647\u0654"}],"ro":[{"language":"ro","value":"este o"},{"language":"ro","value":"este un"},{"language":"ro","value":"este o/un"}],"es":[{"language":"es","value":"es una"},{"language":"es","value":"es un"},{"language":"es","value":"es un/una"}],"it":[{"language":"it","value":"\u00e8 
un"},{"language":"it","value":"\u00e8 una"},{"language":"it","value":"\u00e8 uno"}],"en-ca":[{"language":"en-ca","value":"is a"},{"language":"en-ca","value":"is an"}],"en-gb":[{"language":"en-gb","value":"is a"},{"language":"en-gb","value":"is an"}],"ja":[{"language":"ja","value":"\u5206\u985e"},{"language":"ja","value":"\u7a2e\u985e"},{"language":"ja","value":"\u30a4\u30f3\u30b9\u30bf\u30f3\u30b9\u306e\u5143"},{"language":"ja","value":"\u5b9f\u4f53\u306e\u5143"},{"language":"ja","value":"is a"},{"language":"ja","value":"is-a"},{"language":"ja","value":"\u30af\u30e9\u30b9"}],"de-at":[{"language":"de-at","value":"ist ein"},{"language":"de-at","value":"ist eine"},{"language":"de-at","value":"ist Instanz von"},{"language":"de-at","value":"Instanz von"}],"nl":[{"language":"nl","value":"instantie van"},{"language":"nl","value":"exemplaar van"},{"language":"nl","value":"was een"},{"language":"nl","value":"rdf:type"}],"de-ch":[{"language":"de-ch","value":"ist ein"},{"language":"de-ch","value":"ist eine"},{"language":"de-ch","value":"ist Instanz von"},{"language":"de-ch","value":"Instanz von"}],"bar":[{"language":"bar","value":"is a"},{"language":"bar","value":"is a Instanz vo"},{"language":"bar","value":"Instanz vo"}],"sh":[{"language":"sh","value":"je"},{"language":"sh","value":"su"},{"language":"sh","value":"jest"}],"bg":[{"language":"bg","value":"e"}],"zh":[{"language":"zh","value":"\u662f\u4e00\u4e2a"},{"language":"zh","value":"\u6027\u8d28"},{"language":"zh","value":"\u662f\u4e00\u500b"},{"language":"zh","value":"\u6027\u8cea"}],"zh-hant":[{"language":"zh-hant","value":"\u662f\u4e00\u500b"}],"yi":[{"language":"yi","value":"\u05d0\u05d9\u05d6 \u05d0"},{"language":"yi","value":"\u05d0\u05d9\u05d6 \u05d0\u05df"}],"cs":[{"language":"cs","value":"je"}],"fi":[{"language":"fi","value":"on"},{"language":"fi","value":"kuuluu ryhm\u00e4\u00e4n"}],"be-tarask":[{"language":"be-tarask","value":"\u0432\u044b\u043a\u043b\u044e\u0447\u043d\u044b \u0432\u044b\u043f\u0430\u0434\u0430\u043a \u043f\u0430\u043d\u044f\u0442\u043a\u0443"}],"pt":[{"language":"pt","value":"\u00e9 um"},{"language":"pt","value":"\u00e9 uma"},{"language":"pt","value":"natureza do elemento"}],"te":[{"language":"te","value":"\u0c30\u0c15\u0c02"},{"language":"te","value":"\u0c07\u0c26\u0c3f \u0c12\u0c15"}],"or":[{"language":"or","value":"\u0b09\u0b26\u0b3e\u0b39\u0b30\u0b23"}],"hu":[{"language":"hu","value":"kateg\u00f3ria"},{"language":"hu","value":"p\u00e9ld\u00e1nya ennek"},{"language":"hu","value":"p\u00e9lda erre"}],"als":[{"language":"als","value":"\u00ecsch a"},{"language":"als","value":"isch e"},{"language":"als","value":"isch en"},{"language":"als","value":"isch \u00e2"},{"language":"als","value":"ischt e"}],"zh-cn":[{"language":"zh-cn","value":"\u5c5e\u4e8e"}],"scn":[{"language":"scn","value":"\u00e8' nu"},{"language":"scn","value":"\u00e8' na"},{"language":"scn","value":"\u00e8' un"},{"language":"scn","value":"esemplari di"}],"da":[{"language":"da","value":"er en"},{"language":"da","value":"er et"}],"nn":[{"language":"nn","value":"er ein"},{"language":"nn","value":"er ei"},{"language":"nn","value":"er eit"}],"sv":[{"language":"sv","value":"\u00e4r en"},{"language":"sv","value":"\u00e4r ett"}]},"labels":{"en":{"language":"en","value":"instance of"},"fr":{"language":"fr","value":"nature de l'\u00e9l\u00e9ment"},"de":{"language":"de","value":"ist ein(e)"},"it":{"language":"it","value":"istanza di"},"pt-br":{"language":"pt-br","value":"inst\u00e2ncia 
de"},"eo":{"language":"eo","value":"estas"},"he":{"language":"he","value":"\u05d4\u05d5\u05d0"},"es":{"language":"es","value":"instancia de"},"zh-hans":{"language":"zh-hans","value":"\u6027\u8d28"},"fi":{"language":"fi","value":"esiintym\u00e4 kohteesta"},"hu":{"language":"hu","value":"ez egy"},"ru":{"language":"ru","value":"\u044d\u0442\u043e \u0447\u0430\u0441\u0442\u043d\u044b\u0439 \u0441\u043b\u0443\u0447\u0430\u0439 \u043f\u043e\u043d\u044f\u0442\u0438\u044f"},"hr":{"language":"hr","value":"je"},"zh-hant":{"language":"zh-hant","value":"\u6027\u8cea"},"nl":{"language":"nl","value":"is een"},"el":{"language":"el","value":"\u03b5\u03af\u03bd\u03b1\u03b9"},"pl":{"language":"pl","value":"jest to"},"sr":{"language":"sr","value":"\u0458\u0435"},"ca":{"language":"ca","value":"\u00e9s inst\u00e0ncia de"},"cs":{"language":"cs","value":"instance (\u010deho)"},"nb":{"language":"nb","value":"forekomst av"},"pt":{"language":"pt","value":"inst\u00e2ncia de"},"ilo":{"language":"ilo","value":"pagarigan iti"},"sl":{"language":"sl","value":"primerek od"},"be":{"language":"be","value":"\u0433\u044d\u0442\u0430"},"ko":{"language":"ko","value":"\uc885\ub958"},"nn":{"language":"nn","value":"f\u00f8rekomst av"},"vi":{"language":"vi","value":"l\u00e0 m\u1ed9t"},"be-tarask":{"language":"be-tarask","value":"\u0430\u0441\u043e\u0431\u043d\u044b \u0432\u044b\u043f\u0430\u0434\u0430\u043a \u043f\u0430\u043d\u044f\u0442\u043a\u0443"},"bs":{"language":"bs","value":"je"},"th":{"language":"th","value":"\u0e40\u0e1b\u0e47\u0e19"},"uk":{"language":"uk","value":"\u0454 \u043e\u0434\u043d\u0438\u043c \u0456\u0437"},"en-gb":{"language":"en-gb","value":"instance of"},"en-ca":{"language":"en-ca","value":"instance of"},"ja":{"language":"ja","value":"\u4ee5\u4e0b\u306e\u5b9f\u4f53"},"uz":{"language":"uz","value":"bu"},"lv":{"language":"lv","value":"ir"},"la":{"language":"la","value":"est"},"fa":{"language":"fa","value":"\u06cc\u06a9 \u0646\u0645\u0648\u0646\u0647 \u0627\u0632"},"sv":{"language":"sv","value":"instans av"},"nds":{"language":"nds","value":"is en"},"ro":{"language":"ro","value":"este un/o"},"ta":{"language":"ta","value":"\u0b86\u0ba9\u0ba4\u0bc1"},"min":{"language":"min","value":"adolah"},"id":{"language":"id","value":"adalah"},"gl":{"language":"gl","value":"\u00e9 un/unha"},"is":{"language":"is","value":"er"},"af":{"language":"af","value":"is 'n"},"ka":{"language":"ka","value":"\u10d0\u10e0\u10d8\u10e1"},"de-at":{"language":"de-at","value":"ist eine Instanz von"},"da":{"language":"da","value":"tilf\u00e6lde af"},"sco":{"language":"sco","value":"instance o"},"sk":{"language":"sk","value":"je"},"de-ch":{"language":"de-ch","value":"ist eine Instanz von"},"bar":{"language":"bar","value":"is a Instanz vo"},"simple":{"language":"simple","value":"instance of"},"bn":{"language":"bn","value":"\u09a8\u09bf\u09a6\u09b0\u09cd\u09b6\u09a8"},"lmo":{"language":"lmo","value":"l'\u00e8 un(a)"},"nds-nl":{"language":"nds-nl","value":"is n"},"sh":{"language":"sh","value":"je(su)"},"br":{"language":"br","value":"doare an elfenn"},"bg":{"language":"bg","value":"\u0435\u043a\u0437\u0435\u043c\u043f\u043b\u044f\u0440 \u043d\u0430"},"mr":{"language":"mr","value":"\u092a\u094d\u0930\u0915\u093e\u0930"},"ckb":{"language":"ckb","value":"\u0646\u0645\u0648\u0648\u0646\u06d5\u06cc\u06d5\u06a9 \u0644\u06d5"},"ar":{"language":"ar","value":"\u062d\u0627\u0644\u0629 \u062e\u0627\u0635\u0629 \u0645\u0646"},"et":{"language":"et","value":"\u00fcksikjuht n\u00e4htusest"},"pcd":{"language":"pcd","value":"est 
un"},"tr":{"language":"tr","value":"bir"},"hi":{"language":"hi","value":"\u0909\u0926\u0939\u093e\u0930\u0923 \u0939\u0948"},"sr-ec":{"language":"sr-ec","value":"\u0458\u0435"},"co":{"language":"co","value":"istanza di"},"oc":{"language":"oc","value":"natura de l'element"},"mk":{"language":"mk","value":"\u0435"},"yi":{"language":"yi","value":"\u05e4\u05bf\u05d0\u05b7\u05dc"},"zh":{"language":"zh","value":"\u6027\u8d28"},"jbo":{"language":"jbo","value":"serese mupli"},"gu":{"language":"gu","value":"\u0a89\u0aa6\u0abe\u0ab9\u0ab0\u0aa3"},"zh-cn":{"language":"zh-cn","value":"\u6027\u8d28"},"ms":{"language":"ms","value":"contoh"},"tl":{"language":"tl","value":"ay halimbawa ng"},"zh-tw":{"language":"zh-tw","value":"\u6027\u8cea"},"rm":{"language":"rm","value":"\u00e8 in(a)"},"ksh":{"language":"ksh","value":"es e Beischpell f\u00f6r e(n(e))"},"lb":{"language":"lb","value":"geh\u00e9iert zu"},"csb":{"language":"csb","value":"to je"},"ts":{"language":"ts","value":"Nchumu"},"gsw":{"language":"gsw","value":"isch e"},"mzn":{"language":"mzn","value":"\u062f\u0650\u0644\u0650\u0648\u0633"},"zh-hk":{"language":"zh-hk","value":"\u6027\u8cea"},"te":{"language":"te","value":"\u0c05\u0c02\u0c36"},"de-formal":{"language":"de-formal","value":"ist ein/eine"},"or":{"language":"or","value":"\u0b26\u0b43\u0b37\u0b4d\u0b1f\u0b3e\u0b28\u0b4d\u0b24"},"sr-el":{"language":"sr-el","value":"je"},"stq":{"language":"stq","value":"is n(e)"},"als":{"language":"als","value":"isch a"},"ia":{"language":"ia","value":"instantia de"},"nap":{"language":"nap","value":"no tipo 'e"},"yue":{"language":"yue","value":"\u4fc2\u4e00\u500b"},"mg":{"language":"mg","value":"karazana"},"scn":{"language":"scn","value":"esimplari di"},"eu":{"language":"eu","value":"honako hau da"},"fy":{"language":"fy","value":"is in"},"ml":{"language":"ml","value":"\u0d07\u0d28\u0d02"},"tt":{"language":"tt","value":"\u0442\u04e9\u0448\u0435\u043d\u0447\u04d9\u043d\u0435\u04a3 \u0430\u0435\u0440\u044b\u043c \u043e\u0447\u0440\u0430\u0433\u044b"}},"descriptions":{"en":{"language":"en","value":"this item is a specific example and a member of that class"},"it":{"language":"it","value":"questo elemento \u00e8 un'istanza di questa classe, categoria o gruppo di oggetti"},"fr":{"language":"fr","value":"nature de, expression de ou instance de"},"pt-br":{"language":"pt-br","value":"este item \u00e9 uma inst\u00e2ncia deste outro item"},"hu":{"language":"hu","value":"az elem a m\u00e1sik elem p\u00e9ld\u00e1nya"},"hr":{"language":"hr","value":"ova stavka je primjer ove druge stavke"},"de":{"language":"de","value":"Auspr\u00e4gung oder Exemplar einer Sache, Mitglied einer Gruppe"},"el":{"language":"el","value":"\u03b1\u03c5\u03c4\u03cc \u03c4\u03bf \u03b1\u03bd\u03c4\u03b9\u03ba\u03b5\u03af\u03bc\u03b5\u03bd\u03bf \u03b5\u03af\u03bd\u03b1\u03b9 \u03bc\u03b9\u03b1 \u03ad\u03ba\u03c6\u03c1\u03b1\u03c3\u03b7 \u03b1\u03c5\u03c4\u03bf\u03cd \u03c4\u03bf\u03c5 \u03ac\u03bb\u03bb\u03bf\u03c5 \u03b1\u03bd\u03c4\u03b9\u03ba\u03b5\u03af\u03bc\u03b5\u03bd\u03bf\u03c5"},"fi":{"language":"fi","value":"kohde, johon ominaisuus liitet\u00e4\u00e4n, on esiintym\u00e4 ominaisuuden arvoksi asetettavasta kohteesta"},"ilo":{"language":"ilo","value":"daytoy a banag ket maysa a pagarigan iti daytoy a sabali a banag"},"nb":{"language":"nb","value":"dette elementet er et konkret objekt/eksemplar (instans) av denne klassen, kategorien eller objektgruppen"},"es":{"language":"es","value":"este elemento es un ejemplar de otro elemento"},"vi":{"language":"vi","value":"kho\u1ea3n m\u1ee5c n\u00e0y 
l\u00e0 m\u1ed9t th\u1ef1c th\u1ec3 c\u1ee7a kho\u1ea3n m\u1ee5c kia"},"be-tarask":{"language":"be-tarask","value":"\u0430\u0433\u0443\u043b\u044c\u043d\u0430\u0435 \u0430\u0437\u043d\u0430\u0447\u044d\u043d\u044c\u043d\u0435, \u0447\u044b\u043c \u0437\u044c\u044f\u045e\u043b\u044f\u0435\u0446\u0446\u0430 \u0430\u0431\u2019\u0435\u043a\u0442"},"ja":{"language":"ja","value":"\u3053\u306e\u9805\u76ee\u3092\u30a4\u30f3\u30b9\u30bf\u30f3\u30b9\uff08\u5b9f\u4f53\uff09\u3068\u3059\u308b\u7a2e\u985e\u30fb\u6982\u5ff5"},"en-gb":{"language":"en-gb","value":"this item is an instance of this other item"},"en-ca":{"language":"en-ca","value":"the subject is an instance of the object"},"pl":{"language":"pl","value":"stanowi przyk\u0142ad (jest elementem) danej kategorii/klasy"},"lv":{"language":"lv","value":"\u0161\u012b vien\u012bba ir \u0161\u012bs citas vien\u012bbas instance"},"ca":{"language":"ca","value":"aquest element \u00e9s un objecte concret (inst\u00e0ncia) d'aquesta classe, categoria o grup d'objectes"},"sv":{"language":"sv","value":"\u00e4r ett konkret objekt (instans) av denna klass, kategori eller objektgrupp"},"fa":{"language":"fa","value":"\u0622\u06cc\u062a\u0645 \u06cc\u06a9 \u0646\u0648\u0639 ... \u0627\u0633\u062a"},"gl":{"language":"gl","value":"o elemento \u00e9 unha instancia doutro elemento"},"is":{"language":"is","value":"\u00deessi hlutur er d\u00e6mi um annan hlut"},"nl":{"language":"nl","value":"dit item is een exemplaar (instantie) van deze groep elementen"},"ru":{"language":"ru","value":"\u0434\u0430\u043d\u043d\u044b\u0439 \u044d\u043b\u0435\u043c\u0435\u043d\u0442 \u043f\u0440\u0435\u0434\u0441\u0442\u0430\u0432\u043b\u044f\u0435\u0442 \u0441\u043e\u0431\u043e\u0439 \u043a\u043e\u043d\u043a\u0440\u0435\u0442\u043d\u044b\u0439 \u043e\u0431\u044a\u0435\u043a\u0442 (\u044d\u043a\u0437\u0435\u043c\u043f\u043b\u044f\u0440 / \u0447\u0430\u0441\u0442\u043d\u044b\u0439 \u0441\u043b\u0443\u0447\u0430\u0439) \u043a\u043b\u0430\u0441\u0441\u0430, \u043a\u0430\u0442\u0435\u0433\u043e\u0440\u0438\u0438 \u0438\u043b\u0438 \u0433\u0440\u0443\u043f\u043f\u044b \u043e\u0431\u044a\u0435\u043a\u0442\u043e\u0432"},"uk":{"language":"uk","value":"\u0446\u0435\u0439 \u0435\u043b\u0435\u043c\u0435\u043d\u0442 \u0454 \u0447\u0430\u0441\u0442\u0438\u043d\u043e\u044e \u043c\u043d\u043e\u0436\u0438\u043d\u0438 \u0456\u043d\u0448\u0438\u0445 \u0435\u043b\u0435\u043c\u0435\u043d\u0442\u0456\u0432"},"de-at":{"language":"de-at","value":"Auspr\u00e4gung oder Exemplar einer Sache"},"da":{"language":"da","value":"dette emne er et konkret objekt af denne kategori, klasse eller objektgruppe."},"ro":{"language":"ro","value":"acest element este un exemplar din clasa definit\u0103 de acel element"},"de-ch":{"language":"de-ch","value":"Auspr\u00e4gung oder Exemplar einer Sache"},"nds-nl":{"language":"nds-nl","value":"dit item is n eksemplaor/instansie van t tweede item (Veurbeeld: \"Mark Rutte\" is nen \"politieker\")"},"he":{"language":"he","value":"\u05d4\u05e0\u05d3\u05d5\u05df \u05d4\u05d5\u05d0 \u05de\u05e7\u05e8\u05d4 \u05e9\u05dc"},"bg":{"language":"bg","value":"\u043e\u0431\u0435\u043a\u0442\u044a\u0442 \u0435 \u0435\u043a\u0437\u0435\u043c\u043f\u043b\u044f\u0440 \u043e\u0442 \u0434\u0430\u0434\u0435\u043d \u043a\u043b\u0430\u0441"},"hi":{"language":"hi","value":"\u092f\u0939 \u0906\u0907\u091f\u092e \u0907\u0938 \u0905\u0928\u094d\u092f \u0906\u0907\u091f\u092e \u0915\u093e \u0909\u0926\u0939\u093e\u0930\u0923 \u0939\u0948"},"sr":{"language":"sr","value":"\u043e\u0432\u0430 
\u0441\u0442\u0430\u0432\u043a\u0430 \u0458\u0435 \u043a\u043e\u043d\u043a\u0440\u0435\u0442\u0430\u043d \u043e\u0431\u0458\u0435\u043a\u0430\u0442 (\u0438\u043d\u0441\u0442\u0430\u043d\u0446\u0430) \u043a\u043b\u0430\u0441\u0435, \u043a\u0430\u0442\u0435\u0433\u043e\u0440\u0438\u0458\u0435 \u0438\u043b\u0438 \u0433\u0440\u0443\u043f\u0435 \u043e\u0431\u0458\u0435\u043a\u0430\u0442\u0430"},"sr-ec":{"language":"sr-ec","value":"\u043e\u0432\u0430 \u0441\u0442\u0430\u0432\u043a\u0430 \u0458\u0435 \u043a\u043e\u043d\u043a\u0440\u0435\u0442\u0430\u043d \u043e\u0431\u0458\u0435\u043a\u0430\u0442 (\u0438\u043d\u0441\u0442\u0430\u043d\u0446\u0430) \u043a\u043b\u0430\u0441\u0435, \u043a\u0430\u0442\u0435\u0433\u043e\u0440\u0438\u0458\u0435 \u0438\u043b\u0438 \u0433\u0440\u0443\u043f\u0435 \u043e\u0431\u0458\u0435\u043a\u0430\u0442\u0430"},"mk":{"language":"mk","value":"\u043f\u0440\u0435\u0434\u043c\u0435\u0442\u043e\u0442 \u0435 \u043f\u0440\u0438\u043c\u0435\u0440\u043e\u043a/\u0441\u043b\u0443\u0447\u0430\u0458 \u043d\u0430 \u0434\u0440\u0443\u0433 \u043f\u0440\u0435\u0434\u043c\u0435\u0442"},"cs":{"language":"cs","value":"tato polo\u017eka je jedna konkr\u00e9tn\u00ed v\u011bc (exempl\u00e1\u0159, p\u0159\u00edklad) pat\u0159\u00edc\u00ed do t\u00e9to t\u0159\u00eddy, kategorie nebo skupiny p\u0159edm\u011bt\u016f"},"gu":{"language":"gu","value":"\u0a86 \u0ab2\u0ac7\u0a96 \u0a86 \u0aaa\u0acd\u0ab0\u0a95\u0abe\u0ab0 \u0a85\u0aa5\u0ab5\u0abe \u0ab6\u0acd\u0ab0\u0ac7\u0aa3\u0ac0\u0aa8\u0abe \u0a85\u0aa8\u0acd\u0aaf \u0ab2\u0ac7\u0a96\u0acb\u0aa8\u0ac1\u0a82 \u0ab8\u0a9a\u0acb\u0a9f \u0a89\u0aa6\u0abe\u0ab9\u0ab0\u0aa3 \u0a9b\u0ac7."},"ksh":{"language":"ksh","value":"di Saach es ene beschtemmpte, konkrete J\u00e4\u00e4jeschtand vun d\u00e4 Zoot, udder Jropp, udder d\u00e4 Aat"},"eo":{"language":"eo","value":"tiu \u0109i ero estas konkreta a\u0135o (instanco) de tiu \u0109i klaso, kategorio a\u016d objektogrupo"},"ko":{"language":"ko","value":"\ud56d\ubaa9\uc774 \uc18d\ud558\ub294 \uacf3"},"oc":{"language":"oc","value":"natura de, expression de o exemplar de"},"mzn":{"language":"mzn","value":"\u0622\u06cc\u062a\u0645 \u062c\u0648\u0631"},"zh":{"language":"zh","value":"\u9805\u6240\u5c6c\u7684\uff0c\u4ee5\u9805\u70ba\u5be6\u4f8b\u7684\u985e\u5225"},"zh-hk":{"language":"zh-hk","value":"\u9805\u6240\u5c6c\u7684\uff0c\u4ee5\u9805\u70ba\u5be6\u4f8b\u7684\u985e\u5225"},"pt":{"language":"pt","value":"este item \u00e9 uma inst\u00e2ncia deste outro item"},"zh-hans":{"language":"zh-hans","value":"\u9879\u6240\u5c5e\u7684\uff0c\u4ee5\u9879\u4e3a\u5b9e\u4f8b\u7684\u7c7b\u522b"},"zh-cn":{"language":"zh-cn","value":"\u9879\u6240\u5c5e\u7684\uff0c\u4ee5\u9879\u4e3a\u5b9e\u4f8b\u7684\u7c7b\u522b"},"id":{"language":"id","value":"item ini adalah obyek konkret (instans) dari kelas, kategori, atau kelompok obyek ini"},"sr-el":{"language":"sr-el","value":"ova stavka je konkretan objekat (instanca) klase, kategorije ili grupe objekata"},"br":{"language":"br","value":"bez' ez eo an elfenn-ma\u00f1 lodek eus ar rummad pe renkad-ma\u00f1, pe un elfenn eus ur strollad"},"zh-hant":{"language":"zh-hant","value":"\u9805\u6240\u5c6c\u7684\uff0c\u4ee5\u9805\u70ba\u5be6\u4f8b\u7684\u985e\u5225"},"sl":{"language":"sl","value":"je konkretna izvedba objekta v razredu, kategoriji ali skupini objektov"},"nap":{"language":"nap","value":"sto fatto o sta cosa \u00e8 no tipo 'e nato 
elemento"},"rif":{"language":"rif","value":"\u062d\u062c\u064a\u062c"},"zh-tw":{"language":"zh-tw","value":"\u9805\u6240\u5c6c\u7684\uff0c\u4ee5\u9805\u70ba\u5be6\u4f8b\u7684\u985e\u5225"},"yue":{"language":"yue","value":"\u5462\u4ef6\u5622\u4fc2\u4ee5\u4e0b\u6982\u5ff5\u5605\u4e00\u500b\u5be6\u4f8b"},"scn":{"language":"scn","value":"sta cosa \u00e8' n'esemplari cuncretu di sta classi, catiguria, o gruppu di cosi"},"eu":{"language":"eu","value":"elementu hau mota, kategoria edo ale talde honen ale (adibide) konkretu bat da"},"my":{"language":"my","value":"\u103b\u1019\u1014\u1039\u1019\u102c\u1018\u102c\u101e\u102c"}},"claims":{"P1628":[{"id":"P31$696d3f45-4cba-b3db-1648-0da6c3b29380","mainsnak":{"snaktype":"value","property":"P1628","datatype":"url","datavalue":{"value":"http://www.w3.org/1999/02/22-rdf-syntax-ns#type","type":"string"}},"type":"statement","rank":"normal"}]},"datatype":"wikibase-item"}},"success":1}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-RandomImage.jpg.json000066400000000000000000000006571444772566300325430ustar00rootroot00000000000000{ "entities": { "M65057": { "pageid": 65057, "ns": 6, "title": "File:RandomImage 4658098723742867.jpg", "lastrevid": 146188, "modified": "2019-11-19T20:36:58Z", "type": "mediainfo", "id": "M65057", "labels": { "en": { "language": "en", "value": "ddddd" } }, "descriptions": {}, "statements": [] } }, "success": 1 } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-bogus.json000066400000000000000000000004351444772566300307120ustar00rootroot00000000000000{"servedby":"mw1223","error":{"code":"no-such-entity","info":"Could not find such an entity (Invalid id: bogus)","messages":[{"name":"wikibase-api-no-such-entity","parameters":[],"html":{"*":"Could not find such an entity"}}],"*":"See http://www.wikidata.org/w/api.php for API usage"}}Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-missing-mid.json000066400000000000000000000001131444772566300320040ustar00rootroot00000000000000{"entities": {"M91629437": {"id":"M91629437","missing":""} } } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-titles.json000066400000000000000000000020321444772566300310720ustar00rootroot00000000000000{ "entities": { "M89": { "pageid": 125658679, "ns": 6, "title": "File:Foo-1.png", "lastrevid": 734839557, "modified": "2023-02-22T14:19:55Z", "type": "mediainfo", "id": "M89", "labels": { "en": { "language": "en", "value": "an amazing foo in its natural habitat" } }, "descriptions": {}, "statements": {} }, "M37": { "pageid": 1256586, "ns": 6, "title": "File:Bar.svg", "lastrevid": 7348395, "modified": "2023-02-22T14:19:55Z", "type": "mediainfo", "id": "M37", "labels": { "en": { "language": "en", "value": "the coats of arms of Bar" } }, "descriptions": {}, "statements": {} } }, "success": 1 } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbgetentities-virtual-Test.jpg.json000066400000000000000000000001541444772566300327530ustar00rootroot00000000000000{ "entities": { "M4215516": { "id": "M4215516", "missing": "" } }, "success": 1 } Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbremoveclaims.json000066400000000000000000000001751444772566300277400ustar00rootroot00000000000000{ "pageinfo" : { "lastrevid" : 1234 }, "success" : 1, "claims" : [ "Q1$427C0317-BA8C-95B0-16C8-1A1B5FAC1081" ] } 
Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsearchentities-abc.json
{
  "searchinfo": {
    "search": "abc"
  },
  "search": [
    {
      "id": "Q169889",
      "concepturi": "http://www.wikidata.org/entity/Q169889",
      "url": "//www.wikidata.org/wiki/Q169889",
      "title": "Q169889",
      "pageid": 170288,
      "label": "American Broadcasting Company",
      "description": "American broadcast television network",
      "match": {
        "type": "alias",
        "language": "en",
        "text": "ABC"
      },
      "aliases": [
        "ABC"
      ]
    },
    {
      "id": "Q286874",
      "concepturi": "http://www.wikidata.org/entity/Q286874",
      "url": "//www.wikidata.org/wiki/Q286874",
      "title": "Q286874",
      "pageid": 277328,
      "label": "ABC",
      "description": "Wikimedia disambiguation page",
      "match": {
        "type": "label",
        "language": "en",
        "text": "ABC"
      }
    },
    {
      "id": "Q781365",
      "concepturi": "http://www.wikidata.org/entity/Q781365",
      "url": "//www.wikidata.org/wiki/Q781365",
      "title": "Q781365",
      "pageid": 734387,
      "label": "Australian Broadcasting Corporation",
      "description": "Australia's state-owned and funded national public broadcaster",
      "match": {
        "type": "alias",
        "language": "en",
        "text": "ABC"
      },
      "aliases": [
        "ABC"
      ]
    },
    {
      "id": "Q287076",
      "concepturi": "http://www.wikidata.org/entity/Q287076",
      "url": "//www.wikidata.org/wiki/Q287076",
      "title": "Q287076",
      "pageid": 277519,
      "label": "ABC",
      "description": "Spanish newspaper",
      "match": {
        "type": "label",
        "language": "en",
        "text": "ABC"
      }
    },
    {
      "id": "Q304330",
      "concepturi": "http://www.wikidata.org/entity/Q304330",
      "url": "//www.wikidata.org/wiki/Q304330",
      "title": "Q304330",
      "pageid": 293199,
      "label": "Abacavir",
      "description": "pharmaceutical drug",
      "match": {
        "type": "alias",
        "language": "en",
        "text": "ABC"
      },
      "aliases": [
        "ABC"
      ]
    },
    {
      "id": "Q1057802",
      "concepturi": "http://www.wikidata.org/entity/Q1057802",
      "url": "//www.wikidata.org/wiki/Q1057802",
      "title": "Q1057802",
      "pageid": 1006628,
      "label": "ABC",
      "description": "programming language",
      "match": {
        "type": "label",
        "language": "en",
        "text": "ABC"
      }
    },
    {
      "id": "Q26298",
      "concepturi": "http://www.wikidata.org/entity/Q26298",
      "url": "//www.wikidata.org/wiki/Q26298",
      "title": "Q26298",
      "pageid": 29721,
      "label": "Agricultural Bank of China",
      "description": "major bank in the People's Republic of China",
      "match": {
        "type": "alias",
        "language": "en",
        "text": "ABC"
      },
      "aliases": [
        "ABC"
      ]
    }
  ],
  "search-continue": 7,
  "success": 1
}

Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsearchentities-empty.json
{
  "searchinfo": {
    "search": "some search string with no results"
  },
  "search": [],
  "success": 1
}

Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsetaliases-add-remove.json
{
  "entity": {
    "aliases": {
      "en": [
        {
          "language": "en",
          "value": "old alias"
        },
        {
          "language": "en",
          "value": "hello"
        }
      ]
    },
    "id": "Q1",
    "type": "item",
    "lastrevid": 1234
  },
  "success": 1
}

Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsetclaim.json
{
  "pageinfo": {
    "lastrevid": 1234
  },
  "success": 1,
  "claim": {
    "mainsnak": {
      "snaktype": "value",
      "property": "P1",
      "hash": "a3438cfe0cbb10e069ce2dbae1d7baf9e81ac111",
      "datavalue": {
        "value": {
          "amount": "+456",
          "unit": "1"
        },
        "type": "quantity"
      },
      "datatype": "quantity"
    },
    "type": "statement",
    "id": "Q1$427C0317-BA8C-95B0-16C8-1A1B5FAC1081",
    "rank": "normal"
  }
}

Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsetdescription-null.json
{
  "entity": {
    "descriptions": {
      "en": {
        "language": "en",
        "value": "hello"
      }
    },
    "id": "Q1",
    "type": "item",
    "lastrevid": 1234
  },
  "success": 1
}

Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsetdescription.json
{
  "entity": {
    "descriptions": {
      "en": {
        "language": "en",
        "value": "hello"
      }
    },
    "id": "Q1",
    "type": "item",
    "lastrevid": 1234
  },
  "success": 1
}

Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsetlabel-null.json
{
  "entity": {
    "labels": {
      "en": null
    },
    "id": "Q1",
    "type": "item",
    "lastrevid": 1234
  },
  "success": 1
}

Wikidata-Toolkit-0.14.6/wdtk-wikibaseapi/src/test/resources/wbsetlabel.json
{
  "entity": {
    "labels": {
      "en": {
        "language": "en",
        "value": "hello"
      }
    },
    "id": "Q1",
    "type": "item",
    "lastrevid": 1234
  },
  "success": 1
}