debian-changelog-0.2.0/.cargo_vcs_info.json0000644000000001360000000000100142020ustar { "git": { "sha1": "893fadb1ec0c425f01b40b426e9a04f96e98475e" }, "path_in_vcs": "" }debian-changelog-0.2.0/.github/CODEOWNERS000064400000000000000000000000121046102023000157160ustar 00000000000000* @jelmer debian-changelog-0.2.0/.github/FUNDING.yml000064400000000000000000000000171046102023000161450ustar 00000000000000github: jelmer debian-changelog-0.2.0/.github/dependabot.yml000064400000000000000000000006251046102023000171650ustar 00000000000000# Please see the documentation for all configuration options: # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: - package-ecosystem: "cargo" directory: "/" schedule: interval: "weekly" rebase-strategy: "disabled" - package-ecosystem: "github-actions" directory: "/" schedule: interval: weekly debian-changelog-0.2.0/.github/workflows/rust.yml000064400000000000000000000010471046102023000201110ustar 00000000000000name: Rust on: push: pull_request: env: CARGO_TERM_COLOR: always jobs: build: runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] fail-fast: false steps: - uses: actions/checkout@v4 - run: cargo install cargo-deny - name: Build run: cargo build --verbose env: RUSTFLAGS: -Dwarnings - name: Run tests run: cargo test --verbose env: RUSTFLAGS: -Dwarnings - name: Run cargo deny run: cargo deny check debian-changelog-0.2.0/.gitignore000064400000000000000000000000131046102023000147540ustar 00000000000000/target *~ debian-changelog-0.2.0/Cargo.lock0000644000000334030000000000100121600ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 4 [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "autocfg" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "bitflags" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "bumpalo" version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "cc" version = "1.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b62ac837cdb5cb22e10a256099b4fc502b1dfe560cb282963a974d7abd80e476" dependencies = [ "shlex", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "wasm-bindgen", "windows-targets", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "debian-changelog" version = "0.2.0" dependencies = [ "chrono", "debversion", "flate2", "lazy-regex", "log", "maplit", "rowan", "textwrap", "whoami", ] [[package]] name = "debversion" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b892997e53d52f9ac5c30bdac09cbea6bb1eeb3f93a204b8548774081a44b496" dependencies = [ "chrono", "lazy-regex", ] [[package]] name = "flate2" version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" dependencies = [ "crc32fast", "miniz_oxide", ] [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "iana-time-zone" version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "windows-core", ] [[package]] name = 
"iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = "js-sys" version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" dependencies = [ "wasm-bindgen", ] [[package]] name = "lazy-regex" version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d8e41c97e6bc7ecb552016274b99fbb5d035e8de288c582d9b933af6677bfda" dependencies = [ "lazy-regex-proc_macros", "once_cell", "regex", ] [[package]] name = "lazy-regex-proc_macros" version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76e1d8b05d672c53cb9c7b920bbba8783845ae4f0b076e02a3db1d02c81b4163" dependencies = [ "proc-macro2", "quote", "regex", "syn", ] [[package]] name = "libc" version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" [[package]] name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "maplit" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "miniz_oxide" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ "adler2", ] [[package]] name = "num-traits" version = "0.2.19" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "once_cell" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "proc-macro2" version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] [[package]] name = "redox_syscall" version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" dependencies = [ "bitflags", ] [[package]] name = "regex" version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "rowan" version = "0.15.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a542b0253fa46e632d27a1dc5cf7b930de4df8659dc6e720b647fc72147ae3d" dependencies = [ "countme", "hashbrown", 
"rustc-hash", "text-size", ] [[package]] name = "rustc-hash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "smawk" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "syn" version = "2.0.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "text-size" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "textwrap" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" dependencies = [ "smawk", "unicode-linebreak", "unicode-width", ] [[package]] name = "unicode-ident" version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-linebreak" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-width" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" [[package]] name = "wasite" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "whoami" version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" dependencies = [ "redox_syscall", "wasite", ] [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" 
debian-changelog-0.2.0/Cargo.toml0000644000000026720000000000100122070ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" name = "debian-changelog" version = "0.2.0" authors = ["Jelmer Vernooij "] build = false autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "Parser for Debian changelog files" readme = "README.md" license = "Apache-2.0" repository = "https://github.com/jelmer/debian-changelog-rs" [lib] name = "debian_changelog" path = "src/lib.rs" [[example]] name = "build" path = "examples/build.rs" [[example]] name = "dch" path = "examples/dch.rs" [[example]] name = "simple" path = "examples/simple.rs" [dependencies.chrono] version = "0.4.38" [dependencies.debversion] version = ">=0.2" [dependencies.lazy-regex] version = ">=2" [dependencies.log] version = "0.4" [dependencies.rowan] version = "0.15.16" [dependencies.textwrap] version = "0.16.0" [dependencies.whoami] version = "1" default-features = false [dev-dependencies.flate2] version = "1.0" [dev-dependencies.maplit] version = "1.0.2" debian-changelog-0.2.0/Cargo.toml.orig000064400000000000000000000007721046102023000156670ustar 00000000000000[package] name = "debian-changelog" repository = "https://github.com/jelmer/debian-changelog-rs" description = "Parser for Debian changelog files" version = "0.2.0" edition = "2021" license = "Apache-2.0" readme = "README.md" authors = [ "Jelmer Vernooij ",] [dependencies] chrono = "0.4.38" debversion = ">=0.2" lazy-regex = ">=2" log = "0.4" rowan = "0.15.16" textwrap = 
"0.16.0" whoami = { version = "1", default-features = false } [dev-dependencies] flate2 = "1.0" maplit = "1.0.2" debian-changelog-0.2.0/LICENSE000064400000000000000000000261361046102023000140070ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. debian-changelog-0.2.0/README.md000064400000000000000000000030121046102023000142450ustar 00000000000000Debian Changelog parser ======================= This crate provides a parser for debian/changelog files, as described in the Debian policy, [section 4.4](https://www.debian.org/doc/debian-policy/ch-source.html#debian-changelog-debian-changelog). The parser builds a CST. It is lossless - i.e. preserves formatting, and allows editing and partial parsing. 
Example: ```rust use std::io::Read; fn main() -> Result<(), Box> { let file = std::fs::File::open("/usr/share/doc/rustc/changelog.Debian.gz")?; let mut gz = flate2::read::GzDecoder::new(file); let mut contents = String::new(); gz.read_to_string(&mut contents)?; let changelog: debian_changelog::ChangeLog = contents.parse()?; for entry in changelog.entries() { println!( "{}: {}", entry.package().unwrap(), entry.version().unwrap().to_string() ); } Ok(()) } ``` Or to update an existing changelog file: ```rust use std::io::Read; fn main() -> Result<(), Box> { let file = std::fs::File::open("debian/changelog")?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let changelog: debian_changelog::ChangeLog = contents.parse()?; changelog.auto_add_change( &["* Make a change"], ( "Jelmer Vernooij".to_string(), "jelmer@debian.org".to_string(), ), None, None, ); std::fs::write("debian/changelog", changelog.to_string())?; Ok(()) } ``` debian-changelog-0.2.0/deny.toml000064400000000000000000000255001046102023000146300ustar 00000000000000# This template contains all of the possible sections and their default values # Note that all fields that take a lint level have these possible values: # * deny - An error will be produced and the check will fail # * warn - A warning will be produced, but the check will not fail # * allow - No warning or error will be produced, though in some cases a note # will be # The values provided in this template are the default values that will be used # when any section or field is not specified in your own configuration # Root options # The graph table configures how the dependency graph is constructed and thus # which crates the checks are performed against [graph] # If 1 or more target triples (and optionally, target_features) are specified, # only the specified targets will be checked when running `cargo deny check`. 
# This means, if a particular package is only ever used as a target specific # dependency, such as, for example, the `nix` crate only being used via the # `target_family = "unix"` configuration, that only having windows targets in # this list would mean the nix crate, as well as any of its exclusive # dependencies not shared by any other crates, would be ignored, as the target # list here is effectively saying which targets you are building for. targets = [ # The triple can be any string, but only the target triples built in to # rustc (as of 1.40) can be checked against actual config expressions #"x86_64-unknown-linux-musl", # You can also specify which target_features you promise are enabled for a # particular target. target_features are currently not validated against # the actual valid features supported by the target architecture. #{ triple = "wasm32-unknown-unknown", features = ["atomics"] }, ] # When creating the dependency graph used as the source of truth when checks are # executed, this field can be used to prune crates from the graph, removing them # from the view of cargo-deny. This is an extremely heavy hammer, as if a crate # is pruned from the graph, all of its dependencies will also be pruned unless # they are connected to another crate in the graph that hasn't been pruned, # so it should be used with care. The identifiers are [Package ID Specifications] # (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) #exclude = [] # If true, metadata will be collected with `--all-features`. Note that this can't # be toggled off if true, if you want to conditionally enable `--all-features` it # is recommended to pass `--all-features` on the cmd line instead all-features = false # If true, metadata will be collected with `--no-default-features`. The same # caveat with `all-features` applies no-default-features = false # If set, these feature will be enabled when collecting metadata. 
If `--features` # is specified on the cmd line they will take precedence over this option. #features = [] # The output table provides options for how/if diagnostics are outputted [output] # When outputting inclusion graphs in diagnostics that include features, this # option can be used to specify the depth at which feature edges will be added. # This option is included since the graphs can be quite large and the addition # of features from the crate(s) to all of the graph roots can be far too verbose. # This option can be overridden via `--feature-depth` on the cmd line feature-depth = 1 # This section is considered when running `cargo deny check advisories` # More documentation for the advisories section can be found here: # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html [advisories] # The path where the advisory databases are cloned/fetched into #db-path = "$CARGO_HOME/advisory-dbs" # The url(s) of the advisory databases to use #db-urls = ["https://github.com/rustsec/advisory-db"] # A list of advisory IDs to ignore. Note that ignored advisories will still # output a note when they are encountered. ignore = [ #"RUSTSEC-0000-0000", #{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, #"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish #{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, ] # If this is true, then cargo deny will use the git executable to fetch advisory database. # If this is false, then it uses a built-in git library. # Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. # See Git Authentication for more information about setting up git authentication. 
#git-fetch-with-cli = true # This section is considered when running `cargo deny check licenses` # More documentation for the licenses section can be found here: # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html [licenses] # List of explicitly allowed licenses # See https://spdx.org/licenses/ for list of possible licenses # [possible values: any SPDX 3.11 short identifier (+ optional exception)]. allow = [ "MIT", "Apache-2.0", "Unicode-DFS-2016", #"Apache-2.0 WITH LLVM-exception", ] # The confidence threshold for detecting a license from license text. # The higher the value, the more closely the license text must be to the # canonical license text of a valid SPDX license file. # [possible values: any between 0.0 and 1.0]. confidence-threshold = 0.8 # Allow 1 or more licenses on a per-crate basis, so that particular licenses # aren't accepted for every possible crate as with the normal allow list exceptions = [ # Each entry is the crate and version constraint, and its specific allow # list #{ allow = ["Zlib"], crate = "adler32" }, ] # Some crates don't have (easily) machine readable licensing information, # adding a clarification entry for it allows you to manually specify the # licensing information #[[licenses.clarify]] # The package spec the clarification applies to #crate = "ring" # The SPDX expression for the license requirements of the crate #expression = "MIT AND ISC AND OpenSSL" # One or more files in the crate's source used as the "source of truth" for # the license expression. 
If the contents match, the clarification will be used # when running the license check, otherwise the clarification will be ignored # and the crate will be checked normally, which may produce warnings or errors # depending on the rest of your configuration #license-files = [ # Each entry is a crate relative path, and the (opaque) hash of its contents #{ path = "LICENSE", hash = 0xbd0eed23 } #] [licenses.private] # If true, ignores workspace crates that aren't published, or are only # published to private registries. # To see how to mark a crate as unpublished (to the official registry), # visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. ignore = false # One or more private registries that you might publish crates to, if a crate # is only published to private registries, and ignore is true, the crate will # not have its license(s) checked registries = [ #"https://sekretz.com/registry ] # This section is considered when running `cargo deny check bans`. # More documentation about the 'bans' section can be found here: # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html [bans] # Lint level for when multiple versions of the same crate are detected multiple-versions = "warn" # Lint level for when a crate version requirement is `*` wildcards = "allow" # The graph highlighting used when creating dotgraphs for crates # with multiple versions # * lowest-version - The path to the lowest versioned duplicate is highlighted # * simplest-path - The path to the version with the fewest edges is highlighted # * all - Both lowest-version and simplest-path are used highlight = "all" # The default lint level for `default` features for crates that are members of # the workspace that is being checked. This can be overridden by allowing/denying # `default` on a crate-by-crate basis if desired. workspace-default-features = "allow" # The default lint level for `default` features for external crates that are not # members of the workspace. 
This can be overridden by allowing/denying `default` # on a crate-by-crate basis if desired. external-default-features = "allow" # List of crates that are allowed. Use with care! allow = [ #"ansi_term@0.11.0", #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, ] # List of crates to deny deny = [ #"ansi_term@0.11.0", #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, # Wrapper crates can optionally be specified to allow the crate when it # is a direct dependency of the otherwise banned crate #{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, ] # List of features to allow/deny # Each entry the name of a crate and a version range. If version is # not specified, all versions will be matched. #[[bans.features]] #crate = "reqwest" # Features to not allow #deny = ["json"] # Features to allow #allow = [ # "rustls", # "__rustls", # "__tls", # "hyper-rustls", # "rustls", # "rustls-pemfile", # "rustls-tls-webpki-roots", # "tokio-rustls", # "webpki-roots", #] # If true, the allowed features must exactly match the enabled feature set. If # this is set there is no point setting `deny` #exact = true # Certain crates/versions that will be skipped when doing duplicate detection. skip = [ #"ansi_term@0.11.0", #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. Unlike skip, it also includes the entire tree of transitive # dependencies starting at the specified crate, up to a certain depth, which is # by default infinite. skip-tree = [ #"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies #{ crate = "ansi_term@0.11.0", depth = 20 }, ] # This section is considered when running `cargo deny check sources`. 
# More documentation about the 'sources' section can be found here: # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html [sources] # Lint level for what to happen when a crate from a crate registry that is not # in the allow list is encountered unknown-registry = "warn" # Lint level for what to happen when a crate from a git repository that is not # in the allow list is encountered unknown-git = "warn" # List of URLs for allowed crate registries. Defaults to the crates.io index # if not specified. If it is specified but empty, no registries are allowed. allow-registry = ["https://github.com/rust-lang/crates.io-index"] # List of URLs for allowed Git repositories allow-git = [] [sources.allow-org] # github.com organizations to allow git sources for github = [] # gitlab.com organizations to allow git sources for gitlab = [] # bitbucket.org organizations to allow git sources for bitbucket = [] debian-changelog-0.2.0/disperse.conf000064400000000000000000000000461046102023000154570ustar 00000000000000timeout_days: 5 tag_name: "v$VERSION" debian-changelog-0.2.0/examples/build.rs000064400000000000000000000014411046102023000162550ustar 00000000000000//! A simple example of generate a Debian changelog file. use debian_changelog::{ChangeLog, Urgency}; fn main() { let mut changelog = ChangeLog::new(); // Note that most of these are optional and fall back to sensible defaults. changelog .new_entry() .package("example".to_string()) .version("0.1.0".parse().unwrap()) .distribution("unstable".to_string()) .urgency(Urgency::Low) .maintainer(("John Doe".to_string(), "john@example.com".to_string())) .datetime(chrono::DateTime::parse_from_rfc3339("2018-01-01T00:00:00+00:00").unwrap()) .change_line("* This is a change".to_string()) .finish(); // You can also use changelog.auto_add_change(), which behaves similarly to "dch" println!("{}", changelog); } debian-changelog-0.2.0/examples/dch.rs000064400000000000000000000012451046102023000157160ustar 00000000000000//! 
A simple example of making a change to a changelog file use std::io::Read; fn main() -> Result<(), Box> { let file = std::fs::File::open("/usr/share/doc/rustc/changelog.Debian.gz")?; let mut gz = flate2::read::GzDecoder::new(file); let mut contents = String::new(); gz.read_to_string(&mut contents)?; let mut changelog: debian_changelog::ChangeLog = contents.parse()?; changelog.auto_add_change( &["* Make a change"], ( "Jelmer Vernooij".to_string(), "jelmer@debian.org".to_string(), ), None, None, ); changelog.write(std::io::stdout())?; Ok(()) } debian-changelog-0.2.0/examples/simple.rs000064400000000000000000000011161046102023000164460ustar 00000000000000//! A simple example of parsing a Debian changelog. use std::io::Read; fn main() -> Result<(), Box> { let file = std::fs::File::open("/usr/share/doc/rustc/changelog.Debian.gz")?; let mut gz = flate2::read::GzDecoder::new(file); let mut contents = String::new(); gz.read_to_string(&mut contents)?; let changelog: debian_changelog::ChangeLog = contents.parse()?; for entry in changelog.entries() { println!( "{}: {}", entry.package().unwrap(), entry.version().unwrap() ); } Ok(()) } debian-changelog-0.2.0/src/changes.rs000064400000000000000000000340341046102023000155430ustar 00000000000000//! Functions to parse the changes from a changelog entry. use lazy_regex::regex_captures; // A specific section in a changelog entry, e.g.: // // ``` // [ Joe Example] // * Foo, bar // + Blah // * Foo // * Foo // ``` #[derive(Default, Debug, PartialEq, Eq)] struct Section<'a> { // Title of the section, if any title: Option<&'a str>, // Line numbers of the section linenos: Vec, // List of changes in the section changes: Vec>, } /// Return the different sections from a set of changelog entries. 
/// /// # Arguments /// * `changes`: list of changes from a changelog entry /// /// # Returns /// /// An iterator over tuples with: /// (author, list of line numbers, list of list of (lineno, line) tuples fn changes_sections<'a>( changes: impl Iterator, ) -> impl Iterator> { let mut ret: Vec> = vec![]; let mut section = Section::<'a>::default(); let mut change = Vec::<(usize, &'a str)>::new(); for (i, line) in changes.enumerate() { if line.is_empty() && i == 0 { // Skip the first line continue; } if line.is_empty() { section.linenos.push(i); continue; } if let Some((_, author)) = regex_captures!(r"^\[ (.*) \]$", line) { if !change.is_empty() { section.changes.push(change); change = Vec::new(); } if !section.linenos.is_empty() { ret.push(section); } section = Section { title: Some(author), linenos: vec![i], changes: vec![], }; } else if !line.starts_with("* ") { change.push((i, line)); section.linenos.push(i); } else { if !change.is_empty() { section.changes.push(change); } change = vec![(i, line)]; section.linenos.push(i); } } if !change.is_empty() { section.changes.push(change); } if !section.linenos.is_empty() { ret.push(section); } ret.into_iter() } /// Iterate over changes by author /// /// # Arguments /// * `changes`: list of changes from a changelog entry /// /// # Returns /// An iterator over tuples with: /// (author, list of line numbers, list of lines) pub fn changes_by_author<'a>( changes: impl Iterator, ) -> impl Iterator, Vec, Vec<&'a str>)> { changes_sections(changes).flat_map(|section| { section .changes .into_iter() .map(|change_entry| { let (linenos, lines): (Vec<_>, Vec<_>) = change_entry.into_iter().unzip(); (section.title, linenos, lines) }) .collect::>() }) } #[cfg(test)] mod changes_sections_tests { #[test] fn test_simple() { let iter = super::changes_sections(vec!["", "* Change 1", "* Change 2", " rest", ""].into_iter()); assert_eq!( vec![super::Section { title: None, linenos: vec![1, 2, 3, 4], changes: vec![ (vec![(1, "* Change 1")]), 
(vec![(2, "* Change 2"), (3, " rest")]) ] }], iter.collect::>() ); } #[test] fn test_with_header() { assert_eq!( vec![ super::Section { title: Some("Author 1"), linenos: vec![1, 2, 3], changes: vec![(vec![(2, "* Change 1")])] }, super::Section { title: Some("Author 2"), linenos: vec![4, 5, 6, 7], changes: vec![(vec![(5, "* Change 2"), (6, " rest")])] }, ], super::changes_sections( vec![ "", "[ Author 1 ]", "* Change 1", "", "[ Author 2 ]", "* Change 2", " rest", "", ] .into_iter() ) .collect::>() ); } } /// Strip a changelog message like debcommit does. /// /// Takes a list of changes from a changelog entry and applies a transformation /// so the message is well formatted for a commit message. /// /// # Arguments: /// * `changes` - a list of lines from the changelog entry /// /// # Returns /// Another list of lines with blank lines stripped from the start and the /// spaces the start of the lines split if there is only one logical entry. pub fn strip_for_commit_message(mut changes: Vec<&str>) -> Vec<&str> { if changes.is_empty() { return vec![]; } while let Some(last) = changes.last() { if last.trim().is_empty() { changes.pop(); } else { break; } } while let Some(first) = changes.first() { if first.trim().is_empty() { changes.remove(0); } else { break; } } let changes = changes .into_iter() .map(|mut line| loop { if line.starts_with(" ") { line = &line[2..]; } else if line.starts_with('\t') { line = &line[1..]; } else { break line; } }) .collect::>(); // Drop bullet points let bullet_points_dropped = changes .iter() .map(|line| { let line = line.trim_start(); if line.starts_with("* ") || line.starts_with("+ ") || line.starts_with("- ") { line[1..].trim_start() } else { line } }) .collect::>(); if bullet_points_dropped.len() == 1 { bullet_points_dropped } else { changes } } #[cfg(test)] mod strip_for_commit_message_tests { #[test] fn test_no_changes() { assert_eq!(super::strip_for_commit_message(vec![]), Vec::<&str>::new()); } #[test] fn test_empty_changes() { 
assert_eq!( super::strip_for_commit_message(vec![""]), Vec::<&str>::new() ); } #[test] fn test_removes_leading_whitespace() { assert_eq!( super::strip_for_commit_message(vec!["foo", "bar", "\tbaz", " bang"]), vec!["foo", "bar", "baz", " bang"] ); } #[test] fn test_removes_star_if_one() { assert_eq!(super::strip_for_commit_message(vec!["* foo"]), vec!["foo"]); assert_eq!( super::strip_for_commit_message(vec!["\t* foo"]), vec!["foo"] ); assert_eq!(super::strip_for_commit_message(vec!["+ foo"]), vec!["foo"]); assert_eq!(super::strip_for_commit_message(vec!["- foo"]), vec!["foo"]); assert_eq!(super::strip_for_commit_message(vec!["* foo"]), vec!["foo"]); assert_eq!( super::strip_for_commit_message(vec!["* foo", " bar"]), vec!["* foo", " bar"] ); } #[test] fn test_leaves_start_if_multiple() { assert_eq!( super::strip_for_commit_message(vec!["* foo", "* bar"]), vec!["* foo", "* bar"] ); assert_eq!( super::strip_for_commit_message(vec!["* foo", "+ bar"]), vec!["* foo", "+ bar"] ); assert_eq!( super::strip_for_commit_message(vec!["* foo", "bar", "* baz"]), vec!["* foo", "bar", "* baz"] ); } } /// Format a section title. pub fn format_section_title(title: &str) -> String { format!("[ {} ]", title) } #[cfg(test)] mod format_section_title_tests { #[test] fn test() { assert_eq!(super::format_section_title("foo"), "[ foo ]"); } } /// Add a change to the list of changes, attributed to a specific author. /// /// This will add a new section for the author if there are no sections yet. 
/// /// # Example /// /// ``` /// let mut changes = vec![]; /// debian_changelog::changes::add_change_for_author(&mut changes, "Author 1", vec!["* Change 1"], None); /// assert_eq!(changes, vec!["* Change 1"]); /// ``` pub fn add_change_for_author( changes: &mut Vec, author_name: &str, change: Vec<&str>, default_author: Option<(String, String)>, ) { let by_author = changes_by_author(changes.iter().map(|s| s.as_str())).collect::>(); // There are no per author sections yet, so attribute current changes to changelog entry author if by_author.iter().all(|(a, _, _)| a.is_none()) { if let Some((default_name, _default_email)) = default_author { if author_name != default_name.as_str() { if !changes.is_empty() { changes.insert(0, format_section_title(default_name.as_str())); if !changes.last().unwrap().is_empty() { changes.push("".to_string()); } } changes.push(format_section_title(author_name)); } } } else if let Some(last_section) = by_author.last().as_ref() { // There is a last section, so add a new section only if it is not for the same author if last_section.0 != Some(author_name) { changes.push("".to_string()); changes.push(format_section_title(author_name)); } } changes.extend(crate::textwrap::rewrap_changes(change.into_iter()).map(|s| s.to_string())); } #[cfg(test)] mod add_change_for_author_tests { use super::*; #[test] fn test_matches_default() { let mut changes = vec![]; add_change_for_author( &mut changes, "Author 1", vec!["* Change 1"], Some(("Author 1".to_string(), "jelmer@debian.org".to_string())), ); assert_eq!(changes, vec!["* Change 1"]); } #[test] fn test_not_matches_default() { let mut changes = vec![]; add_change_for_author( &mut changes, "Author 1", vec!["* Change 1"], Some(( "Default Author".to_string(), "jelmer@debian.org".to_string(), )), ); assert_eq!(changes, vec!["[ Author 1 ]", "* Change 1"]); } } /// Find additional authors from a changelog entry pub fn find_extra_authors<'a>(changes: &'a [&'a str]) -> std::collections::HashSet<&'a str> { 
changes_by_author(changes.iter().copied()) .filter_map(|(author, _, _)| author) .collect::>() } #[test] fn test_find_extra_authors() { assert_eq!( find_extra_authors(&["[ Author 1 ]", "* Change 1"]), maplit::hashset! {"Author 1"} ); assert_eq!( find_extra_authors(&["[ Author 1 ]", "[ Author 2 ]", "* Change 1"]), maplit::hashset! {"Author 2"} ); assert_eq!( find_extra_authors(&["[ Author 1 ]", "[ Author 2 ]", "* Change 1", "* Change 2"]), maplit::hashset! {"Author 2"} ); assert_eq!( find_extra_authors(&["[ Author 1 ]", "* Change 1", "[ Author 2 ]", "* Change 2"]), maplit::hashset! {"Author 1", "Author 2"} ); assert_eq!( find_extra_authors(&["* Change 1", "* Change 2",]), maplit::hashset! {} ); } /// Find authors that are thanked in a changelog entry pub fn find_thanks<'a>(changes: &'a [&'a str]) -> std::collections::HashSet<&'a str> { let regex = lazy_regex::regex!( r"[tT]hank(?:(?:s)|(?:you))(?:\s*to)?((?:\s+(?:(?:\w\.)|(?:\w+(?:-\w+)*)))+(?:\s+<[^@>]+@[^@>]+>)?)" ); changes_by_author(changes.iter().copied()) .flat_map(|(_, _, lines)| { lines.into_iter().map(|line| { regex .captures_iter(line) .map(|m| m.get(1).unwrap().as_str().trim()) }) }) .flatten() .collect::>() } #[test] fn test_find_thanks() { assert_eq!(find_thanks(&[]), maplit::hashset! {}); assert_eq!(find_thanks(&["* Do foo", "* Do bar"]), maplit::hashset! {}); assert_eq!( find_thanks(&["* Thanks to A. Hacker"]), maplit::hashset! {"A. Hacker"} ); assert_eq!( find_thanks(&["* Thanks to James A. Hacker"]), maplit::hashset! {"James A. Hacker"} ); assert_eq!( find_thanks(&["* Thankyou to B. Hacker"]), maplit::hashset! {"B. Hacker"} ); assert_eq!( find_thanks(&["* thanks to A. Hacker"]), maplit::hashset! {"A. Hacker"} ); assert_eq!( find_thanks(&["* thankyou to B. Hacker"]), maplit::hashset! {"B. Hacker"} ); assert_eq!( find_thanks(&["* Thanks A. Hacker"]), maplit::hashset! {"A. Hacker"} ); assert_eq!( find_thanks(&["* Thankyou B. Hacker"]), maplit::hashset! {"B. 
Hacker"} ); assert_eq!( find_thanks(&["* Thanks to Mark A. Super-Hacker"]), maplit::hashset! {"Mark A. Super-Hacker"} ); assert_eq!( find_thanks(&["* Thanks to A. Hacker "]), maplit::hashset! {"A. Hacker "} ); assert_eq!( find_thanks(&["* Thanks to Adeodato Simó"]), maplit::hashset! {"Adeodato Simó"} ); } /// Check if all lines in a changelog entry are prefixed with a sha. /// /// This is generally done by gbp-dch(1). pub fn all_sha_prefixed(changes: &[&str]) -> bool { changes_sections(changes.iter().cloned()) .flat_map(|section| { section .changes .into_iter() .flat_map(|ls| ls.into_iter().map(|(_, l)| l)) }) .all(|line| lazy_regex::regex_is_match!(r"^\* \[[0-9a-f]{7}\] ", line)) } #[test] fn test_all_sha_prefixed() { assert!(all_sha_prefixed(&[ "* [a1b2c3d] foo", "* [a1b2c3d] bar", "* [a1b2c3d] baz", ])); assert!(!all_sha_prefixed(&[ "* [a1b2c3d] foo", "* bar", "* [a1b2c3d] baz", ])); } debian-changelog-0.2.0/src/lex.rs000064400000000000000000000174611046102023000147300ustar 00000000000000use crate::SyntaxKind; use std::iter::Peekable; use std::str::Chars; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum LineType { Header, Body, Footer, } pub struct Lexer<'a> { input: Peekable>, line_type: Option, } impl<'a> Lexer<'a> { pub fn new(input: &'a str) -> Self { Lexer { input: input.chars().peekable(), line_type: None, } } fn is_whitespace(c: char) -> bool { c == ' ' || c == '\t' } fn is_newline(c: char) -> bool { c == '\n' || c == '\r' } fn is_valid_identifier_char(c: char) -> bool { c.is_ascii_alphanumeric() || c == '-' || c == '.' 
} fn read_while(&mut self, predicate: F) -> String where F: Fn(char) -> bool, { let mut result = String::new(); while let Some(&c) = self.input.peek() { if predicate(c) { result.push(c); self.input.next(); } else { break; } } result } fn read_while_n(&mut self, n: usize, predicate: F) -> String where F: Fn(char) -> bool, { let mut result = String::new(); while let Some(&c) = self.input.peek() { if predicate(c) { result.push(c); self.input.next(); if result.len() >= n { break; } } else { break; } } result } fn next_token(&mut self) -> Option<(SyntaxKind, String)> { if let Some(&c) = self.input.peek() { match (c, self.line_type) { (c, None) | (c, Some(LineType::Header)) if Self::is_valid_identifier_char(c) => { let identifier = self.read_while(Self::is_valid_identifier_char); self.line_type = Some(LineType::Header); Some((SyntaxKind::IDENTIFIER, identifier)) } (c, None) if Self::is_whitespace(c) => { let mut indent = self.read_while_n(2, |c| c == ' '); if indent.len() == 1 { let dashes = self.read_while(|c| c == '-' || c == ' '); indent.push_str(dashes.as_str()); self.line_type = Some(LineType::Footer); } else { self.line_type = Some(LineType::Body); } Some((SyntaxKind::INDENT, indent)) } ('#', None) => { let comment = self.read_while(|c| !Self::is_newline(c)); let n = self.input.next(); if let Some(n) = n { Some((SyntaxKind::COMMENT, comment + n.to_string().as_str())) } else { Some((SyntaxKind::COMMENT, comment)) } } (c, _) if Self::is_newline(c) => { self.input.next(); self.line_type = None; Some((SyntaxKind::NEWLINE, c.to_string())) } (';', Some(LineType::Header)) => Some(( SyntaxKind::SEMICOLON, self.input.next().unwrap().to_string(), )), ('(', Some(LineType::Header)) => { let version = self .read_while(|c| c != ')' && c != ';' && c != ' ' && !Self::is_newline(c)); let n = self.input.next(); if n == Some(')') { Some(( SyntaxKind::VERSION, version + n.unwrap().to_string().as_str(), )) } else if let Some(n) = n { Some((SyntaxKind::ERROR, version + 
n.to_string().as_str())) } else { Some((SyntaxKind::ERROR, version)) } } ('=', Some(LineType::Header)) => { Some((SyntaxKind::EQUALS, self.input.next().unwrap().to_string())) } (_, Some(LineType::Body)) => { let detail = self.read_while(|c| !Self::is_newline(c)); Some((SyntaxKind::DETAIL, detail)) } (c, _) if Self::is_whitespace(c) => { let ws = self.read_while(Self::is_whitespace); Some((SyntaxKind::WHITESPACE, ws)) } ('<', Some(LineType::Footer)) => { let email = self.read_while(|c| c != '>' && c != ' ' && !Self::is_newline(c)); let n = self.input.next(); if n == Some('>') { Some((SyntaxKind::EMAIL, email + n.unwrap().to_string().as_str())) } else if let Some(n) = n { Some((SyntaxKind::ERROR, email + n.to_string().as_str())) } else { Some((SyntaxKind::ERROR, email)) } } (c, Some(LineType::Footer)) if !Self::is_whitespace(c) && !Self::is_newline(c) => { let identifier = self.read_while(|c| c != ' ' && c != '<' && !Self::is_newline(c)); Some((SyntaxKind::TEXT, identifier)) } (_, _) => { self.input.next(); Some((SyntaxKind::ERROR, c.to_string())) } } } else { None } } } impl Iterator for Lexer<'_> { type Item = (crate::SyntaxKind, String); fn next(&mut self) -> Option { self.next_token() } } pub(crate) fn lex(input: &str) -> Vec<(SyntaxKind, String)> { let mut lexer = Lexer::new(input); lexer.by_ref().collect::>() } #[cfg(test)] mod tests { use crate::SyntaxKind::*; #[test] fn test_empty() { assert_eq!(super::lex(""), vec![]); } #[test] fn test_simple() { assert_eq!( super::lex( r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. 
-- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0000 # Oh, and here is a comment "# ) .iter() .map(|(kind, text)| (*kind, text.as_str())) .collect::>(), vec![ (IDENTIFIER, "breezy"), (WHITESPACE, " "), (VERSION, "(3.3.4-1)"), (WHITESPACE, " "), (IDENTIFIER, "unstable"), (SEMICOLON, ";"), (WHITESPACE, " "), (IDENTIFIER, "urgency"), (EQUALS, "="), (IDENTIFIER, "low"), (NEWLINE, "\n"), (NEWLINE, "\n"), (INDENT, " "), (DETAIL, "* New upstream release."), (NEWLINE, "\n"), (NEWLINE, "\n"), (INDENT, " -- "), (TEXT, "Jelmer"), (WHITESPACE, " "), (TEXT, "Vernooij"), (WHITESPACE, " "), (EMAIL, ""), (WHITESPACE, " "), (TEXT, "Mon,"), (WHITESPACE, " "), (TEXT, "04"), (WHITESPACE, " "), (TEXT, "Sep"), (WHITESPACE, " "), (TEXT, "2023"), (WHITESPACE, " "), (TEXT, "18:13:45"), (WHITESPACE, " "), (TEXT, "-0000"), (NEWLINE, "\n"), (NEWLINE, "\n"), (COMMENT, "# Oh, and here is a comment\n"), ] ); } } debian-changelog-0.2.0/src/lib.rs000064400000000000000000000330241046102023000146770ustar 00000000000000#![deny(missing_docs)] //! A lossless parser for Debian changelog files. //! //! See https://manpages.debian.org/bookworm/dpkg-dev/deb-changelog.5.en.html //! //! For its format specification, see [Debian Policy](https://www.debian.org/doc/debian-policy/ch-source.html#debian-changelog-debian-changelog). //! //! Example: //! //! ```rust //! use std::io::Read; //! let contents = r#"rustc (1.70.0+dfsg1-1) unstable; urgency=medium //! //! * Upload to unstable //! //! -- Jelmer Vernooij Wed, 20 Sep 2023 20:18:40 +0200 //! "#; //! let changelog: debian_changelog::ChangeLog = contents.parse().unwrap(); //! assert_eq!( //! vec![("rustc".to_string(), "1.70.0+dfsg1-1".parse().unwrap())], //! changelog.iter().map( //! |e| (e.package().unwrap(), e.version().unwrap())) //! .collect::>()); //! 
``` mod lex; mod parse; use lazy_regex::regex_captures; pub mod changes; pub mod textwrap; pub use crate::parse::{ChangeLog, Entry, Error, ParseError, Urgency}; /// Let's start with defining all kinds of tokens and /// composite nodes. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[allow(non_camel_case_types)] #[repr(u16)] #[allow(missing_docs)] pub enum SyntaxKind { IDENTIFIER = 0, INDENT, TEXT, WHITESPACE, VERSION, // "(3.3.4-1)" SEMICOLON, // ";" EQUALS, // "=" DETAIL, // "* New upstream release." NEWLINE, // newlines are explicit ERROR, // as well as errors COMMENT, // "#" // composite nodes ROOT, // The entire file ENTRY, // A single entry ENTRY_HEADER, ENTRY_FOOTER, METADATA, METADATA_ENTRY, METADATA_KEY, METADATA_VALUE, ENTRY_BODY, DISTRIBUTIONS, EMPTY_LINE, TIMESTAMP, MAINTAINER, EMAIL, } /// Convert our `SyntaxKind` into the rowan `SyntaxKind`. impl From for rowan::SyntaxKind { fn from(kind: SyntaxKind) -> Self { Self(kind as u16) } } /// Parse a identity string /// /// # Arguments /// * `s` - The string to parse /// /// # Returns /// A tuple with name and email address pub fn parseaddr(s: &str) -> (Option<&str>, &str) { if let Some((_, name, email)) = regex_captures!(r"^(.*)\s+<(.*)>$", s) { if name.is_empty() { (None, email) } else { (Some(name), email) } } else { (None, s) } } /// Get the maintainer information from the environment. 
pub fn get_maintainer_from_env( get_env: impl Fn(&str) -> Option, ) -> Option<(String, String)> { use std::io::BufRead; let mut debemail = get_env("DEBEMAIL"); let mut debfullname = get_env("DEBFULLNAME"); // Split email and name if let Some(email) = debemail.as_ref() { let (parsed_name, parsed_email) = parseaddr(email); if let Some(parsed_name) = parsed_name { if debfullname.is_none() { debfullname = Some(parsed_name.to_string()); } } debemail = Some(parsed_email.to_string()); } if debfullname.is_none() || debemail.is_none() { if let Some(email) = get_env("EMAIL") { let (parsed_name, parsed_email) = parseaddr(email.as_str()); if let Some(parsed_name) = parsed_name { if debfullname.is_none() { debfullname = Some(parsed_name.to_string()); } } debemail = Some(parsed_email.to_string()); } } // Get maintainer's name let maintainer = if let Some(m) = debfullname { Some(m.trim().to_string()) } else if let Some(m) = get_env("NAME") { Some(m.trim().to_string()) } else { Some(whoami::realname()) }; // Get maintainer's mail address let email_address = if let Some(email) = debemail { Some(email) } else if let Some(email) = get_env("EMAIL") { Some(email) } else { // Read /etc/mailname or use hostname let mut addr: Option = None; if let Ok(mailname_file) = std::fs::File::open("/etc/mailname") { let mut reader = std::io::BufReader::new(mailname_file); if let Ok(line) = reader.fill_buf() { if !line.is_empty() { addr = Some(String::from_utf8_lossy(line).trim().to_string()); } } } if addr.is_none() { match whoami::fallible::hostname() { Ok(hostname) => { addr = Some(hostname); } Err(e) => { log::debug!("Failed to get hostname: {}", e); addr = None; } } } addr.map(|hostname| format!("{}@{}", whoami::username(), hostname)) }; if let (Some(maintainer), Some(email_address)) = (maintainer, email_address) { Some((maintainer, email_address)) } else { None } } /// Get the maintainer information in the same manner as dch. 
/// /// This function gets the information about the current user for /// the maintainer field using environment variables of gecos /// information as appropriate. /// /// It uses the same algorithm as dch to get the information, namely /// DEBEMAIL, DEBFULLNAME, EMAIL, NAME, /etc/mailname and gecos. /// /// # Returns /// /// a tuple of the full name, email pair as strings. /// Either of the pair may be None if that value couldn't /// be determined. pub fn get_maintainer() -> Option<(String, String)> { get_maintainer_from_env(|s| std::env::var(s).ok()) } #[cfg(test)] mod get_maintainer_from_env_tests { use super::*; #[test] fn test_normal() { get_maintainer(); } #[test] fn test_deb_vars() { let mut d = std::collections::HashMap::new(); d.insert("DEBFULLNAME".to_string(), "Jelmer".to_string()); d.insert("DEBEMAIL".to_string(), "jelmer@example.com".to_string()); let t = get_maintainer_from_env(|s| d.get(s).cloned()); assert_eq!( Some(("Jelmer".to_string(), "jelmer@example.com".to_string())), t ); } #[test] fn test_email_var() { let mut d = std::collections::HashMap::new(); d.insert("NAME".to_string(), "Jelmer".to_string()); d.insert("EMAIL".to_string(), "foo@example.com".to_string()); let t = get_maintainer_from_env(|s| d.get(s).cloned()); assert_eq!( Some(("Jelmer".to_string(), "foo@example.com".to_string())), t ); } } /// Simple representation of an identity. #[derive(Debug, Clone, PartialEq, Eq)] pub struct Identity { /// Name of the maintainer pub name: String, /// Email address of the maintainer pub email: String, } impl Identity { /// Create a new identity. pub fn new(name: String, email: String) -> Self { Self { name, email } } /// Get the maintainer information from the environment. 
pub fn from_env() -> Option { get_maintainer().map(|(name, email)| Self { name, email }) } } impl From<(String, String)> for Identity { fn from((name, email): (String, String)) -> Self { Self { name, email } } } impl std::fmt::Display for Identity { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{} <{}>", self.name, self.email) } } /// Check if the given distribution marks an unreleased entry. pub fn distribution_is_unreleased(distribution: &str) -> bool { distribution == "UNRELEASED" || distribution.starts_with("UNRELEASED-") } /// Check if any of the given distributions marks an unreleased entry. pub fn distributions_is_unreleased(distributions: &[&str]) -> bool { distributions.iter().any(|x| distribution_is_unreleased(x)) } #[test] fn test_distributions_is_unreleased() { assert!(distributions_is_unreleased(&["UNRELEASED"])); assert!(distributions_is_unreleased(&[ "UNRELEASED-1", "UNRELEASED-2" ])); assert!(distributions_is_unreleased(&["UNRELEASED", "UNRELEASED-2"])); assert!(!distributions_is_unreleased(&["stable"])); } /// Check whether this is a traditional inaugural release pub fn is_unreleased_inaugural(cl: &ChangeLog) -> bool { let mut entries = cl.iter(); if let Some(entry) = entries.next() { if entry.is_unreleased() == Some(false) { return false; } let changes = entry.change_lines().collect::>(); if changes.len() > 1 || !changes[0].starts_with("* Initial release") { return false; } entries.next().is_none() } else { false } } #[cfg(test)] mod is_unreleased_inaugural_tests { use super::*; #[test] fn test_empty() { assert!(!is_unreleased_inaugural(&ChangeLog::new())); } #[test] fn test_unreleased_inaugural() { let mut cl = ChangeLog::new(); cl.new_entry() .maintainer(("Jelmer Vernooij".into(), "jelmer@debian.org".into())) .distribution("UNRELEASED".to_string()) .version("1.0.0".parse().unwrap()) .change_line("* Initial release".to_string()) .finish(); assert!(is_unreleased_inaugural(&cl)); } #[test] fn 
test_not_unreleased_inaugural() { let mut cl = ChangeLog::new(); cl.new_entry() .maintainer(("Jelmer Vernooij".into(), "jelmer@debian.org".into())) .distributions(vec!["unstable".to_string()]) .version("1.0.0".parse().unwrap()) .change_line("* Initial release".to_string()) .finish(); assert_eq!(cl.iter().next().unwrap().is_unreleased(), Some(false)); // Not unreleased assert!(!is_unreleased_inaugural(&cl)); cl.new_entry() .maintainer(("Jelmer Vernooij".into(), "jelmer@debian.org".into())) .distribution("UNRELEASED".to_string()) .version("1.0.1".parse().unwrap()) .change_line("* Some change".to_string()) .finish(); // Not inaugural assert!(!is_unreleased_inaugural(&cl)); } } const DEFAULT_DISTRIBUTION: &[&str] = &["UNRELEASED"]; /// Create a release for a changelog file. /// /// # Arguments /// * `cl` - The changelog to release /// * `distribution` - The distribution to release to. If None, the distribution /// of the previous entry is used. /// * `timestamp` - The timestamp to use for the release. If None, the current time is used. /// * `maintainer` - The maintainer to use for the release. If None, the maintainer /// is extracted from the environment. /// /// # Returns /// Whether a release was created. 
pub fn release( cl: &mut ChangeLog, distribution: Option>, timestamp: Option>, maintainer: Option<(String, String)>, ) -> bool { let mut entries = cl.iter(); let mut first_entry = entries.next().unwrap(); let second_entry = entries.next(); let distribution = if let Some(d) = distribution.as_ref() { d.clone() } else { // Inherit from previous entry if let Some(d) = second_entry.and_then(|e| e.distributions()) { d } else { DEFAULT_DISTRIBUTION .iter() .map(|s| s.to_string()) .collect::>() } }; if first_entry.is_unreleased() == Some(false) { take_uploadership(&mut first_entry, maintainer); first_entry.set_distributions(distribution); let timestamp = timestamp.unwrap_or(chrono::offset::Utc::now().into()); first_entry.set_datetime(timestamp); true } else { false } } /// Take uploadership of a changelog entry, but attribute contributors. /// /// # Arguments /// * `entry` - Changelog entry to modify /// * `maintainer` - Tuple with (name, email) of maintainer to take ownership pub fn take_uploadership(entry: &mut Entry, maintainer: Option<(String, String)>) { let (maintainer_name, maintainer_email) = if let Some(m) = maintainer { m } else { get_maintainer().unwrap() }; if let (Some(current_maintainer), Some(current_email)) = (entry.maintainer(), entry.email()) { if current_maintainer != maintainer_name || current_email != maintainer_email { if let Some(first_line) = entry.change_lines().next() { if first_line.starts_with("[ ") { entry.prepend_change_line( crate::changes::format_section_title(current_maintainer.as_str()).as_str(), ); } } } } entry.set_maintainer((maintainer_name, maintainer_email)); } /// Update changelog with commit messages from commits pub fn gbp_dch(path: &std::path::Path) -> std::result::Result<(), std::io::Error> { // Run the "gbp dch" command with working copy at `path` let output = std::process::Command::new("gbp") .arg("dch") .arg("--ignore-branch") .current_dir(path) .output()?; if !output.status.success() { return Err(std::io::Error::new( 
std::io::ErrorKind::Other, format!( "gbp dch failed: {}", String::from_utf8_lossy(&output.stderr) ), )); } Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_parseaddr() { assert_eq!( (Some("Jelmer"), "jelmer@jelmer.uk"), parseaddr("Jelmer ") ); assert_eq!((None, "jelmer@jelmer.uk"), parseaddr("jelmer@jelmer.uk")); } #[test] fn test_parseaddr_empty() { assert_eq!((None, ""), parseaddr("")); } } debian-changelog-0.2.0/src/parse.rs000064400000000000000000002236771046102023000152620ustar 00000000000000use crate::lex::lex; use crate::SyntaxKind; use crate::SyntaxKind::*; use chrono::{DateTime, FixedOffset}; use debversion::Version; use rowan::ast::AstNode; use std::str::FromStr; #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, PartialOrd, Ord)] /// Urgency of the changes in the changelog entry pub enum Urgency { #[default] /// Low urgency Low, /// Medium urgency Medium, /// High urgency High, /// Emergency urgency Emergency, /// Critical urgency Critical, } impl std::fmt::Display for Urgency { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Urgency::Low => f.write_str("low"), Urgency::Medium => f.write_str("medium"), Urgency::High => f.write_str("high"), Urgency::Emergency => f.write_str("emergency"), Urgency::Critical => f.write_str("critical"), } } } impl FromStr for Urgency { type Err = ParseError; fn from_str(s: &str) -> Result { match s.to_lowercase().as_str() { "low" => Ok(Urgency::Low), "medium" => Ok(Urgency::Medium), "high" => Ok(Urgency::High), "emergency" => Ok(Urgency::Emergency), "critical" => Ok(Urgency::Critical), _ => Err(ParseError(vec![format!("invalid urgency: {}", s)])), } } } #[derive(Debug)] /// Error while reading a changelog file. 
pub enum Error { /// I/O Error Io(std::io::Error), /// Parsing error Parse(ParseError), } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match &self { Error::Io(e) => write!(f, "IO error: {}", e), Error::Parse(e) => write!(f, "Parse error: {}", e), } } } impl From for Error { fn from(e: std::io::Error) -> Self { Error::Io(e) } } impl std::error::Error for Error {} #[derive(Debug, Clone, PartialEq, Eq, Hash)] /// Error while parsing pub struct ParseError(Vec); impl std::fmt::Display for ParseError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { for err in &self.0 { writeln!(f, "{}", err)?; } Ok(()) } } impl std::error::Error for ParseError {} impl From for Error { fn from(e: ParseError) -> Self { Error::Parse(e) } } /// Second, implementing the `Language` trait teaches rowan to convert between /// these two SyntaxKind types, allowing for a nicer SyntaxNode API where /// "kinds" are values from our `enum SyntaxKind`, instead of plain u16 values. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum Lang {} impl rowan::Language for Lang { type Kind = SyntaxKind; fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { unsafe { std::mem::transmute::(raw.0) } } fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { kind.into() } } /// GreenNode is an immutable tree, which is cheap to change, /// but doesn't contain offsets and parent pointers. use rowan::{GreenNode, GreenToken}; /// You can construct GreenNodes by hand, but a builder /// is helpful for top-down parsers: it maintains a stack /// of currently in-progress nodes use rowan::GreenNodeBuilder; /// The parse results are stored as a "green tree". /// We'll discuss working with the results later #[derive(Debug)] struct Parse { green_node: GreenNode, #[allow(unused)] errors: Vec, } fn parse(text: &str) -> Parse { struct Parser { /// input tokens, including whitespace, /// in *reverse* order. 
tokens: Vec<(SyntaxKind, String)>, /// the in-progress tree. builder: GreenNodeBuilder<'static>, /// the list of syntax errors we've accumulated /// so far. errors: Vec, } impl Parser { fn error(&mut self, msg: String) { self.builder.start_node(ERROR.into()); if self.current().is_some() { self.bump(); } self.errors.push(msg); self.builder.finish_node(); } fn parse_entry_header(&mut self) { self.builder.start_node(ENTRY_HEADER.into()); self.expect(IDENTIFIER); self.skip_ws(); if self.current() == Some(NEWLINE) { self.bump(); self.builder.finish_node(); return; } self.expect(VERSION); self.skip_ws(); self.builder.start_node(DISTRIBUTIONS.into()); loop { match self.current() { Some(IDENTIFIER) => self.bump(), Some(NEWLINE) => { self.bump(); self.builder.finish_node(); self.builder.finish_node(); return; } Some(SEMICOLON) => { break; } _ => { self.error("expected distribution or semicolon".to_string()); break; } } self.skip_ws(); } self.builder.finish_node(); self.skip_ws(); self.builder.start_node(METADATA.into()); if self.current() == Some(SEMICOLON) { self.bump(); loop { self.skip_ws(); if self.current() == Some(NEWLINE) { break; } self.builder.start_node(METADATA_ENTRY.into()); if self.current() == Some(IDENTIFIER) { self.builder.start_node(METADATA_KEY.into()); self.bump(); self.builder.finish_node(); } else { self.error("expected metadata key".to_string()); self.builder.finish_node(); break; } if self.current() == Some(EQUALS) { self.bump(); } else { self.error("expected equals".to_string()); self.builder.finish_node(); break; } if self.current() == Some(IDENTIFIER) { self.builder.start_node(METADATA_VALUE.into()); self.bump(); self.builder.finish_node(); } else { self.error("expected metadata value".to_string()); self.builder.finish_node(); break; } self.builder.finish_node(); } } else if self.current() == Some(NEWLINE) { } else { self.error("expected semicolon or newline".to_string()); } self.builder.finish_node(); self.expect(NEWLINE); 
self.builder.finish_node(); } fn parse_entry(&mut self) { self.builder.start_node(ENTRY.into()); self.parse_entry_header(); loop { match self .tokens .last() .map(|(kind, token)| (kind, token.as_str())) { None => { self.error("unexpected end of file".to_string()); break; } // empty line Some((NEWLINE, _)) => { self.builder.start_node(EMPTY_LINE.into()); self.bump(); self.builder.finish_node(); } // details Some((INDENT, " ")) => { self.parse_entry_detail(); } // footer Some((INDENT, " -- ")) => { self.parse_entry_footer(); break; } _ => break, } } self.builder.finish_node(); } pub fn parse_entry_detail(&mut self) { self.builder.start_node(ENTRY_BODY.into()); self.expect(INDENT); match self.current() { Some(DETAIL) => { self.bump(); } Some(NEWLINE) => {} _ => { self.error("expected detail".to_string()); } } self.expect(NEWLINE); self.builder.finish_node(); } pub fn parse_entry_footer(&mut self) { self.builder.start_node(ENTRY_FOOTER.into()); if self.current() != Some(INDENT) { self.error("expected indent".to_string()); } else { let dashes = &self.tokens.last().unwrap().1; if dashes != " -- " { self.error("expected --".to_string()); } else { self.bump(); } } self.builder.start_node(MAINTAINER.into()); while self.current() == Some(TEXT) || (self.current() == Some(WHITESPACE) && self.next() != Some(EMAIL)) { self.bump(); } self.builder.finish_node(); if self.current().is_some() && self.current() != Some(NEWLINE) { self.expect(WHITESPACE); } if self.current().is_some() && self.current() != Some(NEWLINE) { self.expect(EMAIL); } if self.tokens.last().map(|(k, t)| (*k, t.as_str())) == Some((WHITESPACE, " ")) { self.bump(); } else if self.current() == Some(WHITESPACE) { self.error("expected two spaces".to_string()); } else if self.current() == Some(NEWLINE) { self.bump(); self.builder.finish_node(); return; } else { self.error(format!("expected whitespace, got {:?}", self.current())); } self.builder.start_node(TIMESTAMP.into()); loop { if self.current() != Some(TEXT) && 
self.current() != Some(WHITESPACE) { break; } self.bump(); } self.builder.finish_node(); self.expect(NEWLINE); self.builder.finish_node(); } fn parse(mut self) -> Parse { self.builder.start_node(ROOT.into()); loop { match self.current() { None => break, Some(NEWLINE) => { self.builder.start_node(EMPTY_LINE.into()); self.bump(); self.builder.finish_node(); } Some(COMMENT) => { self.bump(); } Some(IDENTIFIER) => { self.parse_entry(); } t => { self.error(format!("unexpected token {:?}", t)); break; } } } // Close the root node. self.builder.finish_node(); // Turn the builder into a GreenNode Parse { green_node: self.builder.finish(), errors: self.errors, } } /// Advance one token, adding it to the current branch of the tree builder. fn bump(&mut self) { let (kind, text) = self.tokens.pop().unwrap(); self.builder.token(kind.into(), text.as_str()); } /// Peek at the first unprocessed token fn current(&self) -> Option { self.tokens.last().map(|(kind, _)| *kind) } fn next(&self) -> Option { self.tokens .get(self.tokens.len() - 2) .map(|(kind, _)| *kind) } fn expect(&mut self, expected: SyntaxKind) { if self.current() != Some(expected) { self.error(format!("expected {:?}, got {:?}", expected, self.current())); } else { self.bump(); } } fn skip_ws(&mut self) { while self.current() == Some(WHITESPACE) { self.bump() } } } let mut tokens = lex(text); tokens.reverse(); Parser { tokens, builder: GreenNodeBuilder::new(), errors: Vec::new(), } .parse() } // To work with the parse results we need a view into the // green tree - the Syntax tree. // It is also immutable, like a GreenNode, // but it contains parent pointers, offsets, and // has identity semantics. 
type SyntaxNode = rowan::SyntaxNode; #[allow(unused)] type SyntaxToken = rowan::SyntaxToken; #[allow(unused)] type SyntaxElement = rowan::NodeOrToken; impl Parse { #[cfg(test)] fn syntax(&self) -> SyntaxNode { SyntaxNode::new_root(self.green_node.clone()) } fn root_mut(&self) -> ChangeLog { ChangeLog::cast(SyntaxNode::new_root_mut(self.green_node.clone())).unwrap() } } macro_rules! ast_node { ($ast:ident, $kind:ident) => { #[derive(PartialEq, Eq, Hash)] #[repr(transparent)] /// A node in the changelog syntax tree. pub struct $ast(SyntaxNode); impl AstNode for $ast { type Language = Lang; fn can_cast(kind: SyntaxKind) -> bool { kind == $kind } fn cast(syntax: SyntaxNode) -> Option { if Self::can_cast(syntax.kind()) { Some(Self(syntax)) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.0 } } impl std::fmt::Display for $ast { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str(self.0.text().to_string().as_str()) } } }; } ast_node!(ChangeLog, ROOT); ast_node!(Entry, ENTRY); ast_node!(EntryHeader, ENTRY_HEADER); ast_node!(EntryBody, ENTRY_BODY); ast_node!(EntryFooter, ENTRY_FOOTER); ast_node!(Maintainer, MAINTAINER); ast_node!(Timestamp, TIMESTAMP); ast_node!(MetadataEntry, METADATA_ENTRY); ast_node!(MetadataKey, METADATA_KEY); ast_node!(MetadataValue, METADATA_VALUE); impl MetadataEntry { pub fn key(&self) -> Option { self.0 .children() .find_map(MetadataKey::cast) .map(|k| k.to_string()) } pub fn value(&self) -> Option { self.0 .children() .find_map(MetadataValue::cast) .map(|k| k.to_string()) } pub fn set_value(&mut self, value: &str) { let node = self .0 .children_with_tokens() .find(|it| it.kind() == METADATA_VALUE); let mut builder = GreenNodeBuilder::new(); builder.start_node(METADATA_VALUE.into()); builder.token(IDENTIFIER.into(), value); builder.finish_node(); let root = SyntaxNode::new_root_mut(builder.finish()); let range = if let Some(node) = node { node.index()..node.index() + 1 } else { let count = 
self.0.children().count(); count..count }; self.0.splice_children(range, vec![root.into()]); } } /// A builder for a changelog entry. pub struct EntryBuilder { root: SyntaxNode, package: Option, version: Option, distributions: Option>, urgency: Option, maintainer: Option<(String, String)>, timestamp: Option>, change_lines: Vec, } impl EntryBuilder { /// Set the package name #[must_use] pub fn package(mut self, package: String) -> Self { self.package = Some(package); self } /// Set the package version #[must_use] pub fn version(mut self, version: Version) -> Self { self.version = Some(version); self } /// Set the distribution(s) #[must_use] pub fn distributions(mut self, distributions: Vec) -> Self { self.distributions = Some(distributions); self } #[must_use] pub fn distribution(mut self, distribution: String) -> Self { self.distributions .get_or_insert_with(Vec::new) .push(distribution); self } #[must_use] pub fn urgency(mut self, urgency: Urgency) -> Self { self.urgency = Some(urgency); self } #[must_use] pub fn maintainer(mut self, maintainer: (String, String)) -> Self { self.maintainer = Some(maintainer); self } #[must_use] pub fn datetime(mut self, timestamp: chrono::DateTime) -> Self { self.timestamp = Some(timestamp); self } #[must_use] pub fn change_line(mut self, line: String) -> Self { self.change_lines.push(line); self } pub fn verify(&self) -> Result<(), String> { if self.package.is_none() { return Err("package is required".to_string()); } if self.version.is_none() { return Err("version is required".to_string()); } match self.distributions { None => { return Err("at least one distribution is required".to_string()); } Some(ref distributions) => { if distributions.is_empty() { return Err("at least one distribution is required".to_string()); } } } if self.change_lines.is_empty() { return Err("at least one change line is required".to_string()); } Ok(()) } fn metadata(&self) -> impl Iterator { let mut ret = vec![]; if let Some(urgency) = 
self.urgency.as_ref() { ret.push(("urgency".to_string(), urgency.to_string())); } ret.into_iter() } pub fn finish(self) -> Entry { if self.root.children().find_map(Entry::cast).is_some() { let mut builder = GreenNodeBuilder::new(); builder.start_node(EMPTY_LINE.into()); builder.token(NEWLINE.into(), "\n"); builder.finish_node(); let syntax = SyntaxNode::new_root_mut(builder.finish()); self.root.splice_children(0..0, vec![syntax.into()]); } let mut builder = GreenNodeBuilder::new(); builder.start_node(ENTRY.into()); builder.start_node(ENTRY_HEADER.into()); if let Some(package) = self.package.as_ref() { builder.token(IDENTIFIER.into(), package.as_str()); } if let Some(version) = self.version.as_ref() { builder.token(WHITESPACE.into(), " "); builder.token(VERSION.into(), format!("({})", version).as_str()); } if let Some(distributions) = self.distributions.as_ref() { builder.token(WHITESPACE.into(), " "); builder.start_node(DISTRIBUTIONS.into()); let mut it = distributions.iter().peekable(); while it.peek().is_some() { builder.token(IDENTIFIER.into(), it.next().unwrap()); if it.peek().is_some() { builder.token(WHITESPACE.into(), " "); } } builder.finish_node(); // DISTRIBUTIONS } let mut metadata = self.metadata().peekable(); if metadata.peek().is_some() { builder.token(SEMICOLON.into(), ";"); builder.token(WHITESPACE.into(), " "); builder.start_node(METADATA.into()); for (key, value) in metadata { builder.start_node(METADATA_ENTRY.into()); builder.start_node(METADATA_KEY.into()); builder.token(IDENTIFIER.into(), key.as_str()); builder.finish_node(); // METADATA_KEY builder.token(EQUALS.into(), "="); builder.start_node(METADATA_VALUE.into()); builder.token(METADATA_VALUE.into(), value.as_str()); builder.finish_node(); // METADATA_VALUE builder.finish_node(); // METADATA_ENTRY } builder.finish_node(); // METADATA } builder.token(NEWLINE.into(), "\n"); builder.finish_node(); // ENTRY_HEADER builder.start_node(EMPTY_LINE.into()); builder.token(NEWLINE.into(), "\n"); 
builder.finish_node(); // EMPTY_LINE for line in self.change_lines { builder.start_node(ENTRY_BODY.into()); builder.token(INDENT.into(), " "); builder.token(DETAIL.into(), line.as_str()); builder.token(NEWLINE.into(), "\n"); builder.finish_node(); // ENTRY_BODY } builder.start_node(EMPTY_LINE.into()); builder.token(NEWLINE.into(), "\n"); builder.finish_node(); // EMPTY_LINE builder.start_node(ENTRY_FOOTER.into()); builder.token(INDENT.into(), " -- "); if let Some(maintainer) = self.maintainer.as_ref() { builder.start_node(MAINTAINER.into()); let mut it = maintainer.0.split(' ').peekable(); while let Some(p) = it.next() { builder.token(TEXT.into(), p); if it.peek().is_some() { builder.token(WHITESPACE.into(), " "); } } builder.finish_node(); // MAINTAINER } if let Some(maintainer) = self.maintainer.as_ref() { builder.token(WHITESPACE.into(), " "); builder.token(EMAIL.into(), format!("<{}>", maintainer.1).as_str()); } if let Some(timestamp) = self.timestamp.as_ref() { builder.token(WHITESPACE.into(), " "); builder.start_node(TIMESTAMP.into()); let ts = timestamp.format("%a, %d %b %Y %H:%M:%S %z").to_string(); let mut it = ts.split(' ').peekable(); while let Some(p) = it.next() { builder.token(TEXT.into(), p); if it.peek().is_some() { builder.token(WHITESPACE.into(), " "); } } builder.finish_node(); // TIMESTAMP } builder.token(NEWLINE.into(), "\n"); builder.finish_node(); // ENTRY_FOOTER builder.finish_node(); // ENTRY let syntax = SyntaxNode::new_root_mut(builder.finish()); self.root.splice_children(0..0, vec![syntax.clone().into()]); Entry(syntax) } } impl IntoIterator for ChangeLog { type Item = Entry; type IntoIter = std::vec::IntoIter; fn into_iter(self) -> Self::IntoIter { // TODO: This is inefficient self.iter().collect::>().into_iter() } } fn replay(builder: &mut GreenNodeBuilder, node: SyntaxNode) { builder.start_node(node.kind().into()); for child in node.children_with_tokens() { match child { SyntaxElement::Node(n) => replay(builder, n), 
SyntaxElement::Token(t) => { builder.token(t.kind().into(), t.text()); } } } builder.finish_node(); } impl FromIterator for ChangeLog { fn from_iter>(iter: T) -> Self { let mut builder = GreenNodeBuilder::new(); builder.start_node(ROOT.into()); for entry in iter { replay(&mut builder, entry.0.clone()); } builder.finish_node(); ChangeLog(SyntaxNode::new_root_mut(builder.finish())) } } impl ChangeLog { /// Create a new, empty changelog. pub fn new() -> ChangeLog { let mut builder = GreenNodeBuilder::new(); builder.start_node(ROOT.into()); builder.finish_node(); let syntax = SyntaxNode::new_root_mut(builder.finish()); ChangeLog(syntax) } /// Returns an iterator over all entries in the changelog file. pub fn iter(&self) -> impl Iterator + '_ { self.0.children().filter_map(Entry::cast) } /// Returns an iterator over all entries in the changelog file. #[deprecated(since = "0.2.0", note = "use `iter` instead")] pub fn entries(&self) -> impl Iterator + '_ { self.iter() } /// Create a new, empty entry. pub fn new_empty_entry(&mut self) -> EntryBuilder { EntryBuilder { root: self.0.clone(), package: None, version: None, distributions: None, urgency: None, maintainer: None, timestamp: None, change_lines: vec![], } } fn first_valid_entry(&self) -> Option { self.iter().find(|entry| { entry.package().is_some() && entry.header().is_some() && entry.footer().is_some() }) } /// Return a builder for a new entry. 
pub fn new_entry(&mut self) -> EntryBuilder { let base_entry = self.first_valid_entry(); let package = base_entry .as_ref() .and_then(|first_entry| first_entry.package()); let mut version = base_entry .as_ref() .and_then(|first_entry| first_entry.version()); if let Some(version) = version.as_mut() { version.increment_debian(); } EntryBuilder { root: self.0.clone(), package, version, distributions: Some(vec!["UNRELEASED".into()]), urgency: Some(Urgency::default()), maintainer: crate::get_maintainer(), timestamp: Some(chrono::Utc::now().into()), change_lines: vec![], } } /// Add a change to the changelog. /// /// This will update the current changelog entry if it is considered /// unreleased. Otherwise, a new entry will be created. /// /// If there is an existing entry, the change will be added to the end of /// the entry. If the previous change was attributed to another author, /// a new section line ("[ Author Name ]") will be added as well. /// /// # Arguments /// * `change` - The change to add, e.g. &["* Fix a bug"] /// * `author` - The author of the change, e.g. 
("John Doe", "john@example") pub fn auto_add_change( &mut self, change: &[&str], author: (String, String), datetime: Option>, urgency: Option, ) -> Entry { match self.first_valid_entry() { Some(entry) if entry.is_unreleased() != Some(false) => { // Add to existing entry entry.add_change_for_author(change, author); // TODO: set timestamp to std::cmp::max(entry.timestamp(), datetime) // TODO: set urgency to std::cmp::max(entry.urgency(), urgency) entry } Some(_entry) => { // Create new entry let mut builder = self.new_entry(); builder = builder.maintainer(author); if let Some(datetime) = datetime { builder = builder.datetime(datetime); } if let Some(urgency) = urgency { builder = builder.urgency(urgency); } for change in change { builder = builder.change_line(change.to_string()); } builder.finish() } None => { panic!("No existing entries found in changelog"); } } } /// Pop the first entry from the changelog. pub fn pop_first(&mut self) -> Option { let mut it = self.iter(); if let Some(entry) = it.next() { // Drop trailing newlines while let Some(sibling) = entry.0.next_sibling() { if sibling.kind() == EMPTY_LINE { sibling.detach(); } else { break; } } entry.0.detach(); Some(entry) } else { None } } /// Read a changelog file from a path pub fn read_path(path: impl AsRef) -> Result { let mut file = std::fs::File::open(path)?; Self::read(&mut file) } /// Read a changelog file from a reader pub fn read(mut r: R) -> Result { let mut buf = String::new(); r.read_to_string(&mut buf)?; Ok(buf.parse()?) 
} /// Read a changelog file from a reader, allowing for syntax errors pub fn read_relaxed(mut r: R) -> Result { let mut buf = String::new(); r.read_to_string(&mut buf)?; let parsed = parse(&buf); Ok(parsed.root_mut()) } /// Write the changelog to a writer pub fn write(&self, mut w: W) -> Result<(), Error> { let buf = self.to_string(); w.write_all(buf.as_bytes())?; Ok(()) } /// Write the changelog to a path pub fn write_to_path(&self, p: &std::path::Path) -> Result<(), Error> { let f = std::fs::File::create(p)?; self.write(f)?; Ok(()) } } impl Default for ChangeLog { fn default() -> Self { Self::new() } } impl FromStr for ChangeLog { type Err = ParseError; fn from_str(s: &str) -> Result { let parsed = parse(s); if parsed.errors.is_empty() { Ok(parsed.root_mut()) } else { Err(ParseError(parsed.errors)) } } } impl FromStr for Entry { type Err = ParseError; fn from_str(s: &str) -> Result { let cl: ChangeLog = s.parse()?; let mut entries = cl.iter(); let entry = entries .next() .ok_or_else(|| ParseError(vec!["no entries found".to_string()]))?; if entries.next().is_some() { return Err(ParseError(vec!["multiple entries found".to_string()])); } Ok(entry) } } impl EntryHeader { /// Returns the version of the entry. pub fn version(&self) -> Option { self.0.children_with_tokens().find_map(|it| { if let Some(token) = it.as_token() { if token.kind() == VERSION { let text = token.text()[1..token.text().len() - 1].to_string(); return Some(text.parse().unwrap()); } } None }) } fn replace_root(&mut self, new_root: SyntaxNode) { let parent = self.0.parent().unwrap(); parent.splice_children(self.0.index()..self.0.index() + 1, vec![new_root.into()]); self.0 = parent.children().nth(self.0.index()).unwrap(); } /// Returns the package name of the entry. 
// NOTE(review): this extraction has stripped all `<...>` generic arguments
// (`Option`, `Vec`, `collect::>()`, `impl Iterator + '_` are missing their
// type parameters) and appears to have collapsed repeated spaces inside
// string literals. The tokens below are kept exactly as found — reconcile
// against the upstream crate before compiling.

/// Returns the package name of the entry: the first IDENTIFIER token found
/// among the header's children.
pub fn package(&self) -> Option { self.0.children_with_tokens().find_map(|it| { if let Some(token) = it.as_token() { if token.kind() == IDENTIFIER { return Some(token.text().to_string()); } } None }) }
/// Returns the distributions of the entry.
// Collects every IDENTIFIER token under the DISTRIBUTIONS child node;
// returns None when the header has no DISTRIBUTIONS node at all.
pub fn distributions(&self) -> Option> { let node = self.0.children().find(|it| it.kind() == DISTRIBUTIONS); node.map(|node| { node.children_with_tokens() .filter_map(|it| { if let Some(token) = it.as_token() { if token.kind() == IDENTIFIER { return Some(token.text().to_string()); } } None }) .collect::>() }) }
/// Set distributions for the entry.
// Builds a fresh DISTRIBUTIONS green node (identifiers separated by single
// spaces) and splices it into the header green tree. Placement fallback
// order: replace the existing DISTRIBUTIONS node; else next to VERSION;
// else just before METADATA; else append at the end of the children.
// NOTE(review): the VERSION fallback uses `version.index()..version.index() + 1`,
// which *replaces* the VERSION element rather than inserting after it
// (contrast `set_version`, which inserts with `index + 1..index + 1`) —
// looks like an off-by-one; confirm intent upstream before relying on it.
pub fn set_distributions(&mut self, _distributions: Vec) { let node = self .0 .children_with_tokens() .find(|it| it.kind() == DISTRIBUTIONS); let mut builder = GreenNodeBuilder::new(); builder.start_node(DISTRIBUTIONS.into()); for (i, distribution) in _distributions.iter().enumerate() { if i > 0 { builder.token(WHITESPACE.into(), " "); } builder.token(IDENTIFIER.into(), distribution); } builder.finish_node(); let (range, green) = if let Some(node) = node { ( node.index()..node.index() + 1, vec![builder.finish().into()], ) } else if let Some(version) = self .0 .children_with_tokens() .find(|it| it.kind() == VERSION) { ( version.index()..version.index() + 1, vec![ GreenToken::new(WHITESPACE.into(), " ").into(), builder.finish().into(), ], ) } else if let Some(metadata) = self .0 .children_with_tokens() .find(|it| it.kind() == METADATA) { ( metadata.index() - 1..metadata.index() - 1, vec![ GreenToken::new(WHITESPACE.into(), " ").into(), builder.finish().into(), ], ) } else { ( self.0.children().count()..self.0.children().count(), vec![ GreenToken::new(WHITESPACE.into(), " ").into(), builder.finish().into(), ], ) }; let new_root = SyntaxNode::new_root_mut(self.0.green().splice_children(range, green)); self.replace_root(new_root); }
/// Set the version for the entry.
// Replaces the VERSION token (rendered "(<version>)"); when absent, inserts
// a space plus the token right after the package IDENTIFIER (or at the
// front when there is no IDENTIFIER either).
pub fn set_version(&mut self, version: &Version) {
// Find the version token
let node = self .0 .children_with_tokens() .find(|it| it.kind() == VERSION); let (range, green) = if let Some(token) = node { ( token.index()..token.index() + 1, vec![GreenToken::new(VERSION.into(), &format!("({})", version)).into()], ) } else { let index = self .0 .children_with_tokens() .position(|it| it.kind() == IDENTIFIER) .unwrap_or(0); ( index + 1..index + 1, vec![ GreenToken::new(WHITESPACE.into(), " ").into(), GreenToken::new(VERSION.into(), &format!("({})", version)).into(), ], ) }; let new_root = SyntaxNode::new_root_mut(self.0.green().splice_children(range, green)); self.replace_root(new_root); }
/// Set the package name for the entry.
// Replaces the first IDENTIFIER token, or prepends "<package> " when the
// header has no IDENTIFIER yet.
pub fn set_package(&mut self, package: String) { let node = self .0 .children_with_tokens() .find(|it| it.kind() == IDENTIFIER); let new_root = if let Some(token) = node { SyntaxNode::new_root_mut(self.0.green().splice_children( token.index()..token.index() + 1, vec![GreenToken::new(IDENTIFIER.into(), &package).into()], )) } else { SyntaxNode::new_root_mut(self.0.green().splice_children( 0..0, vec![ GreenToken::new(IDENTIFIER.into(), &package).into(), GreenToken::new(WHITESPACE.into(), " ").into(), ], )) }; self.replace_root(new_root); }
/// Set extra metadata for the entry.
// Three cases: (1) a METADATA_ENTRY with this key already exists — update
// its value in place; (2) a METADATA node exists — append a new
// " key=value" entry to it; (3) no METADATA node — build "; key=value" and
// insert it after DISTRIBUTIONS, else before the first NEWLINE, else at
// the end of the header's children.
pub fn set_metadata(&mut self, key: &str, value: &str) {
// Find the appropriate metadata node
if let Some(mut node) = self .metadata_nodes() .find(|it| it.key().map(|k| k == key).unwrap_or(false)) { node.set_value(value); } else if let Some(metadata) = self .0 .children_with_tokens() .find(|it| it.kind() == METADATA) { let mut builder = GreenNodeBuilder::new(); builder.start_node(METADATA_ENTRY.into()); builder.start_node(METADATA_KEY.into()); builder.token(IDENTIFIER.into(), key); builder.finish_node(); builder.token(EQUALS.into(), "="); builder.start_node(METADATA_VALUE.into()); builder.token(IDENTIFIER.into(), value); builder.finish_node(); builder.finish_node(); let metadata = metadata.as_node().unwrap(); let count = metadata.children_with_tokens().count(); self.0.splice_children( metadata.index()..metadata.index() + 1, vec![SyntaxNode::new_root_mut(metadata.green().splice_children( count..count, vec![ GreenToken::new(WHITESPACE.into(), " ").into(), builder.finish().into(), ], )) .into()], ); } else { let mut builder = GreenNodeBuilder::new(); builder.start_node(METADATA.into()); builder.token(SEMICOLON.into(), ";"); builder.token(WHITESPACE.into(), " "); builder.start_node(METADATA_ENTRY.into()); builder.start_node(METADATA_KEY.into()); builder.token(IDENTIFIER.into(), key); builder.finish_node(); builder.token(EQUALS.into(), "="); builder.start_node(METADATA_VALUE.into()); builder.token(IDENTIFIER.into(), value); builder.finish_node(); builder.finish_node(); let new_root = SyntaxNode::new_root_mut(builder.finish());
// Add either just after DISTRIBUTIONS
if let Some(distributions) = self .0 .children_with_tokens() .find(|it| it.kind() == DISTRIBUTIONS) { self.0.splice_children( distributions.index() + 1..distributions.index() + 1, vec![new_root.into()], ); } else if let Some(nl) = self .0 .children_with_tokens() .find(|it| it.kind() == NEWLINE) {
// Just before the newline
self.0 .splice_children(nl.index()..nl.index(), vec![new_root.into()]); } else { let count = self.0.children_with_tokens().count(); self.0.splice_children(count..count, vec![new_root.into()]); } } }
// Iterates the METADATA_ENTRY nodes under the METADATA child, if any.
fn metadata_nodes(&self) -> impl Iterator + '_ { let node = self.0.children().find(|it| it.kind() == METADATA); node.into_iter().flat_map(|node| { node.children_with_tokens() .filter_map(|it| MetadataEntry::cast(it.into_node()?)) }) }
// Yields (key, value) pairs for entries that have both a key and a value.
pub fn metadata(&self) -> impl Iterator + '_ { self.metadata_nodes().filter_map(|entry| { if let (Some(key), Some(value)) = (entry.key(), entry.value()) { Some((key, value)) } else { None } }) }
/// Returns the urgency of the entry.
// NOTE(review): `value.parse().unwrap()` panics if the `urgency=` value is
// malformed; returning None (`.parse().ok()`) would likely be safer for a
// parser of external input — confirm upstream intent.
pub fn urgency(&self) -> Option { for (key, value) in self.metadata() { if key.as_str() == "urgency" { return Some(value.parse().unwrap()); } } None } }
impl EntryFooter {
/// Returns the email address from the footer, with the first and last
/// characters stripped (the surrounding delimiters of the EMAIL token —
/// presumably `<` and `>`; the literals were emptied by this extraction).
pub fn email(&self) -> Option { self.0.children_with_tokens().find_map(|it| { if let Some(token) = it.as_token() { let text = token.text(); if token.kind() == EMAIL { return Some(text[1..text.len() - 1].to_string()); } } None }) }
/// Returns the maintainer name, or None when absent or empty.
pub fn maintainer(&self) -> Option { self.0 .children() .find_map(Maintainer::cast) .map(|m| m.text()) .filter(|s| !s.is_empty()) }
// Swaps this footer node for `new_root` inside the parent, then re-binds
// self.0 to the freshly spliced child at the same index.
fn replace_root(&mut self, new_root: SyntaxNode) { let parent = self.0.parent().unwrap(); parent.splice_children(self.0.index()..self.0.index() + 1, vec![new_root.into()]); self.0 = parent.children().nth(self.0.index()).unwrap(); }
/// Set the maintainer for the entry.
pub fn set_maintainer(&mut self, maintainer: String) { let node = self .0 .children_with_tokens() .find(|it| it.kind() == MAINTAINER); let new_root = if let Some(node) = node { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index()..node.index() + 1, vec![GreenToken::new(MAINTAINER.into(), &maintainer).into()], )) } else if let Some(node) = self.0.children_with_tokens().find(|it| it.kind() == INDENT) { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index() + 1..node.index() + 1, vec![GreenToken::new(MAINTAINER.into(), &maintainer).into()], )) } else { SyntaxNode::new_root_mut(self.0.green().splice_children( 0..0, vec![ GreenToken::new(INDENT.into(), " -- ").into(), GreenToken::new(MAINTAINER.into(), &maintainer).into(), ], )) }; self.replace_root(new_root); } /// Set email for the entry. pub fn set_email(&mut self, _email: String) { let node = self.0.children_with_tokens().find(|it| it.kind() == EMAIL); let new_root = if let Some(node) = node { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index()..node.index() + 1, vec![GreenToken::new(EMAIL.into(), &format!("<{}>", _email)).into()], )) } else if let Some(node) = self .0 .children_with_tokens() .find(|it| it.kind() == MAINTAINER) { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index() + 1..node.index() + 1, vec![GreenToken::new(EMAIL.into(), &format!("<{}>", _email)).into()], )) } else if let Some(node) = self.0.children_with_tokens().find(|it| it.kind() == INDENT) { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index() + 1..node.index() + 1, vec![ GreenToken::new(MAINTAINER.into(), "").into(), GreenToken::new(WHITESPACE.into(), " ").into(), GreenToken::new(EMAIL.into(), &format!("<{}>", _email)).into(), ], )) } else { SyntaxNode::new_root_mut(self.0.green().splice_children( 0..0, vec![ GreenToken::new(INDENT.into(), " -- ").into(), GreenToken::new(MAINTAINER.into(), "").into(), GreenToken::new(WHITESPACE.into(), " ").into(), 
GreenToken::new(EMAIL.into(), &format!("<{}>", _email)).into(), ], )) }; self.replace_root(new_root); } pub fn timestamp(&self) -> Option { self.0 .children() .find_map(Timestamp::cast) .map(|m| m.text()) } /// Set timestamp for the entry. pub fn set_timestamp(&mut self, timestamp: String) { let node = self .0 .children_with_tokens() .find(|it| it.kind() == TIMESTAMP); let new_root = if let Some(node) = node { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index()..node.index() + 1, vec![GreenToken::new(TIMESTAMP.into(), ×tamp).into()], )) } else if let Some(node) = self.0.children_with_tokens().find(|it| it.kind() == INDENT) { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index() + 1..node.index() + 1, vec![GreenToken::new(TIMESTAMP.into(), ×tamp).into()], )) } else if let Some(node) = self.0.children_with_tokens().find(|it| it.kind() == EMAIL) { SyntaxNode::new_root_mut(self.0.green().splice_children( node.index() + 1..node.index() + 1, vec![GreenToken::new(TIMESTAMP.into(), ×tamp).into()], )) } else { SyntaxNode::new_root_mut(self.0.green().splice_children( 0..0, vec![ GreenToken::new(INDENT.into(), " -- ").into(), GreenToken::new(TIMESTAMP.into(), ×tamp).into(), ], )) }; self.replace_root(new_root); } } impl EntryBody { fn text(&self) -> String { self.0 .children_with_tokens() .filter_map(|it| { if let Some(token) = it.as_token() { if token.kind() == DETAIL { return Some(token.text().to_string()); } } None }) .collect::>() .concat() } } impl Timestamp { fn text(&self) -> String { self.0.text().to_string() } } impl Maintainer { fn text(&self) -> String { self.0.text().to_string() } } impl std::fmt::Debug for Entry { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut debug = f.debug_struct("Entry"); if let Some(package) = self.package() { debug.field("package", &package); } if let Some(version) = self.version() { debug.field("version", &version); } if let Some(urgency) = self.urgency() { 
debug.field("urgency", &urgency); } if let Some(maintainer) = self.maintainer() { debug.field("maintainer", &maintainer); } if let Some(email) = self.email() { debug.field("email", &email); } if let Some(timestamp) = self.timestamp() { debug.field("timestamp", ×tamp); } if let Some(distributions) = self.distributions() { debug.field("distributions", &distributions); } if let Some(urgency) = self.urgency() { debug.field("urgency", &urgency); } debug.field("body", &self.change_lines().collect::>()); debug.finish() } } impl Entry { fn header(&self) -> Option { self.0.children().find_map(EntryHeader::cast) } fn footer(&self) -> Option { self.0.children().find_map(EntryFooter::cast) } /// Return the package name of the entry. pub fn package(&self) -> Option { self.header().and_then(|h| h.package()) } /// Set the package name of the entry. pub fn set_package(&mut self, package: String) { self.header() .unwrap_or_else(|| self.create_header()) .set_package(package); } /// Return the version of the entry. pub fn version(&self) -> Option { self.header().and_then(|h| h.version()) } /// Set the version of the entry. pub fn set_version(&mut self, version: &Version) { self.header() .unwrap_or_else(|| self.create_header()) .set_version(version); } /// Return the distributions of the entry. pub fn distributions(&self) -> Option> { self.header().and_then(|h| h.distributions()) } /// Set the distributions for the entry pub fn set_distributions(&mut self, distributions: Vec) { self.header() .unwrap_or_else(|| self.create_header()) .set_distributions(distributions); } /// Returns the email address of the maintainer. pub fn email(&self) -> Option { self.footer().and_then(|f| f.email()) } /// Returns the name of the maintainer. pub fn maintainer(&self) -> Option { self.footer().and_then(|f| f.maintainer()) } /// Set the maintainer of the entry. 
// NOTE(review): generic arguments stripped by this extraction
// (`Option`, `DateTime`, `collect::>()`) — tokens kept as found.

/// Set the maintainer (name, email) pair of the entry, creating a footer
/// when none exists.
pub fn set_maintainer(&mut self, maintainer: (String, String)) { let mut footer = self.footer().unwrap_or_else(|| self.create_footer()); footer.set_maintainer(maintainer.0); footer.set_email(maintainer.1); }
/// Returns the timestamp of the entry, as the raw string.
pub fn timestamp(&self) -> Option { self.footer().and_then(|f| f.timestamp()) }
/// Set the timestamp of the entry.
pub fn set_timestamp(&mut self, timestamp: String) { self.footer() .unwrap_or_else(|| self.create_footer()) .set_timestamp(timestamp); }
/// Set the datetime of the entry.
// Renders RFC-5322-style "%a, %d %b %Y %H:%M:%S %z" — the same format
// `parse_time_string` reads back.
pub fn set_datetime(&mut self, datetime: DateTime) { self.set_timestamp(format!("{}", datetime.format("%a, %d %b %Y %H:%M:%S %z"))); }
/// Returns the datetime of the entry.
// Parse failures are silently mapped to None via `.ok()`.
pub fn datetime(&self) -> Option> { self.timestamp().and_then(|ts| parse_time_string(&ts).ok()) }
/// Returns the urgency of the entry.
pub fn urgency(&self) -> Option { self.header().and_then(|h| h.urgency()) }
// Builds an empty ENTRY_HEADER (just a newline), splices it in as the
// first child, and returns a mutable handle to it.
fn create_header(&self) -> EntryHeader { let mut builder = GreenNodeBuilder::new(); builder.start_node(ENTRY_HEADER.into()); builder.token(NEWLINE.into(), "\n"); builder.finish_node(); let syntax = SyntaxNode::new_root_mut(builder.finish()); self.0.splice_children(0..0, vec![syntax.into()]); EntryHeader(self.0.children().next().unwrap().clone_for_update()) }
// Builds an empty ENTRY_FOOTER (just a newline), appends it as the last
// child, and returns a mutable handle to it.
fn create_footer(&self) -> EntryFooter { let mut builder = GreenNodeBuilder::new(); builder.start_node(ENTRY_FOOTER.into()); builder.token(NEWLINE.into(), "\n"); builder.finish_node(); let syntax = SyntaxNode::new_root_mut(builder.finish()); let count = self.0.children().count(); self.0.splice_children(count..count, vec![syntax.into()]); EntryFooter(self.0.children().last().unwrap().clone_for_update()) }
/// Set the urgency of the entry.
// Stored as the "urgency" key in the header metadata.
pub fn set_urgency(&mut self, urgency: Urgency) { self.set_metadata("urgency", urgency.to_string().as_str()); }
/// Set a metadata key-value pair for the entry.
pub fn set_metadata(&mut self, key: &str, value: &str) { self.header() .unwrap_or_else(|| self.create_header()) .set_metadata(key, value) }
/// Add a change for the specified author
///
/// If the author is not the same as the current maintainer, a new
/// section will be created for the author in the entry (e.g. "[ John Doe ]").
// NOTE(review): `self.change_lines().last().unwrap()` panics when the
// entry has no change lines at all — verify that callers guarantee a
// non-empty body on this path.
pub fn add_change_for_author(&self, change: &[&str], author: (String, String)) { let changes_lines = self.change_lines().collect::>(); let by_author = crate::changes::changes_by_author(changes_lines.iter().map(|s| s.as_str())) .collect::>();
// There are no per author sections yet, so attribute current changes to changelog entry author
if by_author.iter().all(|(a, _, _)| a.is_none()) { if let Some(maintainer_name) = self.maintainer() { if author.0 != maintainer_name { self.prepend_change_line( crate::changes::format_section_title(maintainer_name.as_str()).as_str(), ); if !self.change_lines().last().unwrap().is_empty() { self.append_change_line(""); } self.append_change_line( crate::changes::format_section_title(author.0.as_str()).as_str(), ); } } } else if let Some(last_section) = by_author.last().as_ref() { if last_section.0 != Some(author.0.as_str()) { self.append_change_line(""); self.append_change_line( crate::changes::format_section_title(author.0.as_str()).as_str(), ); } } if let Some(last) = self.change_lines().last() { if last.trim().is_empty() { self.pop_change_line(); } } for line in crate::textwrap::rewrap_changes(change.iter().copied()) { self.append_change_line(line.as_ref()); } }
/// Prepend a change line to the entry
// Builds an ENTRY_BODY node (INDENT + DETAIL + NEWLINE; empty lines get
// only the NEWLINE) and inserts it after the first EMPTY_LINE following
// the header, after the header itself, or at index 0.
pub fn prepend_change_line(&self, line: &str) { let mut builder = GreenNodeBuilder::new(); builder.start_node(ENTRY_BODY.into()); if !line.is_empty() { builder.token(INDENT.into(), " "); builder.token(DETAIL.into(), line); } builder.token(NEWLINE.into(), "\n"); builder.finish_node();
// Insert just after the header
let mut it = self.0.children(); let header = it.find(|n| n.kind() == ENTRY_HEADER); let previous_line = it.find(|n| n.kind() == EMPTY_LINE).or(header); let index = previous_line.map_or(0, |l| l.index() + 1); let syntax = SyntaxNode::new_root_mut(builder.finish()); self.0.splice_children(index..index, vec![syntax.into()]); }
/// Pop the last change line from the entry
// Removes the last ENTRY_BODY child and returns its DETAIL text (None for
// an empty line, or when there is no body at all).
pub fn pop_change_line(&self) -> Option {
// Find the last child of type ENTRY_BODY
let last_child = self.0.children().filter(|n| n.kind() == ENTRY_BODY).last(); if let Some(last_child) = last_child { let text = last_child.children_with_tokens().find_map(|it| { if let Some(token) = it.as_token() { if token.kind() == DETAIL { return Some(token.text().to_string()); } } None }); self.0 .splice_children(last_child.index()..last_child.index() + 1, vec![]); text } else { None } }
/// Append a line to the changelog entry
// Inserts a new ENTRY_BODY node after the last existing ENTRY_BODY, or
// after the first child when the entry has no body lines yet.
pub fn append_change_line(&self, line: &str) { let mut builder = GreenNodeBuilder::new(); builder.start_node(ENTRY_BODY.into()); if !line.is_empty() { builder.token(INDENT.into(), " "); builder.token(DETAIL.into(), line); } builder.token(NEWLINE.into(), "\n"); builder.finish_node();
// Find the last child of type ENTRY_BODY
let last_child = self .0 .children() .filter(|n| n.kind() == ENTRY_BODY) .last() .unwrap_or_else(|| self.0.children().next().unwrap()); let syntax = SyntaxNode::new_root_mut(builder.finish()).into(); self.0 .splice_children(last_child.index() + 1..last_child.index() + 1, vec![syntax]); }
/// Returns the changes of the entry.
// NOTE(review): generic arguments stripped by this extraction; additionally
// the raw string literals in the tests below originally contained embedded
// newlines that have been collapsed to spaces — the literals are preserved
// exactly as found and must be re-checked against the upstream crate.

// Collects ENTRY_BODY text plus "" for EMPTY_LINE children, then trims
// trailing empty lines eagerly and leading empty lines lazily (skip_while).
pub fn change_lines(&self) -> impl Iterator + '_ { let mut lines = self .0 .children() .filter_map(|n| { if let Some(ref change) = EntryBody::cast(n.clone()) { Some(change.text()) } else if n.kind() == EMPTY_LINE { Some("".to_string()) } else { None } }) .collect::>(); while let Some(last) = lines.last() { if last.is_empty() { lines.pop(); } else { break; } } lines.into_iter().skip_while(|it| it.is_empty()) }
/// Ensure that the first line of the entry is the specified line
///
/// If the first line is not the specified line, it will be prepended to the entry.
// Comparison is against the trimmed first line, so an idempotent repeat
// call is a no-op.
pub fn ensure_first_line(&self, line: &str) { let first_line = self.change_lines().next().map(|it| it.trim().to_string()); if first_line != Some(line.to_string()) { self.prepend_change_line(line); } }
/// Return whether the entry is marked as being unreleased
// Combines two signals: the distributions list (via
// crate::distributions_is_unreleased) and a missing maintainer+email
// footer; any Some(true) wins, then Some(false), else None (unknown).
pub fn is_unreleased(&self) -> Option { let distro_is_unreleased = self.distributions().as_ref().map(|ds| { let ds = ds.iter().map(|d| d.as_str()).collect::>(); crate::distributions_is_unreleased(ds.as_slice()) }); let footer_is_unreleased = if self.maintainer().is_none() && self.email().is_none() { Some(true) } else { None }; match (distro_is_unreleased, footer_is_unreleased) { (Some(true), _) => Some(true), (_, Some(true)) => Some(true), (Some(false), _) => Some(false), (_, Some(false)) => Some(false), _ => None, } } }
// Footer timestamp format; must stay in sync with Entry::set_datetime.
const CHANGELOG_TIME_FORMAT: &str = "%a, %d %b %Y %H:%M:%S %z";
// Parses a footer timestamp into a fixed-offset DateTime.
fn parse_time_string(time_str: &str) -> Result, chrono::ParseError> { DateTime::parse_from_str(time_str, CHANGELOG_TIME_FORMAT) }
// ---------------------------------------------------------------------
// Tests (kept verbatim; the collapsed raw-string fixtures above apply).
// ---------------------------------------------------------------------
#[cfg(test)] mod tests { use super::*;
#[test] fn test_parse_simple() { const CHANGELOG: &str = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 breezy (3.3.3-2) unstable; urgency=medium * Drop unnecessary dependency on python3-six. Closes: #1039011 * Drop dependency on cython3-dbg. Closes: #1040544 -- Jelmer Vernooij Sat, 24 Jun 2023 14:58:57 +0100 # Oh, and here is a comment "#; let parsed = parse(CHANGELOG); assert_eq!(parsed.errors, Vec::::new()); let node = parsed.syntax(); assert_eq!( format!("{:#?}", node), r###"ROOT@0..405 ENTRY@0..140 ENTRY_HEADER@0..39 IDENTIFIER@0..6 "breezy" WHITESPACE@6..7 " " VERSION@7..16 "(3.3.4-1)" WHITESPACE@16..17 " " DISTRIBUTIONS@17..25 IDENTIFIER@17..25 "unstable" METADATA@25..38 SEMICOLON@25..26 ";" WHITESPACE@26..27 " " METADATA_ENTRY@27..38 METADATA_KEY@27..34 IDENTIFIER@27..34 "urgency" EQUALS@34..35 "=" METADATA_VALUE@35..38 IDENTIFIER@35..38 "low" NEWLINE@38..39 "\n" EMPTY_LINE@39..40 NEWLINE@39..40 "\n" ENTRY_BODY@40..66 INDENT@40..42 " " DETAIL@42..65 "* New upstream release." NEWLINE@65..66 "\n" EMPTY_LINE@66..67 NEWLINE@66..67 "\n" ENTRY_FOOTER@67..140 INDENT@67..71 " -- " MAINTAINER@71..86 TEXT@71..77 "Jelmer" WHITESPACE@77..78 " " TEXT@78..86 "Vernooij" WHITESPACE@86..87 " " EMAIL@87..106 "" WHITESPACE@106..108 " " TIMESTAMP@108..139 TEXT@108..112 "Mon," WHITESPACE@112..113 " " TEXT@113..115 "04" WHITESPACE@115..116 " " TEXT@116..119 "Sep" WHITESPACE@119..120 " " TEXT@120..124 "2023" WHITESPACE@124..125 " " TEXT@125..133 "18:13:45" WHITESPACE@133..134 " " TEXT@134..139 "-0500" NEWLINE@139..140 "\n" EMPTY_LINE@140..141 NEWLINE@140..141 "\n" ENTRY@141..376 ENTRY_HEADER@141..183 IDENTIFIER@141..147 "breezy" WHITESPACE@147..148 " " VERSION@148..157 "(3.3.3-2)" WHITESPACE@157..158 " " DISTRIBUTIONS@158..166 IDENTIFIER@158..166 "unstable" METADATA@166..182 SEMICOLON@166..167 ";" WHITESPACE@167..168 " " METADATA_ENTRY@168..182 METADATA_KEY@168..175 IDENTIFIER@168..175 "urgency" EQUALS@175..176 "=" METADATA_VALUE@176..182 IDENTIFIER@176..182 "medium" NEWLINE@182..183 "\n" EMPTY_LINE@183..184 NEWLINE@183..184 "\n" ENTRY_BODY@184..249 INDENT@184..186 " " DETAIL@186..248 "* Drop unnecessary de ..." NEWLINE@248..249 "\n" ENTRY_BODY@249..302 INDENT@249..251 " " DETAIL@251..301 "* Drop dependency on ..." NEWLINE@301..302 "\n" EMPTY_LINE@302..303 NEWLINE@302..303 "\n" ENTRY_FOOTER@303..376 INDENT@303..307 " -- " MAINTAINER@307..322 TEXT@307..313 "Jelmer" WHITESPACE@313..314 " " TEXT@314..322 "Vernooij" WHITESPACE@322..323 " " EMAIL@323..342 "" WHITESPACE@342..344 " " TIMESTAMP@344..375 TEXT@344..348 "Sat," WHITESPACE@348..349 " " TEXT@349..351 "24" WHITESPACE@351..352 " " TEXT@352..355 "Jun" WHITESPACE@355..356 " " TEXT@356..360 "2023" WHITESPACE@360..361 " " TEXT@361..369 "14:58:57" WHITESPACE@369..370 " " TEXT@370..375 "+0100" NEWLINE@375..376 "\n" EMPTY_LINE@376..377 NEWLINE@376..377 "\n" COMMENT@377..405 "# Oh, and here is a c ..." "### ); let mut root = parsed.root_mut(); let entries: Vec<_> = root.iter().collect(); assert_eq!(entries.len(), 2); let entry = &entries[0]; assert_eq!(entry.package(), Some("breezy".into())); assert_eq!(entry.version(), Some("3.3.4-1".parse().unwrap())); assert_eq!(entry.distributions(), Some(vec!["unstable".into()])); assert_eq!(entry.urgency(), Some(Urgency::Low)); assert_eq!(entry.maintainer(), Some("Jelmer Vernooij".into())); assert_eq!(entry.email(), Some("jelmer@debian.org".into())); assert_eq!( entry.timestamp(), Some("Mon, 04 Sep 2023 18:13:45 -0500".into()) ); assert_eq!( entry.datetime(), Some("2023-09-04T18:13:45-05:00".parse().unwrap()) ); let changes_lines: Vec<_> = entry.change_lines().collect(); assert_eq!(changes_lines, vec!["* New upstream release.".to_string()]); assert_eq!(node.text(), CHANGELOG); let first = root.pop_first().unwrap(); assert_eq!(first.version(), Some("3.3.4-1".parse().unwrap())); assert_eq!( root.to_string(), r#"breezy (3.3.3-2) unstable; urgency=medium * Drop unnecessary dependency on python3-six. Closes: #1039011 * Drop dependency on cython3-dbg. Closes: #1040544 -- Jelmer Vernooij Sat, 24 Jun 2023 14:58:57 +0100 # Oh, and here is a comment "# ); }
// Round-trip through the io::Read-based constructor.
#[test] fn test_from_io_read() { let changelog = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "#; let input = changelog.as_bytes(); let input = Box::new(std::io::Cursor::new(input)) as Box; let parsed = ChangeLog::read(input).unwrap(); assert_eq!(parsed.to_string(), changelog); }
// Builder-style entry construction with all fields supplied.
#[test] fn test_new_entry() { let mut cl = ChangeLog::new(); cl.new_entry() .package("breezy".into()) .version("3.3.4-1".parse().unwrap()) .distributions(vec!["unstable".into()]) .urgency(Urgency::Low) .maintainer(("Jelmer Vernooij".into(), "jelmer@debian.org".into())) .change_line("* A change.".into()) .datetime("2023-09-04T18:13:45-05:00".parse().unwrap()) .finish(); assert_eq!( r###"breezy (3.3.4-1) unstable; urgency=low * A change. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "###, cl.to_string() ); assert!(!cl.iter().next().unwrap().is_unreleased().unwrap()); }
// Distributions/urgency omitted: defaults render UNRELEASED / urgency=low.
#[test] fn test_new_empty_default() { let mut cl = ChangeLog::new(); cl.new_entry() .package("breezy".into()) .version("3.3.4-1".parse().unwrap()) .maintainer(("Jelmer Vernooij".into(), "jelmer@debian.org".into())) .change_line("* A change.".into()) .datetime("2023-09-04T18:13:45-05:00".parse().unwrap()) .finish(); assert_eq!( r###"breezy (3.3.4-1) UNRELEASED; urgency=low * A change. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "###, cl.to_string() ); }
#[test] fn test_new_empty_entry() { let mut cl = ChangeLog::new(); cl.new_empty_entry() .change_line("* A change.".into()) .finish(); assert_eq!( r###" * A change. -- "###, cl.to_string() ); assert_eq!(cl.iter().next().unwrap().is_unreleased(), Some(true)); }
// read_relaxed tolerates the leading unparseable line.
#[test] fn test_parse_invalid_line() { let text = r#"THIS IS NOT A PARSEABLE LINE lintian-brush (0.35) UNRELEASED; urgency=medium * Support updating templated debian/control files that use cdbs template. -- Joe Example Fri, 04 Oct 2019 02:36:13 +0000 "#; let cl = ChangeLog::read_relaxed(text.as_bytes()).unwrap(); let entry = cl.iter().nth(1).unwrap(); assert_eq!(entry.package(), Some("lintian-brush".into())); assert_eq!(entry.version(), Some("0.35".parse().unwrap())); assert_eq!(entry.urgency(), Some(Urgency::Medium)); assert_eq!(entry.maintainer(), Some("Joe Example".into())); assert_eq!(entry.email(), Some("joe@example.com".into())); assert_eq!(entry.distributions(), Some(vec!["UNRELEASED".into()])); assert_eq!( entry.datetime(), Some("2019-10-04T02:36:13+00:00".parse().unwrap()) ); }
#[cfg(test)] mod entry_manipulate_tests { use super::*;
#[test] fn test_append_change_line() { let mut cl = ChangeLog::new(); let entry = cl .new_empty_entry() .change_line("* A change.".into()) .finish(); entry.append_change_line("* Another change."); assert_eq!( r###" * A change. * Another change. -- "###, cl.to_string() ); }
#[test] fn test_prepend_change_line() { let mut cl = ChangeLog::new(); let entry = cl .new_empty_entry() .change_line("* A change.".into()) .finish(); entry.prepend_change_line("* Another change."); assert_eq!( r###" * Another change. * A change. -- "###, cl.to_string() ); assert_eq!(entry.maintainer(), None); assert_eq!(entry.email(), None); assert_eq!(entry.timestamp(), None); assert_eq!(entry.package(), None); assert_eq!(entry.version(), None); } }
#[cfg(test)] mod auto_add_change_tests {
#[test] fn test_unreleased_existing() { let text = r#"lintian-brush (0.35) unstable; urgency=medium * This line already existed. [ Jane Example ] * And this one has an existing author. -- "#; let mut cl = super::ChangeLog::read(text.as_bytes()).unwrap(); let entry = cl.iter().next().unwrap(); assert_eq!(entry.package(), Some("lintian-brush".into())); assert_eq!(entry.is_unreleased(), Some(true)); let entry = cl.auto_add_change( &["* And this one is new."], ("Joe Example".to_string(), "joe@example.com".to_string()), None, None, ); assert_eq!(cl.iter().count(), 1); assert_eq!(entry.package(), Some("lintian-brush".into())); assert_eq!(entry.is_unreleased(), Some(true)); assert_eq!( entry.change_lines().collect::>(), &[ "* This line already existed.", "", "[ Jane Example ]", "* And this one has an existing author.", "", "[ Joe Example ]", "* And this one is new.", ] ); } }
// Second ensure_first_line call must be a no-op (idempotence).
#[test] fn test_ensure_first_line() { let text = r#"lintian-brush (0.35) unstable; urgency=medium * This line already existed. [ Jane Example ] * And this one has an existing author. -- "#; let cl = ChangeLog::read(text.as_bytes()).unwrap(); let entry = cl.iter().next().unwrap(); assert_eq!(entry.package(), Some("lintian-brush".into())); entry.ensure_first_line("* QA upload."); entry.ensure_first_line("* QA upload."); assert_eq!( r#"lintian-brush (0.35) unstable; urgency=medium * QA upload. * This line already existed. [ Jane Example ] * And this one has an existing author. -- "#, cl.to_string() ); }
#[test] fn test_set_version() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_version(&"3.3.5-1".parse().unwrap()); assert_eq!( r#"breezy (3.3.5-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "#, entry.to_string() ); }
#[test] fn test_set_package() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_package("bzr".into()); assert_eq!( r#"bzr (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "#, entry.to_string() ); }
#[test] fn test_set_distributions() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_distributions(vec!["unstable".into(), "experimental".into()]); assert_eq!( r#"breezy (3.3.4-1) unstable experimental; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "#, entry.to_string() ); }
#[test] fn test_set_maintainer() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_maintainer(("Joe Example".into(), "joe@example.com".into())); assert_eq!( r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Joe Example Mon, 04 Sep 2023 18:13:45 -0500 "#, entry.to_string() ); }
#[test] fn test_set_timestamp() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_timestamp("Mon, 04 Sep 2023 18:13:46 -0500".into()); assert_eq!( r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:46 -0500 "#, entry.to_string() ); }
#[test] fn test_set_datetime() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_datetime("2023-09-04T18:13:46-05:00".parse().unwrap()); assert_eq!( r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:46 -0500 "#, entry.to_string() ); }
#[test] fn test_set_urgency() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_urgency(Urgency::Medium); assert_eq!( r#"breezy (3.3.4-1) unstable; urgency=medium * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "#, entry.to_string() ); }
#[test] fn test_set_metadata() { let mut entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.set_metadata("foo", "bar"); assert_eq!( r#"breezy (3.3.4-1) unstable; urgency=low foo=bar * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "#, entry.to_string() ); }
#[test] fn test_add_change_for_author() { let entry: Entry = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. [ Jelmer Vernooij ] * A change by the maintainer. -- Joe Example Mon, 04 Sep 2023 18:13:45 -0500 "# .parse() .unwrap(); entry.add_change_for_author( &["A change by the maintainer."], ("Jelmer Vernooij".into(), "jelmer@debian.org".into()), ); }
#[test] fn test_changelog_from_entry_iter() { let text = r#"breezy (3.3.4-1) unstable; urgency=low * New upstream release. -- Jelmer Vernooij Mon, 04 Sep 2023 18:13:45 -0500 "#; let entry: Entry = text.parse().unwrap(); let cl = std::iter::once(entry).collect::(); assert_eq!(cl.to_string(), text); } } debian-changelog-0.2.0/src/textwrap.rs000064400000000000000000000346571046102023000160140ustar 00000000000000//! Text wrapping functions //! //! These functions are used to wrap text for use in a changelog. //! The main function is `textwrap`, which takes a string and wraps it to a //! specified width, without breaking in between "Closes: #XXXXXX" fragments.
use lazy_regex::regex_captures; use std::borrow::Cow; use textwrap::core::Word; /// Default width for text wrapping pub const DEFAULT_WIDTH: usize = 78; /// Initial indent for text wrapping pub const INITIAL_INDENT: &str = "* "; #[inline] fn can_break_word(line: &str, pos: usize) -> bool { if let Some(_bugnp) = line.strip_prefix("Closes: #") { if pos < line.find('#').unwrap() { return false; } } if let Some(_lpbugno) = line.strip_prefix("LP: #") { if pos < line.find('#').unwrap() { return false; } } line[pos..].starts_with(' ') } #[cfg(test)] mod can_break_word_tests { #[test] fn test_can_break_word() { assert!(super::can_break_word("foo bar", 3)); assert!(!super::can_break_word("foo bar", 0)); assert!(!super::can_break_word("foo bar", 5)); } #[test] fn test_closes() { assert!(!super::can_break_word("Closes: #123456", 6)); assert!(!super::can_break_word("Closes: #123456", 7)); assert!(!super::can_break_word("Closes: #123456", 8)); assert!(!super::can_break_word("Closes: #123456", 9)); assert!(super::can_break_word("Closes: #123456 foo", 15)); } } fn find_words<'a>(line: &'a str) -> Box> + 'a> { let mut start = 0; let mut can_break = false; let mut char_indices = line.char_indices(); Box::new(std::iter::from_fn(move || { for (idx, ch) in char_indices.by_ref() { let word_finished = can_break && ch != ' '; can_break = can_break_word(&line[start..], idx - start); if word_finished { let word = Word::from(&line[start..idx]); start = idx; return Some(word); } } if start < line.len() { let word = Word::from(&line[start..]); start = line.len(); return Some(word); } None })) } #[cfg(test)] mod find_words_tests { use super::find_words; use textwrap::core::Word; use textwrap::WordSeparator; #[test] fn test_find_words() { let ws = WordSeparator::Custom(find_words); assert_eq!( vec![Word::from("foo")], ws.find_words("foo").collect::>() ); assert_eq!( vec![Word::from("foo "), Word::from("bar")], ws.find_words("foo bar").collect::>() ); } #[test] fn test_split_closes() { let ws = 
WordSeparator::Custom(find_words); assert_eq!( vec![ Word::from("This "), Word::from("test "), Word::from("Closes: #123456 "), Word::from("foo"), ], ws.find_words("This test Closes: #123456 foo") .collect::>() ); assert_eq!( vec![ Word::from("This "), Word::from("test "), Word::from("Closes: #123456"), ], ws.find_words("This test Closes: #123456") .collect::>() ); } } fn options<'a>( width: Option, initial_indent: Option<&'a str>, subsequent_indent: Option<&'a str>, ) -> textwrap::Options<'a> { let width = width.unwrap_or(DEFAULT_WIDTH); let mut options = textwrap::Options::new(width) .break_words(false) .word_splitter(textwrap::WordSplitter::NoHyphenation) .word_separator(textwrap::WordSeparator::Custom(find_words)); if let Some(initial_indent) = initial_indent { options = options.initial_indent(initial_indent); } if let Some(subsequent_indent) = subsequent_indent { options = options.subsequent_indent(subsequent_indent); } options } /// Wrap a string of text, without breaking in between "Closes: #XXXXXX" fragments pub fn textwrap<'a>( text: &'a str, width: Option, initial_indent: Option<&str>, subsequent_indent: Option<&str>, ) -> Vec> { let options = options(width, initial_indent, subsequent_indent); // Actual text wrapping using textwrap crate textwrap::wrap(text, options) } #[cfg(test)] mod textwrap_tests { #[test] fn test_wrap_closes() { assert_eq!( vec!["And", "this", "fixes", "something.", "Closes: #123456"], super::textwrap( "And this fixes something. 
Closes: #123456", Some(5), None, None ) ); } #[test] fn test_wrap() { let ws = textwrap::WordSeparator::Custom(super::find_words); let options = textwrap::Options::new(30) .break_words(false) .word_separator(ws); assert_eq!( vec!["This", "is", "a", "line", "that", "has", "been", "broken"], ws.find_words("This is a line that has been broken") .map(|w| w.to_string()) .collect::>() ); assert_eq!( vec!["This is a line that has been", "broken"], textwrap::wrap("This is a line that has been broken", options) ); assert_eq!( vec!["This is a line that has been", "broken"], super::textwrap("This is a line that has been broken", Some(30), None, None) ); } } /// Check if two lines can join fn can_join(line1: &str, line2: &str) -> bool { if line1.ends_with(':') { return false; } if let Some(first_char) = line2.chars().next() { if first_char.is_uppercase() { if line1.ends_with(']') || line1.ends_with('}') { return false; } if !line1.ends_with('.') { return false; } } } if line2.trim_start().starts_with('*') || line2.trim_start().starts_with('-') || line2.trim_start().starts_with('+') { return false; } // don't let lines with different indentation join let line1_indent = line1.len() - line1.trim_start_matches(' ').len(); let line2_indent = line2.len() - line2.trim_start_matches(' ').len(); if line1_indent != line2_indent { return false; } true } #[cfg(test)] mod can_join_tests { #[test] fn test_can_join() { assert!(super::can_join("This is a line.", "This is a line.")); assert!(super::can_join( "This is a line.", "This is a line. And this is another." )); assert!(!super::can_join( "This is a line.", "+ This is a submititem." )); assert!(!super::can_join( "This is a line introducing:", " * A list item." )); assert!(!super::can_join( " Lines with different indentation", " can not join." 
)); } } // Check if any lines are longer than the specified width fn any_long_lines(lines: &[&str], width: usize) -> bool { lines.iter().any(|line| line.len() > width) } #[derive(Debug, PartialEq)] /// Text wrapping error pub enum Error { /// Missing bullet point in a line MissingBulletPoint { /// Line with missing bullet point line: String, }, /// Unexpected indent in a line UnexpectedIndent { /// Line number lineno: usize, /// Line with unexpected indent line: String, /// Found indent indent: usize, }, } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Error::MissingBulletPoint { line } => { write!(f, "Missing bullet point in line: {}", line) } Error::UnexpectedIndent { lineno, line, indent, } => write!( f, "Unexpected indent in line {}: {} (expected {} spaces)", lineno, line, indent ), } } } impl std::error::Error for Error {} // Rewrap lines from a list of changes // // E.g.: // // * This is a long line that needs to be wrapped // // => // // * This is a short line that // needs to be wrappd // fn rewrap_change<'a>(change: &[&'a str], width: Option) -> Result>, Error> { let width = width.unwrap_or(DEFAULT_WIDTH); assert!(width > 4); if change.is_empty() { return Ok(vec![]); } let mut initial_indent = match regex_captures!(r"^[ ]*[\+\-\*] ", change[0]) { Some(initial_indent) => initial_indent.to_string(), None => { return Err(Error::MissingBulletPoint { line: change[0].to_string(), }) } }; let prefix_len = initial_indent.len(); if !any_long_lines(change, width) { return Ok(change.iter().map(|line| (*line).into()).collect()); } let mut subsequent_indent = " ".repeat(prefix_len); let mut lines = vec![&change[0][prefix_len..]]; // Strip the leading indentation for (lineno, line) in change[1..].iter().enumerate() { if line.len() < prefix_len { lines.push(&line[0..0]); } else if line.strip_prefix(subsequent_indent.as_str()).is_some() { lines.push(&line[initial_indent.len()..]); } else { return 
Err(Error::UnexpectedIndent { lineno, indent: subsequent_indent.len(), line: line.to_string(), }); } } let mut ret: Vec> = Vec::new(); let mut todo = vec![lines.remove(0)]; for line in lines.into_iter() { if can_join(todo.last().unwrap(), line) { todo.push(line); } else { ret.extend( textwrap( todo.join(" ").as_str(), Some(width), Some(initial_indent.as_str()), Some(subsequent_indent.as_str()), ) .iter() .map(|s| Cow::Owned(s.to_string())), ); initial_indent = " ".repeat(prefix_len + line.len() - line.trim_start_matches(' ').len()); subsequent_indent = " ".repeat(initial_indent.len()); todo = vec![line.trim_start_matches(' ')]; } } ret.extend( textwrap( todo.join(" ").as_str(), Some(width), Some(initial_indent.as_str()), Some(subsequent_indent.as_str()), ) .iter() .map(|s| Cow::Owned(s.to_string())), ); Ok(ret) } /// Rewrap lines from an iterator of changes pub fn rewrap_changes<'a>( changes: impl Iterator, ) -> impl Iterator> { let mut change = Vec::new(); let mut indent_len: Option = None; let mut ret = vec![]; for line in changes { // Start of a new change if let Some(indent) = regex_captures!(r"^[ ]*[\+\-\*] ", line) { ret.extend(rewrap_change(change.as_slice(), None).unwrap()); indent_len = Some(indent.len()); change = vec![line]; } else if let Some(current_indent) = indent_len { if line.starts_with(&" ".repeat(current_indent)) { change.push(line[current_indent..].into()); } else { ret.extend(rewrap_change(change.as_slice(), None).unwrap()); change = vec![line]; } } else { ret.extend(rewrap_change(change.as_slice(), None).unwrap()); ret.push(line.into()); } } if !change.is_empty() { ret.extend(rewrap_change(change.as_slice(), None).unwrap()); } ret.into_iter() } #[cfg(test)] mod rewrap_tests { use super::rewrap_change; const LONG_LINE: &str = "This is a very long line that could have been broken and should have been broken but was not broken."; #[test] fn test_too_short() { assert_eq!(Vec::<&str>::new(), rewrap_change(&[][..], None).unwrap()); assert_eq!( 
vec!["* Foo bar"], rewrap_change(&["* Foo bar"][..], None).unwrap() ); assert_eq!( vec!["* Foo", " bar"], rewrap_change(&["* Foo", " bar"][..], None).unwrap() ); assert_eq!( vec![" * Beginning", " next line"], rewrap_change(&[" * Beginning", " next line"][..], None).unwrap() ); } #[test] fn test_no_initial() { let long = "x".repeat(100); assert_eq!( super::Error::MissingBulletPoint { line: long.clone() }, rewrap_change(&[long.as_str()], None).unwrap_err() ); } #[test] fn test_wrap() { assert_eq!( vec![ super::Cow::Borrowed( "* This is a very long line that could have been broken and should have been" ), " broken but was not broken.".into() ], rewrap_change(&[format!("* {}", LONG_LINE).as_str()][..], None).unwrap() ); assert_eq!(r###" * Build-Depend on libsdl1.2-dev, libsdl-ttf2.0-dev and libsdl-mixer1.2-dev instead of with the embedded version, add -lSDL_ttf to --with-py-libs in debian/rules and rebootstrap (Closes: #382202)"###.split('\n').collect::>(), rewrap_change(r###" * Build-Depend on libsdl1.2-dev, libsdl-ttf2.0-dev and libsdl-mixer1.2-dev instead of with the embedded version, add -lSDL_ttf to --with-py-libs in debian/rules and rebootstrap (Closes: #382202) "###.split('\n').collect::>().as_slice(), None).unwrap()); } #[test] fn test_no_join() { assert_eq!(r###" - Translators know why this sign has been put here: _Choices: ${FOO}, !Other[ You only have to translate Other, remove the exclamation mark and this comment between brackets] Currently text, newt, slang and gtk frontends support this feature."###.split('\n').collect::>(), rewrap_change(r###" - Translators know why this sign has been put here: _Choices: ${FOO}, !Other[ You only have to translate Other, remove the exclamation mark and this comment between brackets] Currently text, newt, slang and gtk frontends support this feature. "###.split('\n').collect::>().as_slice(), None).unwrap()); } }