upstream-ontologist-0.2.4/.cargo_vcs_info.json0000644000000001360000000000100150760ustar { "git": { "sha1": "8d8536b7baf3ebe67436f55bc2d397414f7886c0" }, "path_in_vcs": "" }upstream-ontologist-0.2.4/Cargo.lock0000644000004530300000000000100130560ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "addr2line" version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "once_cell", "version_check", "zerocopy 0.7.35", ] [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "allocator-api2" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "annotate-snippets" version = "0.11.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" dependencies = [ "anstyle", "unicode-width 0.2.0", ] [[package]] name = "ansi_term" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" dependencies = [ "winapi", ] [[package]] name = "anstream" version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", "once_cell", "windows-sys 0.59.0", ] [[package]] name = "anyhow" version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "arbitrary" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" dependencies = [ "derive_arbitrary", ] [[package]] name = "arc-swap" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arrayref" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" [[package]] name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "ascii-canvas" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" dependencies = [ "term", ] [[package]] name = "async-trait" version = "0.1.88" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "atomic-waker" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", "windows-targets 0.52.6", ] [[package]] name = "base16ct" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3" [[package]] name = "bit-set" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ "bit-vec", ] [[package]] name = "bit-vec" version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" [[package]] name = "bitflags" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "bitmaps" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" dependencies = [ "typenum", ] [[package]] name = "blake3" version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b17679a8d69b6d7fd9cd9801a536cec9fa5e5970b69f9d4747f70b39b031f5e7" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", ] [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "boxcar" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6740c6e2fc6360fa57c35214c7493826aee95993926092606f27c983b40837be" dependencies = [ "loom", ] [[package]] 
name = "breezyshim" version = "0.1.227" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffe63ba40f1cf5aed5af3577f45cc5692e69f9bbf7b12950a87dd2469b9c3183" dependencies = [ "chrono", "ctor", "lazy-regex", "lazy_static", "log", "patchkit", "percent-encoding", "pyo3", "pyo3-filelike", "serde", "tempfile", "url", ] [[package]] name = "bstr" version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ "memchr", "regex-automata 0.4.9", "serde", ] [[package]] name = "bumpalo" version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "bytes" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "bytesize" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d2c12f985c78475a6b8d629afd0c360260ef34cfef52efccdcfd31972f81c2e" [[package]] name = "cargo" version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62fdf5dbde4bf8d8149a4d32568d28d92af9dc4a4975727d89bd8dfb69fb810e" dependencies = [ "annotate-snippets", "anstream", "anstyle", "anyhow", "base64", "blake3", "bytesize", "cargo-credential", "cargo-credential-libsecret", "cargo-credential-macos-keychain", "cargo-credential-wincred", "cargo-platform", "cargo-util", "cargo-util-schemas", "clap", "clap_complete", "color-print", "crates-io", "curl", "curl-sys", "filetime", "flate2", "git2", "git2-curl", "gix", "glob", "hex", "hmac", "home", "http-auth", "humantime", "ignore", "im-rc", "indexmap", "itertools 0.13.0", "jobserver", "lazycell", "libc", "libgit2-sys", "memchr", "opener", "os_info", "pasetors", "pathdiff", "rand", "regex", "rusqlite", "rustc-hash 
2.1.1", "rustc-stable-hash", "rustfix", "same-file", "semver", "serde", "serde-untagged", "serde_ignored", "serde_json", "sha1", "shell-escape", "supports-hyperlinks", "supports-unicode", "tar", "tempfile", "thiserror 1.0.69", "time", "toml", "toml_edit", "tracing", "tracing-chrome", "tracing-subscriber", "unicase", "unicode-width 0.2.0", "url", "walkdir", "windows-sys 0.59.0", ] [[package]] name = "cargo-credential" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac1ef5080adde1db190e901884d2c400990856c2a23201c5a181b910a6dbdf2a" dependencies = [ "anyhow", "libc", "serde", "serde_json", "thiserror 1.0.69", "time", "windows-sys 0.59.0", ] [[package]] name = "cargo-credential-libsecret" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2d33572942f4b5f59376b7041198c56b5585404c59172c62fff2372dedba102" dependencies = [ "anyhow", "cargo-credential", "libloading", ] [[package]] name = "cargo-credential-macos-keychain" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41e699cfa3f0a45e8973839768622302a03ab7148c126e96215c2e3e1fc82375" dependencies = [ "cargo-credential", "security-framework 3.2.0", ] [[package]] name = "cargo-credential-wincred" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62219d774bd2d26e803d75d7dfd234022173af157eb9d6f887016839db4d1f19" dependencies = [ "cargo-credential", "windows-sys 0.59.0", ] [[package]] name = "cargo-platform" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84982c6c0ae343635a3a4ee6dedef965513735c8b183caa7289fa6e27399ebd4" dependencies = [ "serde", ] [[package]] name = "cargo-util" version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "932c5376dc904ef005f0d229a5edc1116f40a78a18d30cdc992ec5acbeffd4d9" dependencies = [ "anyhow", "core-foundation 0.10.0", "filetime", 
"hex", "ignore", "jobserver", "libc", "miow", "same-file", "sha2", "shell-escape", "tempfile", "tracing", "walkdir", "windows-sys 0.59.0", ] [[package]] name = "cargo-util-schemas" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f905f68f8cb8a8182592d9858a5895360f0a5b08b6901fdb10498fb91829804" dependencies = [ "semver", "serde", "serde-untagged", "serde-value", "thiserror 1.0.69", "toml", "unicode-xid", "url", ] [[package]] name = "cc" version = "1.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a" dependencies = [ "jobserver", "libc", "shlex", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "charset" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1f927b07c74ba84c7e5fe4db2baeb3e996ab2688992e39ac68ce3220a677c7e" dependencies = [ "base64", "encoding_rs", ] [[package]] name = "chrono" version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "wasm-bindgen", "windows-link", ] [[package]] name = "chumsky" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ "hashbrown 0.14.5", "stacker", ] [[package]] name = "clap" version = "4.5.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e958897981290da2a852763fe9cdb89cd36977a5d729023127095fa94d95e2ff" dependencies = [ "clap_builder", "clap_derive", ] [[package]] name = "clap_builder" version = "4.5.34" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "83b0f35019843db2160b5bb19ae09b4e6411ac33fc6a712003c33e03090e2489" dependencies = [ "anstream", "anstyle", "clap_lex", "strsim", "terminal_size", ] [[package]] name = "clap_complete" version = "4.5.47" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06f5378ea264ad4f82bbc826628b5aad714a75abf6ece087e923010eb937fb6" dependencies = [ "clap", "clap_lex", "is_executable", "shlex", ] [[package]] name = "clap_derive" version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" dependencies = [ "heck", "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "clap_lex" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "clap_mangen" version = "0.2.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "724842fa9b144f9b89b3f3d371a89f3455eea660361d13a554f68f8ae5d6c13a" dependencies = [ "clap", "roff", ] [[package]] name = "clru" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cbd0f76e066e64fdc5631e3bb46381254deab9ef1158292f27c8c57e3bf3fe59" [[package]] name = "color-print" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3aa954171903797d5623e047d9ab69d91b493657917bdfb8c2c80ecaf9cdb6f4" dependencies = [ "color-print-proc-macro", ] [[package]] name = "color-print-proc-macro" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "692186b5ebe54007e45a59aea47ece9eb4108e141326c304cdc91699a7118a22" dependencies = [ "nom", "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "colorchoice" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "configparser" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e57e3272f0190c3f1584272d613719ba5fc7df7f4942fe542e63d949cf3a649b" [[package]] name = "const-oid" version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const-random" version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" dependencies = [ "const-random-macro", ] [[package]] name = "const-random-macro" version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ "getrandom 0.2.15", "once_cell", "tiny-keccak", ] [[package]] name = "constant_time_eq" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "core-foundation" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crates-io" version = "0.40.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3278cae111be507cd074ac10cac0ca4638c90911f978a849c1b96086381d347c" dependencies = [ "curl", "percent-encoding", "serde", "serde_json", "thiserror 1.0.69", "url", ] [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" [[package]] name = "crypto-bigint" version = "0.5.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core", "subtle", "zeroize", ] [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "csv" version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" dependencies = [ "csv-core", "itoa", "ryu", "serde", ] [[package]] name = "csv-core" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" dependencies = [ "memchr", ] [[package]] name = "ct-codecs" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b916ba8ce9e4182696896f015e8a5ae6081b305f74690baa8465e35f5a142ea4" [[package]] name = "ctor" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e9666f4a9a948d4f1dff0c08a4512b0f7c86414b23960104c243c10d79f4c3" dependencies = [ "ctor-proc-macro", "dtor", ] [[package]] name = "ctor-proc-macro" version = "0.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f211af61d8efdd104f96e57adf5e426ba1bc3ed7a4ead616e15e5881fd79c4d" [[package]] name = "curl" version = "0.4.47" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9fb4d13a1be2b58f14d60adba57c9834b78c62fd86c3e76a148f732686e9265" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", "socket2", "windows-sys 0.52.0", ] [[package]] name = "curl-sys" version = "0.4.80+curl-8.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"55f7df2eac63200c3ab25bde3b2268ef2ee56af3d238e76d61f01c3c49bff734" dependencies = [ "cc", "libc", "libnghttp2-sys", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", "windows-sys 0.52.0", ] [[package]] name = "data-encoding" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "575f75dfd25738df5b91b8e43e14d44bda14637a58fae779fd2b064f8bf3e010" [[package]] name = "dbus" version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bb21987b9fb1613058ba3843121dd18b163b254d8a6e797e144cbac14d96d1b" dependencies = [ "libc", "libdbus-sys", "winapi", ] [[package]] name = "deb822-derive" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b6e5cafe61e77421a090e2a33b8a2e4e2ff1b106fd906ebade111307064d981" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "deb822-lossless" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812bb5c8052a89edc6d45d1bc3b3400e8186dd166e9b0a9520bfa5a2bd8477ee" dependencies = [ "deb822-derive", "regex", "rowan 0.16.1", "serde", ] [[package]] name = "debbugs" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6f8df1ed3ad28bd8380c05e317dcff67cd84957a334ccd713bfa81f9631f5dc" dependencies = [ "debversion", "lazy-regex", "log", "mailparse", "maplit", "reqwest", "tokio", "xmltree", ] [[package]] name = "debcargo" version = "2.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47e23d67bc35dce555979a021e12d0e0fa1351a70a9f120405c19acebcd92b64" dependencies = [ "ansi_term", "anyhow", "cargo", "cargo-util", "cargo-util-schemas", "chrono", "clap", "clap_mangen", "env_logger", "filetime", "flate2", "git2", "glob", "itertools 0.13.0", "log", "regex", "semver", "serde", "serde_derive", "tar", "tempfile", "textwrap", "toml", "walkdir", ] [[package]] name = "debian-changelog" version = "0.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "98258e2066472d8d04bfe8a3fab2e7be77fe87b913dab2308a59f13061145814" dependencies = [ "chrono", "debversion", "lazy-regex", "log", "rowan 0.15.16", "textwrap", "whoami", ] [[package]] name = "debian-control" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9e04c1b1377047a3ee5ef6914f48187fd363f6e9287c3e6d72539f0a8cc2573" dependencies = [ "chrono", "deb822-lossless", "debversion", "regex", "rowan 0.16.1", "url", ] [[package]] name = "debian-copyright" version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e893a383c33c4e2689fd3c3121d6e82193211f65e7c483463c816f6b7c29857e" dependencies = [ "deb822-lossless", "debversion", "regex", ] [[package]] name = "debian-watch" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f692668701f5382bea79960cad4798f97689de3e22af638ee43552887c180f4f" dependencies = [ "debversion", "m_lexer", "rowan 0.16.1", "url", ] [[package]] name = "debversion" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b892997e53d52f9ac5c30bdac09cbea6bb1eeb3f93a204b8548774081a44b496" dependencies = [ "chrono", "lazy-regex", ] [[package]] name = "der" version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", "pem-rfc7468", "zeroize", ] [[package]] name = "deranged" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28cfac68e08048ae1883171632c2aef3ebc555621ae56fbccce1cbf22dd7f058" dependencies = [ "powerfmt", "serde", ] [[package]] name = "derive_arbitrary" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", 
"quote", "syn 2.0.100", ] [[package]] name = "diff" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "const-oid", "crypto-common", "subtle", ] [[package]] name = "dirs-next" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ "cfg-if", "dirs-sys-next", ] [[package]] name = "dirs-sys-next" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", "redox_users", "winapi", ] [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "distro-info" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef12237f2ced990e453ec0b69230752e73be0a357817448c50a62f8bbbe0ca71" dependencies = [ "chrono", "csv", "failure", ] [[package]] name = "dlv-list" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" dependencies = [ "const-random", ] [[package]] name = "document_tree" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee380fad9396cb284d9ce0aeccfe179f128ec87ae816186a073a8702b04f8879" dependencies = [ "anyhow", "regex", "serde", "serde_derive", "url", ] [[package]] name = "dtor" version = "0.0.5" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "222ef136a1c687d4aa0395c175f2c4586e379924c352fd02f7870cf7de783c23" dependencies = [ "dtor-proc-macro", ] [[package]] name = "dtor-proc-macro" version = "0.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7454e41ff9012c00d53cf7f475c5e3afa3b91b7c90568495495e8d9bf47a1055" [[package]] name = "dunce" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "ecdsa" version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest", "elliptic-curve", "rfc6979", "signature", "spki", ] [[package]] name = "ed25519-compact" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9b3460f44bea8cd47f45a0c70892f1eff856d97cd55358b2f73f663789f6190" dependencies = [ "getrandom 0.2.15", ] [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "elliptic-curve" version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", "digest", "ff", "generic-array", "group", "hkdf", "pem-rfc7468", "pkcs8", "rand_core", "sec1", "subtle", "zeroize", ] [[package]] name = "ena" version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" dependencies = [ "log", ] [[package]] name = "encoding_rs" version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] [[package]] name = "env_filter" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" dependencies = [ "log", "regex", ] [[package]] name = "env_logger" version = "0.11.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3716d7a920fb4fac5d84e9d4bce8ceb321e9414b4409da61b07b75c1e3d0697" dependencies = [ "anstream", "anstyle", "env_filter", "jiff 0.2.5", "log", ] [[package]] name = "equivalent" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "erased-serde" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" dependencies = [ "serde", "typeid", ] [[package]] name = "errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys 0.59.0", ] [[package]] name = "failure" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" dependencies = [ "backtrace", "failure_derive", ] [[package]] name = "failure_derive" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", "synstructure 0.12.6", ] [[package]] name = "fallible-iterator" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] name = 
"fallible-streaming-iterator" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" [[package]] name = "faster-hex" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" dependencies = [ "serde", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "ff" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" dependencies = [ "rand_core", "subtle", ] [[package]] name = "fiat-crypto" version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "filetime" version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.59.0", ] [[package]] name = "fixedbitset" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" dependencies = [ "crc32fast", "libz-sys", "miniz_oxide", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foreign-types" version = "0.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" dependencies = [ "foreign-types-shared", ] [[package]] name = "foreign-types-shared" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "fs-err" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f89bda4c2a21204059a977ed3bfe746677dfd137b83c339e702b0ac91d482aa" dependencies = [ "autocfg", ] [[package]] name = "futf" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" dependencies = [ "mac", "new_debug_unreachable", ] [[package]] name = "futures" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", "futures-executor", "futures-io", "futures-sink", "futures-task", "futures-util", ] [[package]] name = "futures-channel" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", ] [[package]] name = "futures-core" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", "futures-util", ] [[package]] name = "futures-io" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-macro" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "futures-sink" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", "futures-io", "futures-macro", "futures-sink", "futures-task", "memchr", "pin-project-lite", "pin-utils", "slab", ] [[package]] name = "generator" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd" dependencies = [ "cfg-if", "libc", "log", "rustversion", "windows", ] [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", "zeroize", ] [[package]] name = "getopts" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" dependencies = [ "unicode-width 0.1.14", ] [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] [[package]] name = "getrandom" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", "js-sys", "libc", "r-efi", "wasi 0.14.2+wasi-0.2.4", "wasm-bindgen", ] [[package]] name = "gimli" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "git2" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" dependencies = [ "bitflags", "libc", "libgit2-sys", "log", "openssl-probe", "openssl-sys", "url", ] [[package]] name = "git2-curl" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68ff14527a1c242320039b138376f8e0786697a1b7b172bc44f6efda3ab9079f" dependencies = [ "curl", "git2", "log", "url", ] [[package]] name = "gix" version = "0.69.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d0eebdaecdcf405d5433a36f85e4f058cf4de48ee2604388be0dbccbaad353e" dependencies = [ "gix-actor", "gix-attributes", "gix-command", "gix-commitgraph", "gix-config", "gix-credentials", "gix-date", "gix-diff", "gix-dir", "gix-discover", "gix-features", "gix-filter", "gix-fs", "gix-glob", "gix-hash", "gix-hashtable", "gix-ignore", "gix-index", "gix-lock", "gix-negotiate", "gix-object", "gix-odb", "gix-pack", "gix-path", "gix-pathspec", "gix-prompt", "gix-protocol", 
"gix-ref", "gix-refspec", "gix-revision", "gix-revwalk", "gix-sec", "gix-shallow", "gix-submodule", "gix-tempfile", "gix-trace", "gix-transport", "gix-traverse", "gix-url", "gix-utils", "gix-validate", "gix-worktree", "once_cell", "prodash", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-actor" version = "0.33.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20018a1a6332e065f1fcc8305c1c932c6b8c9985edea2284b3c79dc6fa3ee4b2" dependencies = [ "bstr", "gix-date", "gix-utils", "itoa", "thiserror 2.0.12", "winnow 0.6.26", ] [[package]] name = "gix-attributes" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddf9bf852194c0edfe699a2d36422d2c1f28f73b7c6d446c3f0ccd3ba232cadc" dependencies = [ "bstr", "gix-glob", "gix-path", "gix-quote", "gix-trace", "kstring", "smallvec", "thiserror 2.0.12", "unicode-bom", ] [[package]] name = "gix-bitmap" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1db9765c69502650da68f0804e3dc2b5f8ccc6a2d104ca6c85bc40700d37540" dependencies = [ "thiserror 2.0.12", ] [[package]] name = "gix-chunk" version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b1f1d8764958699dc764e3f727cef280ff4d1bd92c107bbf8acd85b30c1bd6f" dependencies = [ "thiserror 2.0.12", ] [[package]] name = "gix-command" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb410b84d6575db45e62025a9118bdbf4d4b099ce7575a76161e898d9ca98df1" dependencies = [ "bstr", "gix-path", "gix-trace", "shell-words", ] [[package]] name = "gix-commitgraph" version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8da6591a7868fb2b6dabddea6b09988b0b05e0213f938dbaa11a03dd7a48d85" dependencies = [ "bstr", "gix-chunk", "gix-features", "gix-hash", "memmap2", "thiserror 2.0.12", ] [[package]] name = "gix-config" version = "0.42.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6649b406ca1f99cb148959cf00468b231f07950f8ec438cc0903cda563606f19" dependencies = [ "bstr", "gix-config-value", "gix-features", "gix-glob", "gix-path", "gix-ref", "gix-sec", "memchr", "once_cell", "smallvec", "thiserror 2.0.12", "unicode-bom", "winnow 0.6.26", ] [[package]] name = "gix-config-value" version = "0.14.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11365144ef93082f3403471dbaa94cfe4b5e72743bdb9560719a251d439f4cee" dependencies = [ "bitflags", "bstr", "gix-path", "libc", "thiserror 2.0.12", ] [[package]] name = "gix-credentials" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82a50c56b785c29a151ab4ccf74a83fe4e21d2feda0d30549504b4baed353e0a" dependencies = [ "bstr", "gix-command", "gix-config-value", "gix-path", "gix-prompt", "gix-sec", "gix-trace", "gix-url", "thiserror 2.0.12", ] [[package]] name = "gix-date" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c57c477b645ee248b173bb1176b52dd528872f12c50375801a58aaf5ae91113f" dependencies = [ "bstr", "itoa", "jiff 0.1.29", "thiserror 2.0.12", ] [[package]] name = "gix-diff" version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8e92566eccbca205a0a0f96ffb0327c061e85bc5c95abbcddfe177498aa04f6" dependencies = [ "bstr", "gix-hash", "gix-object", "thiserror 2.0.12", ] [[package]] name = "gix-dir" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fba2ffbcf4bd34438e8a8367ccbc94870549903d1f193a14f47eb6b0967e1293" dependencies = [ "bstr", "gix-discover", "gix-fs", "gix-ignore", "gix-index", "gix-object", "gix-path", "gix-pathspec", "gix-trace", "gix-utils", "gix-worktree", "thiserror 2.0.12", ] [[package]] name = "gix-discover" version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"83bf6dfa4e266a4a9becb4d18fc801f92c3f7cc6c433dd86fdadbcf315ffb6ef" dependencies = [ "bstr", "dunce", "gix-fs", "gix-hash", "gix-path", "gix-ref", "gix-sec", "thiserror 2.0.12", ] [[package]] name = "gix-features" version = "0.39.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d85d673f2e022a340dba4713bed77ef2cf4cd737d2f3e0f159d45e0935fd81f" dependencies = [ "bytes", "crc32fast", "crossbeam-channel", "flate2", "gix-hash", "gix-trace", "gix-utils", "libc", "once_cell", "parking_lot", "prodash", "sha1_smol", "thiserror 2.0.12", "walkdir", ] [[package]] name = "gix-filter" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0ecdee5667f840ba20c7fe56d63f8e1dc1e6b3bfd296151fe5ef07c874790a" dependencies = [ "bstr", "encoding_rs", "gix-attributes", "gix-command", "gix-hash", "gix-object", "gix-packetline-blocking", "gix-path", "gix-quote", "gix-trace", "gix-utils", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-fs" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3d4fac505a621f97e5ce2c69fdc425742af00c0920363ca4074f0eb48b1db9" dependencies = [ "fastrand", "gix-features", "gix-utils", ] [[package]] name = "gix-glob" version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaf69a6bec0a3581567484bf99a4003afcaf6c469fd4214352517ea355cf3435" dependencies = [ "bitflags", "bstr", "gix-features", "gix-path", ] [[package]] name = "gix-hash" version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b5eccc17194ed0e67d49285e4853307e4147e95407f91c1c3e4a13ba9f4e4ce" dependencies = [ "faster-hex", "thiserror 2.0.12", ] [[package]] name = "gix-hashtable" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ef65b256631078ef733bc5530c4e6b1c2e7d5c2830b75d4e9034ab3997d18fe" dependencies = [ "gix-hash", "hashbrown 0.14.5", "parking_lot", ] 
[[package]] name = "gix-ignore" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6b1fb24d2a4af0aa7438e2771d60c14a80cf2c9bd55c29cf1712b841f05bb8a" dependencies = [ "bstr", "gix-glob", "gix-path", "gix-trace", "unicode-bom", ] [[package]] name = "gix-index" version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "270645fd20556b64c8ffa1540d921b281e6994413a0ca068596f97e9367a257a" dependencies = [ "bitflags", "bstr", "filetime", "fnv", "gix-bitmap", "gix-features", "gix-fs", "gix-hash", "gix-lock", "gix-object", "gix-traverse", "gix-utils", "gix-validate", "hashbrown 0.14.5", "itoa", "libc", "memmap2", "rustix 0.38.44", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-lock" version = "15.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cd3ab68a452db63d9f3ebdacb10f30dba1fa0d31ac64f4203d395ed1102d940" dependencies = [ "gix-tempfile", "gix-utils", "thiserror 2.0.12", ] [[package]] name = "gix-negotiate" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d27f830a16405386e9c83b9d5be8261fe32bbd6b3caf15bd1b284c6b2b7ef1a8" dependencies = [ "bitflags", "gix-commitgraph", "gix-date", "gix-hash", "gix-object", "gix-revwalk", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-object" version = "0.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e42d58010183ef033f31088479b4eb92b44fe341b35b62d39eb8b185573d77ea" dependencies = [ "bstr", "gix-actor", "gix-date", "gix-features", "gix-hash", "gix-hashtable", "gix-path", "gix-utils", "gix-validate", "itoa", "smallvec", "thiserror 2.0.12", "winnow 0.6.26", ] [[package]] name = "gix-odb" version = "0.66.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb780eceb3372ee204469478de02eaa34f6ba98247df0186337e0333de97d0ae" dependencies = [ "arc-swap", "gix-date", "gix-features", "gix-fs", "gix-hash", 
"gix-hashtable", "gix-object", "gix-pack", "gix-path", "gix-quote", "parking_lot", "tempfile", "thiserror 2.0.12", ] [[package]] name = "gix-pack" version = "0.56.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4158928929be29cae7ab97afc8e820a932071a7f39d8ba388eed2380c12c566c" dependencies = [ "clru", "gix-chunk", "gix-features", "gix-hash", "gix-hashtable", "gix-object", "gix-path", "gix-tempfile", "memmap2", "parking_lot", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-packetline" version = "0.18.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7e5ae6bc3ac160a6bf44a55f5537813ca3ddb08549c0fd3e7ef699c73c439cd" dependencies = [ "bstr", "faster-hex", "gix-trace", "thiserror 2.0.12", ] [[package]] name = "gix-packetline-blocking" version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1cbf8767c6abd5a6779f586702b5bcd8702380f4208219449cf1c9d0cd1e17c" dependencies = [ "bstr", "faster-hex", "gix-trace", "thiserror 2.0.12", ] [[package]] name = "gix-path" version = "0.10.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c40f12bb65a8299be0cfb90fe718e3be236b7a94b434877012980863a883a99f" dependencies = [ "bstr", "gix-trace", "home", "once_cell", "thiserror 2.0.12", ] [[package]] name = "gix-pathspec" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c472dfbe4a4e96fcf7efddcd4771c9037bb4fdea2faaabf2f4888210c75b81e" dependencies = [ "bitflags", "bstr", "gix-attributes", "gix-config-value", "gix-glob", "gix-path", "thiserror 2.0.12", ] [[package]] name = "gix-prompt" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79f2185958e1512b989a007509df8d61dca014aa759a22bee80cfa6c594c3b6d" dependencies = [ "gix-command", "gix-config-value", "parking_lot", "rustix 0.38.44", "thiserror 2.0.12", ] [[package]] name = "gix-protocol" version = "0.47.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c84642e8b6fed7035ce9cc449593019c55b0ec1af7a5dce1ab8a0636eaaeb067" dependencies = [ "bstr", "gix-credentials", "gix-date", "gix-features", "gix-hash", "gix-lock", "gix-negotiate", "gix-object", "gix-ref", "gix-refspec", "gix-revwalk", "gix-shallow", "gix-trace", "gix-transport", "gix-utils", "maybe-async", "thiserror 2.0.12", "winnow 0.6.26", ] [[package]] name = "gix-quote" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e49357fccdb0c85c0d3a3292a9f6db32d9b3535959b5471bb9624908f4a066c6" dependencies = [ "bstr", "gix-utils", "thiserror 2.0.12", ] [[package]] name = "gix-ref" version = "0.49.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a91b61776c839d0f1b7114901179afb0947aa7f4d30793ca1c56d335dfef485f" dependencies = [ "gix-actor", "gix-features", "gix-fs", "gix-hash", "gix-lock", "gix-object", "gix-path", "gix-tempfile", "gix-utils", "gix-validate", "memmap2", "thiserror 2.0.12", "winnow 0.6.26", ] [[package]] name = "gix-refspec" version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00c056bb747868c7eb0aeb352c9f9181ab8ca3d0a2550f16470803500c6c413d" dependencies = [ "bstr", "gix-hash", "gix-revision", "gix-validate", "smallvec", "thiserror 2.0.12", ] [[package]] name = "gix-revision" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61e1ddc474405a68d2ce8485705dd72fe6ce959f2f5fe718601ead5da2c8f9e7" dependencies = [ "bstr", "gix-commitgraph", "gix-date", "gix-hash", "gix-object", "gix-revwalk", "thiserror 2.0.12", ] [[package]] name = "gix-revwalk" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "510026fc32f456f8f067d8f37c34088b97a36b2229d88a6a5023ef179fcb109d" dependencies = [ "gix-commitgraph", "gix-date", "gix-hash", "gix-hashtable", "gix-object", "smallvec", "thiserror 2.0.12", ] 
[[package]] name = "gix-sec" version = "0.10.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d84dae13271f4313f8d60a166bf27e54c968c7c33e2ffd31c48cafe5da649875" dependencies = [ "bitflags", "gix-path", "libc", "windows-sys 0.52.0", ] [[package]] name = "gix-shallow" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d2673242e87492cb6ff671f0c01f689061ca306c4020f137197f3abc84ce01" dependencies = [ "bstr", "gix-hash", "gix-lock", "thiserror 2.0.12", ] [[package]] name = "gix-submodule" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2455f8c0fcb6ebe2a6e83c8f522d30615d763eb2ef7a23c7d929f9476e89f5c" dependencies = [ "bstr", "gix-config", "gix-path", "gix-pathspec", "gix-refspec", "gix-url", "thiserror 2.0.12", ] [[package]] name = "gix-tempfile" version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2feb86ef094cc77a4a9a5afbfe5de626897351bbbd0de3cb9314baf3049adb82" dependencies = [ "gix-fs", "libc", "once_cell", "parking_lot", "tempfile", ] [[package]] name = "gix-trace" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c396a2036920c69695f760a65e7f2677267ccf483f25046977d87e4cb2665f7" [[package]] name = "gix-transport" version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd04d91e507a8713cfa2318d5a85d75b36e53a40379cc7eb7634ce400ecacbaf" dependencies = [ "base64", "bstr", "curl", "gix-command", "gix-credentials", "gix-features", "gix-packetline", "gix-quote", "gix-sec", "gix-url", "thiserror 2.0.12", ] [[package]] name = "gix-traverse" version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ed47d648619e23e93f971d2bba0d10c1100e54ef95d2981d609907a8cabac89" dependencies = [ "bitflags", "gix-commitgraph", "gix-date", "gix-hash", "gix-hashtable", "gix-object", "gix-revwalk", "smallvec", 
"thiserror 2.0.12", ] [[package]] name = "gix-url" version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d096fb733ba6bd3f5403dba8bd72bdd8809fe2b347b57844040b8f49c93492d9" dependencies = [ "bstr", "gix-features", "gix-path", "percent-encoding", "thiserror 2.0.12", "url", ] [[package]] name = "gix-utils" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff08f24e03ac8916c478c8419d7d3c33393da9bb41fa4c24455d5406aeefd35f" dependencies = [ "bstr", "fastrand", "unicode-normalization", ] [[package]] name = "gix-validate" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9eaa01c3337d885617c0a42e92823922a2aea71f4caeace6fe87002bdcadbd90" dependencies = [ "bstr", "thiserror 2.0.12", ] [[package]] name = "gix-worktree" version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "756dbbe15188fa22540d5eab941f8f9cf511a5364d5aec34c88083c09f4bea13" dependencies = [ "bstr", "gix-attributes", "gix-features", "gix-fs", "gix-glob", "gix-hash", "gix-ignore", "gix-index", "gix-object", "gix-path", "gix-validate", ] [[package]] name = "glob" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "globset" version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" dependencies = [ "aho-corasick", "bstr", "log", "regex-automata 0.4.9", "regex-syntax 0.8.5", ] [[package]] name = "group" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", "rand_core", "subtle", ] [[package]] name = "h2" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", "http", "indexmap", "slab", "tokio", "tokio-util", "tracing", ] [[package]] name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", ] [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "hashlink" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ "hashbrown 0.14.5", ] [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hkdf" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] [[package]] name = "hmac" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ "digest", ] [[package]] name = "home" version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "html5ever" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" dependencies = [ "log", "mac", "markup5ever", "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "http" version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" dependencies = [ "bytes", "fnv", "itoa", ] [[package]] name = "http-auth" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "150fa4a9462ef926824cf4519c84ed652ca8f4fbae34cb8af045b5cbcaf98822" dependencies = [ "memchr", ] [[package]] name = "http-body" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http", ] [[package]] name = "http-body-util" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", "http", "http-body", "pin-project-lite", ] [[package]] name = "httparse" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "humantime" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f" [[package]] name = "hyper" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", "futures-util", "h2", 
"http", "http-body", "httparse", "itoa", "pin-project-lite", "smallvec", "tokio", "want", ] [[package]] name = "hyper-rustls" version = "0.27.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" dependencies = [ "futures-util", "http", "hyper", "hyper-util", "rustls", "rustls-pki-types", "tokio", "tokio-rustls", "tower-service", ] [[package]] name = "hyper-tls" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", "hyper", "hyper-util", "native-tls", "tokio", "tokio-native-tls", "tower-service", ] [[package]] name = "hyper-util" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", "futures-util", "http", "http-body", "hyper", "pin-project-lite", "socket2", "tokio", "tower-service", "tracing", ] [[package]] name = "iana-time-zone" version = "0.1.62" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2fd658b06e56721792c5df4475705b6cda790e9298d19d2f8af083457bcd127" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "log", "wasm-bindgen", "windows-core 0.52.0", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = "icu_collections" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" dependencies = [ "displaydoc", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_locid" version = "1.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ "displaydoc", "litemap", "tinystr", "writeable", "zerovec", ] [[package]] name = "icu_locid_transform" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ "displaydoc", "icu_locid", "icu_locid_transform_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_locid_transform_data" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" [[package]] name = "icu_normalizer" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" dependencies = [ "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "utf16_iter", "utf8_iter", "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" [[package]] name = "icu_properties" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" dependencies = [ "displaydoc", "icu_collections", "icu_locid_transform", "icu_properties_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_properties_data" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" [[package]] name = "icu_provider" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" dependencies = [ "displaydoc", "icu_locid", "icu_provider_macros", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_provider_macros" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "idna" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ "idna_adapter", "smallvec", "utf8_iter", ] [[package]] name = "idna_adapter" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ "icu_normalizer", "icu_properties", ] [[package]] name = "ignore" version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", "regex-automata 0.4.9", "same-file", "walkdir", "winapi-util", ] [[package]] name = "im-rc" version = "15.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe" dependencies = [ "bitmaps", "rand_core", "rand_xoshiro", "sized-chunks", "typenum", "version_check", ] [[package]] name = "indexmap" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" dependencies = [ "equivalent", "hashbrown 0.15.2", "serde", ] [[package]] name = "indoc" version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" [[package]] name = "ipnet" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "is-terminal" version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi", "libc", "windows-sys 0.59.0", ] [[package]] name = "is_executable" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4a1b5bad6f9072935961dfbf1cced2f3d129963d091b6f69f007fe04e758ae2" dependencies = [ "winapi", ] [[package]] name = "is_terminal_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itertools" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jiff" version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c04ef77ae73f3cf50510712722f0c4e8b46f5aaa1bf5ffad2ae213e6495e78e5" dependencies = [ "jiff-tzdb-platform", "log", "portable-atomic", "portable-atomic-util", "serde", "windows-sys 0.59.0", ] [[package]] name = "jiff" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "c102670231191d07d37a35af3eb77f1f0dbf7a71be51a962dcd57ea607be7260" dependencies = [ "jiff-static", "log", "portable-atomic", "portable-atomic-util", "serde", ] [[package]] name = "jiff-static" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cdde31a9d349f1b1f51a0b3714a5940ac022976f4b49485fc04be052b183b4c" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "jiff-tzdb" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1283705eb0a21404d2bfd6eef2a7593d240bc42a0bdb39db0ad6fa2ec026524" [[package]] name = "jiff-tzdb-platform" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" dependencies = [ "jiff-tzdb", ] [[package]] name = "jobserver" version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] name = "kstring" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558bf9508a558512042d3095138b1f7b8fe90c5467d94f9f1da28b3731c5dbd1" dependencies = [ "static_assertions", ] [[package]] name = "lalrpop" version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" dependencies = [ "ascii-canvas", "bit-set", "diff", "ena", "is-terminal", "itertools 0.10.5", "lalrpop-util", "petgraph", "regex", "regex-syntax 0.6.29", "string_cache", "term", "tiny-keccak", "unicode-xid", ] [[package]] name = "lalrpop-util" version = "0.19.12" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" dependencies = [ "regex", ] [[package]] name = "lazy-regex" version = "3.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60c7310b93682b36b98fa7ea4de998d3463ccbebd94d935d6b48ba5b6ffa7126" dependencies = [ "lazy-regex-proc_macros", "once_cell", "regex", ] [[package]] name = "lazy-regex-proc_macros" version = "3.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ba01db5ef81e17eb10a5e0f2109d1b3a3e29bac3070fdbd7d156bf7dbd206a1" dependencies = [ "proc-macro2", "quote", "regex", "syn 2.0.100", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lazycell" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "libdbus-sys" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06085512b750d640299b79be4bad3d2fa90a9c00b1fd9e1b46364f66f0485c72" dependencies = [ "cc", "pkg-config", ] [[package]] name = "libgit2-sys" version = "0.17.0+1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" dependencies = [ "cc", "libc", "libssh2-sys", "libz-sys", "openssl-sys", "pkg-config", ] [[package]] name = "libloading" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies 
= [ "cfg-if", "windows-targets 0.52.6", ] [[package]] name = "libnghttp2-sys" version = "0.1.11+1.64.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b6c24e48a7167cffa7119da39d577fa482e66c688a4aac016bee862e1a713c4" dependencies = [ "cc", "libc", ] [[package]] name = "libredox" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags", "libc", "redox_syscall", ] [[package]] name = "libsqlite3-sys" version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ "cc", "pkg-config", "vcpkg", ] [[package]] name = "libssh2-sys" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "220e4f05ad4a218192533b300327f5150e809b54c4ec83b5a1d91833601811b9" dependencies = [ "cc", "libc", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", ] [[package]] name = "libz-sys" version = "1.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "linux-raw-sys" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" [[package]] name = "litemap" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" [[package]] name = "lock_api" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "lockfree-object-pool" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e" [[package]] name = "log" version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "loom" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" dependencies = [ "cfg-if", "generator", "scoped-tls", "tracing", "tracing-subscriber", ] [[package]] name = "m_lexer" version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7e51ebf91162d585a5bae05e4779efc4a276171cb880d61dd6fab11c98467a7" dependencies = [ "regex", ] [[package]] name = "mac" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "mailparse" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3da03d5980411a724e8aaf7b61a7b5e386ec55a7fb49ee3d0ff79efc7e5e7c7e" dependencies = [ "charset", "data-encoding", "quoted_printable", ] [[package]] name = "makefile-lossless" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07ea491a96d7e6a435fa6723adfd304918c2569ac62e5d0ae506a0037f5beecb" dependencies = [ "log", "rowan 0.16.1", ] [[package]] name = "maplit" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "markup5ever" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" dependencies = [ "log", "phf", "phf_codegen", "string_cache", "string_cache_codegen", "tendril", ] [[package]] name = "markup5ever_rcdom" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9521dd6750f8e80ee6c53d65e2e4656d7de37064f3a7a5d2d11d05df93839c2" dependencies = [ "html5ever", "markup5ever", "tendril", "xml5ever", ] [[package]] name = "matchers" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" dependencies = [ "regex-automata 0.1.10", ] [[package]] name = "maybe-async" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" dependencies = [ "libc", ] [[package]] name = "memoffset" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] [[package]] name = "mime" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "minimal-lexical" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" version = "0.8.5" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] [[package]] name = "mio" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] [[package]] name = "miow" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044" dependencies = [ "windows-sys 0.48.0", ] [[package]] name = "native-tls" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" dependencies = [ "libc", "log", "openssl", "openssl-probe", "openssl-sys", "schannel", "security-framework 2.11.1", "security-framework-sys", "tempfile", ] [[package]] name = "new_debug_unreachable" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "nom" version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", ] [[package]] name = "normpath" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8911957c4b1549ac0dc74e30db9c8b0e66ddcd6d7acc33098f4c63a64a6d7ed" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "nu-ansi-term" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" dependencies = [ "overload", "winapi", ] [[package]] name = "num-conv" version = "0.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "object" version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "opam-file-rs" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dc9fde26706c9170630772dd86981d874e9a3107cc456c811e1ee234e0c4863" dependencies = [ "lalrpop", "lalrpop-util", "thiserror 1.0.69", ] [[package]] name = "opener" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0812e5e4df08da354c851a3376fead46db31c2214f849d3de356d774d057681" dependencies = [ "bstr", "dbus", "normpath", "windows-sys 0.59.0", ] [[package]] name = "openssl" version = "0.10.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e14130c6a98cd258fdcb0fb6d744152343ff729cbfcb28c656a9d12b999fbcd" dependencies = [ "bitflags", "cfg-if", "foreign-types", "libc", "once_cell", "openssl-macros", "openssl-sys", ] [[package]] name = "openssl-macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" version = "0.9.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "ordered-float" version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" dependencies = [ "num-traits", ] [[package]] name = "ordered-multimap" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" dependencies = [ "dlv-list", "hashbrown 0.14.5", ] [[package]] name = "orion" version = "0.17.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf2e0b749a7c5fb3d43f06f19eff59b253b5480fa146533676cea27c3606530b" dependencies = [ "fiat-crypto", "subtle", "zeroize", ] [[package]] name = "os_info" version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a604e53c24761286860eba4e2c8b23a0161526476b1de520139d69cdb85a6b5" dependencies = [ "log", "windows-sys 0.52.0", ] [[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "p384" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6" dependencies = [ "ecdsa", "elliptic-curve", "primeorder", "sha2", ] [[package]] name = "parking_lot" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = 
"parking_lot_core" version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-targets 0.52.6", ] [[package]] name = "pasetors" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c54944fa25a6e7c9c5b3315f118d360cc00d555cf53bb2b2fdf32dd31c71b729" dependencies = [ "ct-codecs", "ed25519-compact", "getrandom 0.3.2", "orion", "p384", "rand_core", "regex", "serde", "serde_json", "sha2", "subtle", "time", "zeroize", ] [[package]] name = "patchkit" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31e00ed52656b51f535293e40caf6579b4cf09f25e660fc6e6936ab70e78972f" dependencies = [ "chrono", "lazy-regex", "lazy_static", "once_cell", "regex", ] [[package]] name = "pathdiff" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" [[package]] name = "pem-rfc7468" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" dependencies = [ "base64ct", ] [[package]] name = "pep440_rs" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31095ca1f396e3de32745f42b20deef7bc09077f918b085307e8eab6ddd8fb9c" dependencies = [ "once_cell", "serde", "unicode-width 0.2.0", "unscanny", "version-ranges", ] [[package]] name = "pep508_rs" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "faee7227064121fcadcd2ff788ea26f0d8f2bd23a0574da11eca23bc935bcc05" dependencies = [ "boxcar", "indexmap", "itertools 0.13.0", "once_cell", "pep440_rs", "regex", "rustc-hash 2.1.1", "serde", "smallvec", "thiserror 1.0.69", "unicode-width 0.2.0", "url", "urlencoding", 
"version-ranges", ] [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6" dependencies = [ "memchr", "thiserror 2.0.12", "ucd-trie", ] [[package]] name = "pest_derive" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5" dependencies = [ "pest", "pest_generator", ] [[package]] name = "pest_generator" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "pest_meta" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0" dependencies = [ "once_cell", "pest", "sha2", ] [[package]] name = "petgraph" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", "indexmap", ] [[package]] name = "phf" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ "phf_shared 0.10.0", ] [[package]] name = "phf_codegen" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" dependencies = [ "phf_generator 0.10.0", "phf_shared 0.10.0", ] [[package]] name = "phf_generator" version = "0.10.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared 0.10.0", "rand", ] [[package]] name = "phf_generator" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared 0.11.3", "rand", ] [[package]] name = "phf_shared" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ "siphasher 0.3.11", ] [[package]] name = "phf_shared" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher 1.0.1", ] [[package]] name = "pin-project-lite" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkcs8" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", "spki", ] [[package]] name = "pkg-config" version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "portable-atomic" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] name = "portable-atomic-util" version = "0.2.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" dependencies = [ "portable-atomic", ] [[package]] name = "powerfmt" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy 0.8.24", ] [[package]] name = "precomputed-hash" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "pretty_assertions" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", ] [[package]] name = "primeorder" version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro2" version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "prodash" version = "29.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ee7ce24c980b976607e2d6ae4aae92827994d23fed71659c3ede3f92528b58b" dependencies = [ "log", "parking_lot", ] [[package]] name = "psm" version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f58e5423e24c18cc840e1c98370b3993c6649cd1678b4d24318bcf0a083cbe88" dependencies = [ "cc", ] [[package]] name = "pulldown-cmark" version = "0.13.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0" dependencies = [ "bitflags", "getopts", "memchr", "pulldown-cmark-escape", "unicase", ] [[package]] name = "pulldown-cmark-escape" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" [[package]] name = "pyo3" version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f402062616ab18202ae8319da13fa4279883a2b8a9d9f83f20dbade813ce1884" dependencies = [ "cfg-if", "chrono", "indoc", "libc", "memoffset", "once_cell", "portable-atomic", "pyo3-build-config", "pyo3-ffi", "pyo3-macros", "serde", "unindent", ] [[package]] name = "pyo3-build-config" version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b14b5775b5ff446dd1056212d778012cbe8a0fbffd368029fd9e25b514479c38" dependencies = [ "once_cell", "target-lexicon", ] [[package]] name = "pyo3-ffi" version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ab5bcf04a2cdcbb50c7d6105de943f543f9ed92af55818fd17b660390fc8636" dependencies = [ "libc", "pyo3-build-config", ] [[package]] name = "pyo3-filelike" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8b8fa7c3d2f7852d216243a0a1d94e0228f0a70237d5b944a69b6250ba7d618" dependencies = [ "pyo3", ] [[package]] name = "pyo3-macros" version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fd24d897903a9e6d80b968368a34e1525aeb719d568dba8b3d4bfa5dc67d453" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", "syn 2.0.100", ] [[package]] name = "pyo3-macros-backend" version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36c011a03ba1e50152b4b394b479826cad97e7a21eb52df179cd91ac411cbfbe" dependencies = [ 
"heck", "proc-macro2", "pyo3-build-config", "quote", "syn 2.0.100", ] [[package]] name = "pyproject-toml" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "643af57c3f36ba90a8b53e972727d8092f7408a9ebfbaf4c3d2c17b07c58d835" dependencies = [ "indexmap", "pep440_rs", "pep508_rs", "serde", "thiserror 1.0.69", "toml", ] [[package]] name = "python-pkginfo" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c21f58880fc45e91d29b2f639ab4051aaa6a2b054534c2d343a953347d0dd600" dependencies = [ "flate2", "fs-err", "mailparse", "rfc2047-decoder", "tar", "thiserror 2.0.12", "zip", ] [[package]] name = "quote" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] [[package]] name = "quoted_printable" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "640c9bd8497b02465aeef5375144c26062e0dcd5939dfcbb0f5db76cb8c17c73" [[package]] name = "r-description" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94278584d73149797c58a3b0ae102343348e755515bad64a75709eea96930952" dependencies = [ "deb822-lossless", "rowan 0.16.1", "url", ] [[package]] name = "r-efi" version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" [[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", 
"rand_core", ] [[package]] name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.15", ] [[package]] name = "rand_xoshiro" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ "rand_core", ] [[package]] name = "redox_syscall" version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" dependencies = [ "bitflags", ] [[package]] name = "redox_users" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom 0.2.15", "libredox", "thiserror 1.0.69", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata 0.4.9", "regex-syntax 0.8.5", ] [[package]] name = "regex-automata" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" dependencies = [ "regex-syntax 0.6.29", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax 0.8.5", ] [[package]] name = "regex-syntax" version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" version = "0.8.5" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" dependencies = [ "base64", "bytes", "encoding_rs", "futures-channel", "futures-core", "futures-util", "h2", "http", "http-body", "http-body-util", "hyper", "hyper-rustls", "hyper-tls", "hyper-util", "ipnet", "js-sys", "log", "mime", "native-tls", "once_cell", "percent-encoding", "pin-project-lite", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "system-configuration", "tokio", "tokio-native-tls", "tower", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "windows-registry", ] [[package]] name = "rfc2047-decoder" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc36545d1021456a751b573517cb52e8c339b2f662e6b2778ef629282678de29" dependencies = [ "base64", "charset", "chumsky", "memchr", "quoted_printable", "thiserror 2.0.12", ] [[package]] name = "rfc6979" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ "hmac", "subtle", ] [[package]] name = "ring" version = "0.17.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", "getrandom 0.2.15", "libc", "untrusted", "windows-sys 0.52.0", ] [[package]] name = "roff" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88f8660c1ff60292143c98d08fc6e2f654d722db50410e3f3797d40baaf9d8f3" [[package]] name = "rowan" version = "0.15.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0a542b0253fa46e632d27a1dc5cf7b930de4df8659dc6e720b647fc72147ae3d" dependencies = [ "countme", "hashbrown 0.14.5", "rustc-hash 1.1.0", "text-size", ] [[package]] name = "rowan" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "417a3a9f582e349834051b8a10c8d71ca88da4211e4093528e36b9845f6b5f21" dependencies = [ "countme", "hashbrown 0.14.5", "rustc-hash 1.1.0", "text-size", ] [[package]] name = "rst_renderer" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30f290036cf1302ee0732fbc8b78c2f416ebd8cbeaeccb7e0802b008553bddd0" dependencies = [ "anyhow", "document_tree", "serde-xml-rs", "serde_json", ] [[package]] name = "rusqlite" version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e" dependencies = [ "bitflags", "fallible-iterator", "fallible-streaming-iterator", "hashlink", "libsqlite3-sys", "smallvec", ] [[package]] name = "rust-ini" version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e310ef0e1b6eeb79169a1171daf9abcb87a2e17c03bee2c4bb100b55c75409f" dependencies = [ "cfg-if", "ordered-multimap", "trim-in-place", ] [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc-stable-hash" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08" [[package]] name = "rustfix" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f66156d7471ff4f12253cd7fd76dfe637a595a9418168154e8570f3947fe9a8" dependencies = [ "serde", "serde_json", "thiserror 1.0.69", "tracing", ] [[package]] name = "rustix" version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.4.15", "windows-sys 0.59.0", ] [[package]] name = "rustix" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e56a18552996ac8d29ecc3b190b4fdbb2d91ca4ec396de7bbffaf43f3d637e96" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.9.3", "windows-sys 0.59.0", ] [[package]] name = "rustls" version = "0.23.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "822ee9188ac4ec04a2f0531e55d035fb2de73f18b41a63c70c2712503b6fb13c" dependencies = [ "once_cell", "rustls-pki-types", "rustls-webpki", "subtle", "zeroize", ] [[package]] name = "rustls-pemfile" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ "rustls-pki-types", ] [[package]] name = "rustls-pki-types" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" [[package]] name = "rustls-webpki" version = "0.103.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fef8b8769aaccf73098557a87cd1816b4f9c7c16811c9c77142aa695c16f2c03" dependencies = [ "ring", "rustls-pki-types", "untrusted", ] [[package]] name = "rustversion" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" [[package]] name = "ryu" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "schannel" version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "scoped-tls" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sec1" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct", "der", "generic-array", "pkcs8", "subtle", "zeroize", ] [[package]] name = "security-framework" version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags", "core-foundation 0.9.4", "core-foundation-sys", "libc", "security-framework-sys", ] [[package]] name = "security-framework" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" dependencies = [ "bitflags", "core-foundation 0.10.0", "core-foundation-sys", "libc", "security-framework-sys", 
] [[package]] name = "security-framework-sys" version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "select" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5910c1d91bd7e6e178c0f8eb9e4ad01f814064b4a1c0ae3c906224a3cbf12879" dependencies = [ "bit-set", "html5ever", "markup5ever_rcdom", ] [[package]] name = "semver" version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" dependencies = [ "serde", ] [[package]] name = "serde" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde-untagged" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "299d9c19d7d466db4ab10addd5703e4c615dec2a5a16dbbafe191045e87ee66e" dependencies = [ "erased-serde", "serde", "typeid", ] [[package]] name = "serde-value" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ "ordered-float", "serde", ] [[package]] name = "serde-xml-rs" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65162e9059be2f6a3421ebbb4fef3e74b7d9e7c60c50a0e292c6239f19f1edfa" dependencies = [ "log", "serde", "thiserror 1.0.69", "xml-rs", ] [[package]] name = "serde_derive" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "serde_ignored" 
version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "566da67d80e92e009728b3731ff0e5360cb181432b8ca73ea30bb1d170700d76" dependencies = [ "serde", ] [[package]] name = "serde_json" version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "serde_spanned" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] [[package]] name = "serde_urlencoded" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", "itoa", "ryu", "serde", ] [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ "indexmap", "itoa", "ryu", "serde", "unsafe-libyaml", ] [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "sha1_smol" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" [[package]] name = "sha2" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "sharded-slab" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] [[package]] name = "shell-escape" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" [[package]] name = "shell-words" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] [[package]] name = "signature" version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", "rand_core", ] [[package]] name = "simd-adler32" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" [[package]] name = "siphasher" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "siphasher" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "sized-chunks" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" dependencies = [ "bitmaps", "typenum", ] [[package]] name = "slab" 
version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "smallvec" version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "smawk" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "socket2" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "spki" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", ] [[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "stacker" version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "601f9201feb9b09c00266478bf459952b9ef9a6b94edb2f21eba14ab681a60a9" dependencies = [ "cc", "cfg-if", "libc", "psm", "windows-sys 0.59.0", ] [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_cache" version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot", "phf_shared 0.11.3", "precomputed-hash", 
"serde", ] [[package]] name = "string_cache_codegen" version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ "phf_generator 0.11.3", "phf_shared 0.11.3", "proc-macro2", "quote", ] [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "supports-hyperlinks" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b" [[package]] name = "supports-unicode" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" [[package]] name = "syn" version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "syn" version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] [[package]] name = "synstructure" version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", "unicode-xid", ] [[package]] name = "synstructure" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "system-configuration" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags", "core-foundation 0.9.4", "system-configuration-sys", ] [[package]] name = "system-configuration-sys" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "tar" version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" dependencies = [ "filetime", "libc", "xattr", ] [[package]] name = "target-lexicon" version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tempfile" version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ "fastrand", "getrandom 0.3.2", "once_cell", "rustix 1.0.3", "windows-sys 0.59.0", ] [[package]] name = "tendril" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" dependencies = [ "futf", "mac", "utf-8", ] [[package]] name = "term" version = "0.7.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" dependencies = [ "dirs-next", "rustversion", "winapi", ] [[package]] name = "terminal_size" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" dependencies = [ "rustix 1.0.3", "windows-sys 0.59.0", ] [[package]] name = "text-size" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "textwrap" version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" dependencies = [ "smawk", "unicode-linebreak", "unicode-width 0.2.0", ] [[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ "thiserror-impl 2.0.12", ] [[package]] name = "thiserror-impl" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "thiserror-impl" version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "thread_local" version = "1.1.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", ] [[package]] name = "time" version = "0.3.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" dependencies = [ "deranged", "itoa", "num-conv", "powerfmt", "serde", "time-core", "time-macros", ] [[package]] name = "time-core" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" [[package]] name = "time-macros" version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" dependencies = [ "num-conv", "time-core", ] [[package]] name = "tiny-keccak" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" dependencies = [ "crunchy", ] [[package]] name = "tinystr" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ "displaydoc", "zerovec", ] [[package]] name = "tinyvec" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" version = "1.44.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f382da615b842244d4b8738c82ed1275e6c5dd90c459a30941cd07080b06c91a" dependencies = [ "backtrace", 
"bytes", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "tokio-native-tls" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", "tokio", ] [[package]] name = "tokio-rustls" version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ "rustls", "tokio", ] [[package]] name = "tokio-util" version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b9590b93e6fcc1739458317cccd391ad3955e2bde8913edf6f95f9e65a8f034" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", ] [[package]] name = "toml" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit", ] [[package]] name = "toml_datetime" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", "winnow 0.7.4", ] [[package]] name = "tower" version = "0.5.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", "sync_wrapper", "tokio", "tower-layer", "tower-service", ] [[package]] name = "tower-layer" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "tracing-chrome" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf0a738ed5d6450a9fb96e86a23ad808de2b727fd1394585da5cdd6788ffe724" dependencies = [ "serde_json", "tracing-core", "tracing-subscriber", ] [[package]] name = "tracing-core" version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", ] [[package]] name = "tracing-log" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ "log", "once_cell", "tracing-core", ] [[package]] name = "tracing-subscriber" version = "0.3.19" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", "once_cell", "regex", "sharded-slab", "smallvec", "thread_local", "tracing", "tracing-core", "tracing-log", ] [[package]] name = "trim-in-place" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "343e926fc669bc8cde4fa3129ab681c63671bae288b1f1081ceee6d9d37904fc" [[package]] name = "try-lock" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typeid" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" [[package]] name = "typenum" version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "ucd-trie" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicase" version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-bom" version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217" [[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-linebreak" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-normalization" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-width" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "unicode-xid" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "unindent" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" [[package]] name = "unsafe-libyaml" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "unscanny" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9df2af067a7953e9c3831320f35c1cc0600c30d44d9f7a12b01db1cd88d6b47" [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "uo_rst_parser" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80277544afabbdd7ddd9117d7b5b2d61dba3819609f8810c619e590d8d1d8cdb" dependencies = [ "anyhow", "document_tree", "pest", "pest_derive", ] [[package]] name = "upstream-ontologist" version = "0.2.4" dependencies = 
[ "async-trait", "breezyshim", "chrono", "clap", "configparser", "debbugs", "debcargo", "debian-changelog", "debian-control", "debian-copyright", "debian-watch", "debversion", "distro-info", "env_logger", "futures", "gix-config", "html5ever", "lazy-regex", "lazy_static", "log", "makefile-lossless", "maplit", "opam-file-rs", "percent-encoding", "pretty_assertions", "pulldown-cmark", "pyo3", "pyproject-toml", "python-pkginfo", "quote", "r-description", "regex", "reqwest", "rst_renderer", "rust-ini", "select", "semver", "serde", "serde_json", "serde_yaml", "shlex", "tempfile", "textwrap", "tokio", "toml", "uo_rst_parser", "url", "xmltree", ] [[package]] name = "url" version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", ] [[package]] name = "urlencoding" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" [[package]] name = "utf-8" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf16_iter" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" [[package]] name = "utf8_iter" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "valuable" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version-ranges" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8d079415ceb2be83fc355adbadafe401307d5c309c7e6ade6638e6f9f42f42d" dependencies = [ "smallvec", ] [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "want" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ "try-lock", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasite" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", 
"once_cell", "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", "syn 2.0.100", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" version = "0.4.50" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" dependencies = [ "cfg-if", "js-sys", "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" dependencies = [ "unicode-ident", ] [[package]] name = "web-sys" version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "whoami" version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" dependencies = [ "redox_syscall", "wasite", ] [[package]] name = "winapi" version = "0.3.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" dependencies = [ "windows-core 0.58.0", "windows-targets 0.52.6", ] [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-core" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" dependencies = [ "windows-implement", "windows-interface", "windows-result 0.2.0", "windows-strings 0.1.0", "windows-targets 0.52.6", ] [[package]] name = "windows-implement" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "windows-interface" 
version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "windows-link" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" [[package]] name = "windows-registry" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" dependencies = [ "windows-result 0.3.2", "windows-strings 0.3.1", "windows-targets 0.53.0", ] [[package]] name = "windows-result" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-result" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" dependencies = [ "windows-result 0.2.0", "windows-targets 0.52.6", ] [[package]] name = "windows-strings" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" dependencies = [ "windows-link", ] [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ "windows-targets 0.48.5", ] [[package]] name = "windows-sys" version = "0.52.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-targets" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm 0.48.5", "windows_aarch64_msvc 0.48.5", "windows_i686_gnu 0.48.5", "windows_i686_msvc 0.48.5", "windows_x86_64_gnu 0.48.5", "windows_x86_64_gnullvm 0.48.5", "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows-targets" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" dependencies = [ "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", "windows_i686_gnu 0.53.0", "windows_i686_gnullvm 0.53.0", "windows_i686_msvc 0.53.0", "windows_x86_64_gnu 0.53.0", "windows_x86_64_gnullvm 0.53.0", "windows_x86_64_msvc 0.53.0", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = 
"windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" [[package]] name = "windows_x86_64_msvc" version = "0.48.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" version = "0.6.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e90edd2ac1aa278a5c4599b1d89cf03074b610800f866d4026dc199d7929a28" dependencies = [ "memchr", ] [[package]] name = "winnow" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e97b544156e9bebe1a0ffbc03484fc1ffe3100cbce3ffb17eac35f7cdd7ab36" dependencies = [ "memchr", ] [[package]] name = "wit-bindgen-rt" version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags", ] [[package]] name = "write16" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" [[package]] name = "writeable" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "xattr" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" dependencies = [ "libc", "rustix 1.0.3", ] [[package]] name = "xml-rs" version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c5b940ebc25896e71dd073bad2dbaa2abfe97b0a391415e22ad1326d9c54e3c4" [[package]] name = "xml5ever" version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4034e1d05af98b51ad7214527730626f019682d797ba38b51689212118d8e650" dependencies = [ "log", "mac", "markup5ever", ] [[package]] name = "xmltree" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b619f8c85654798007fb10afa5125590b43b088c225a25fc2fec100a9fad0fc6" dependencies = [ "xml-rs", ] [[package]] name = "yansi" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yoke" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", "yoke-derive", "zerofrom", ] [[package]] name = "yoke-derive" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", "synstructure 0.13.1", ] [[package]] name = "zerocopy" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "zerocopy-derive 0.7.35", ] [[package]] name = "zerocopy" version = "0.8.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" dependencies = [ "zerocopy-derive 0.8.24", ] [[package]] name = "zerocopy-derive" version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] 
name = "zerocopy-derive" version = "0.8.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "zerofrom" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", "synstructure 0.13.1", ] [[package]] name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" [[package]] name = "zerovec" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" dependencies = [ "yoke", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", "syn 2.0.100", ] [[package]] name = "zip" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27c03817464f64e23f6f37574b4fdc8cf65925b5bfd2b0f2aedf959791941f88" dependencies = [ "arbitrary", "crc32fast", "crossbeam-utils", "flate2", "indexmap", "memchr", "zopfli", ] [[package]] name = "zopfli" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946" dependencies = [ "bumpalo", "crc32fast", "lockfree-object-pool", "log", 
"once_cell", "simd-adler32", ] upstream-ontologist-0.2.4/Cargo.toml0000644000000113610000000000100130760ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" name = "upstream-ontologist" version = "0.2.4" authors = ["Jelmer Vernooij "] build = "build.rs" include = [ "testdata", "readme_tests", "man", "build.rs", "src/*.rs", "README.md", "src/**/*.rs", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false default-run = "guess-upstream-metadata" description = "tracking of upstream project metadata" homepage = "https://github.com/jelmer/upstream-ontologist" readme = "README.md" license = "GPL-2.0+" repository = "https://github.com/jelmer/upstream-ontologist.git" [features] cargo = ["dep:toml"] cli = [ "dep:clap", "dep:env_logger", ] debcargo = ["dep:debcargo"] debian = [ "dep:debian-watch", "dep:makefile-lossless", "dep:debian-changelog", "dep:debian-control", "dep:debian-copyright", "dep:debversion", ] debversion = ["dep:debversion"] default = [ "git-config", "launchpad", "opam", "dist-ini", "cargo", "r-description", "pyproject-toml", "python-pkginfo", "debian", "pyo3", "setup-cfg", ] dist-ini = ["dep:rust-ini"] git-config = ["dep:gix-config"] launchpad = ["dep:distro-info"] opam = ["dep:opam-file-rs"] pyo3 = ["dep:pyo3"] pyproject-toml = ["dep:pyproject-toml"] python-pkginfo = ["dep:python-pkginfo"] r-description = ["dep:r-description"] setup-cfg = ["dep:rust-ini"] [lib] name = "upstream_ontologist" path = "src/lib.rs" [[bin]] name = "autocodemeta" path = "src/bin/autocodemeta.rs" 
required-features = ["cli"] [[bin]] name = "autodoap" path = "src/bin/autodoap.rs" required-features = ["cli"] [[bin]] name = "guess-upstream-metadata" path = "src/bin/guess-upstream-metadata.rs" required-features = ["cli"] [dependencies.async-trait] version = "0.1.88" [dependencies.breezyshim] version = ">=0.1.227" features = ["auto-initialize"] default-features = false [dependencies.chrono] version = "0.4" [dependencies.clap] version = "4" features = [ "derive", "env", ] optional = true [dependencies.configparser] version = "3" [dependencies.debbugs] version = "0.1" [dependencies.debcargo] version = "2.7" optional = true [dependencies.debian-changelog] version = "0.2" optional = true [dependencies.debian-control] version = "0.1" optional = true [dependencies.debian-copyright] version = "0.1" optional = true [dependencies.debian-watch] version = "0.2" optional = true [dependencies.debversion] version = "0.4" optional = true [dependencies.distro-info] version = "0.4" optional = true [dependencies.env_logger] version = ">=0.10" optional = true [dependencies.futures] version = "0.3.30" [dependencies.gix-config] version = ">=0.40" optional = true [dependencies.html5ever] version = ">=0.26" [dependencies.lazy-regex] version = ">=2" [dependencies.lazy_static] version = "1" [dependencies.log] version = "0.4" [dependencies.makefile-lossless] version = "0.2" optional = true [dependencies.maplit] version = "1" [dependencies.opam-file-rs] version = "0.1" optional = true [dependencies.percent-encoding] version = "2" [dependencies.pulldown-cmark] version = ">=0.9" [dependencies.pyo3] version = ">=0.22,<0.24" optional = true [dependencies.pyproject-toml] version = "0.13" optional = true [dependencies.python-pkginfo] version = ">=0.5" optional = true [dependencies.r-description] version = ">=0.2.2" optional = true [dependencies.regex] version = "1" [dependencies.reqwest] version = ">=0.11" features = [ "blocking", "json", ] default-features = false [dependencies.rst_renderer] 
version = "0.4.0" [dependencies.rust-ini] version = ">=0.18" optional = true [dependencies.select] version = "0.6.0" [dependencies.semver] version = "1" features = ["serde"] [dependencies.serde] version = "1.0" features = ["derive"] [dependencies.serde_json] version = "1.0" [dependencies.serde_yaml] version = "0.9" [dependencies.shlex] version = "1.1.0" [dependencies.textwrap] version = ">=0.16" [dependencies.tokio] version = "1.44.0" features = ["full"] [dependencies.toml] version = ">=0.8" optional = true [dependencies.uo_rst_parser] version = "0.4.2" [dependencies.url] version = "2" [dependencies.xmltree] version = ">=0.10" [dev-dependencies.pretty_assertions] version = ">=1.4" [dev-dependencies.tempfile] version = ">=3" [build-dependencies.quote] version = "1.0.40" upstream-ontologist-0.2.4/Cargo.toml.orig000064400000000000000000000066461046102023000165710ustar 00000000000000[package] name = "upstream-ontologist" version = "0.2.4" authors = [ "Jelmer Vernooij ",] edition = "2021" license = "GPL-2.0+" description = "tracking of upstream project metadata" repository = "https://github.com/jelmer/upstream-ontologist.git" homepage = "https://github.com/jelmer/upstream-ontologist" default-run = "guess-upstream-metadata" include = ["testdata", "readme_tests", "man", "build.rs", "src/*.rs", "README.md", "src/**/*.rs"] [dependencies] log = "0.4" shlex = "1.1.0" serde_json = "1.0" lazy_static = "1" regex = "1" url = "2" xmltree = ">=0.10" configparser = "3" serde_yaml = "0.9" percent-encoding = "2" html5ever = ">=0.26" chrono = "0.4" textwrap = ">=0.16" lazy-regex = ">=2" breezyshim = { version = ">=0.1.227", default-features = false, features = ["auto-initialize"] } debian-watch = { version = "0.2", optional = true } debian-changelog = { version = "0.2", optional = true } debbugs = "0.1" clap = { version = "4", features = ["derive", "env"], optional = true } maplit = "1" env_logger = { version = ">=0.10", optional = true } makefile-lossless = { version = "0.2", optional 
= true } debian-copyright = { version = "0.1", optional = true } debian-control = { version = "0.1", optional = true } pulldown-cmark = ">=0.9" debcargo = { version = "2.7", optional = true } # See https://github.com/flying-sheep/rust-rst/issues/56 uo_rst_parser = "0.4.2" rst_renderer = "0.4.0" #rst_parser = { git = "https://github.com/flying-sheep/rust-rst" } #rst_renderer = { git = "https://github.com/flying-sheep/rust-rst" } select = "0.6.0" semver = { version = "1", features = ["serde"] } async-trait = "0.1.88" tokio = { version = "1.44.0", features = ["full"] } futures = "0.3.30" debversion = { version = "0.4", optional = true } [features] default = ["git-config", "launchpad", "opam", "dist-ini", "cargo", "r-description", "pyproject-toml", "python-pkginfo", "debian", "pyo3", "setup-cfg"] git-config = ["dep:gix-config"] launchpad = ["dep:distro-info"] opam = ["dep:opam-file-rs"] dist-ini = ["dep:rust-ini"] cargo = ["dep:toml"] r-description = ["dep:r-description"] pyproject-toml = ["dep:pyproject-toml"] python-pkginfo = ["dep:python-pkginfo"] setup-cfg = ["dep:rust-ini"] debcargo = ["dep:debcargo"] debian = ["dep:debian-watch", "dep:makefile-lossless", "dep:debian-changelog", "dep:debian-control", "dep:debian-copyright", "dep:debversion"] pyo3 = ["dep:pyo3"] cli = ["dep:clap", "dep:env_logger"] debversion = ["dep:debversion"] [lib] [dev-dependencies] pretty_assertions = ">=1.4" tempfile = ">=3" [build-dependencies] quote = "1.0.40" [dependencies.pyo3] version = ">=0.22,<0.24" optional = true [dependencies.reqwest] version = ">=0.11" features = [ "blocking", "json",] default-features = false [dependencies.rust-ini] version = ">=0.18" optional = true [dependencies.serde] version = "1.0" features = [ "derive",] [dependencies.opam-file-rs] version = "0.1" optional = true [dependencies.gix-config] version = ">=0.40" optional = true [dependencies.distro-info] version = "0.4" optional = true [dependencies.toml] version = ">=0.8" optional = true 
[dependencies.r-description] version = ">=0.2.2" optional = true [dependencies.pyproject-toml] version = "0.13" optional = true [dependencies.python-pkginfo] version = ">=0.5" optional = true [[bin]] name = "autodoap" required-features = ["cli"] [[bin]] name = "autocodemeta" required-features = ["cli"] [[bin]] name = "guess-upstream-metadata" required-features = ["cli"] upstream-ontologist-0.2.4/README.md000064400000000000000000000110421046102023000151430ustar 00000000000000Upstream Ontologist =================== The upstream ontologist provides a common interface for finding metadata about upstream software projects. It will gather information from any sources available, prioritize data that it has higher confidence in as well as report the confidence for each of the bits of metadata. The ontologist originated in Debian and the currently reported metadata fields are loosely based on [DEP-12](https://dep-team.pages.debian.net/deps/dep12), but it is meant to be distribution-agnostic. Provided Fields --------------- Standard fields: * ``Name``: human name of the upstream project * ``Contact``: contact address of some sort of the upstream (e-mail, mailing list URL) * ``Repository``: VCS URL * ``Repository-Browse``: Web URL for viewing the VCS * ``Bug-Database``: Bug database URL (for web viewing, generally) * ``Bug-Submit``: URL to use to submit new bugs (either on the web or an e-mail address) * ``Screenshots``: List of URLs with screenshots * ``Archive``: Archive used - e.g. SourceForge * ``Security-Contact``: e-mail or URL with instructions for reporting security issues * ``Documentation``: Link to documentation on the web Extensions for upstream-ontologist, not defined in DEP-12: * ``SourceForge-Project``: sourceforge project name * ``Wiki``: Wiki URL * ``Summary``: one-line description of the project * ``Description``: longer description of the project * ``License``: Single line license (e.g. 
"GPL 2.0") * ``Copyright``: List of copyright holders * ``Version``: Current upstream version * ``Security-MD``: URL to markdown file with security policy * ``Author``: List of people who contributed to the project * ``Maintainer``: The maintainer of the project * ``Funding``: URL to more information about funding * ``Homepage``: homepage URL (present in ``debian/control`` in Debian packages) Supported Data Sources ---------------------- At the moment, the ontologist can read metadata from the following upstream data sources: * Python package metadata (PKG-INFO, setup.py, setup.cfg, pyproject.timl) * [package.json](https://docs.npmjs.com/cli/v7/configuring-npm/package-json) * [composer.json](https://getcomposer.org/doc/04-schema.md) * [package.xml](https://pear.php.net/manual/en/guide.developers.package2.dependencies.php) * Perl package metadata (dist.ini, META.json, META.yml, Makefile.PL) * [Perl POD files](https://perldoc.perl.org/perlpod) * GNU configure files * [R DESCRIPTION files](https://r-pkgs.org/description.html) * [Rust Cargo.toml](https://doc.rust-lang.org/cargo/reference/manifest.html) * [Maven pom.xml](https://maven.apache.org/pom.html) * [metainfo.xml](https://www.freedesktop.org/software/appstream/docs/chap-Metadata.html) * [.git/config](https://git-scm.com/docs/git-config) * SECURITY.md * [DOAP](https://github.com/ewilderj/doap) * [Haskell cabal files](https://cabal.readthedocs.io/en/3.4/cabal-package.html) * [go.mod](https://golang.org/doc/modules/gomod-ref) * [ruby gemspec files](https://guides.rubygems.org/specification-reference/) * [nuspec files](https://docs.microsoft.com/en-us/nuget/reference/nuspec) * [OPAM files](https://opam.ocaml.org/doc/Manual.html#Package-definitions) * Debian packaging metadata (debian/watch, debian/control, debian/rules, debian/get-orig-source.sh, debian/copyright, debian/patches) * Dart's [pubspec.yaml](https://dart.dev/tools/pub/pubspec) * meson.build It will also scan README and INSTALL for possible upstream 
repository URLs (and will attempt to verify that those match the local repository). In addition to local files, it can also consult external directories using their APIs: * [GitHub](https://github.com/) * [SourceForge](https://sourceforge.net/) * [repology](https://www.repology.org/) * [Launchpad](https://launchpad.net/) * [PECL](https://pecl.php.net/) * [AUR](https://aur.archlinux.org/) Example Usage ------------- The easiest way to use the upstream ontologist is by invoking the ``guess-upstream-metadata`` command in a software project: ```console $ guess-upstream-metadata ~/src/dulwich Security-MD: https://github.com/dulwich/dulwich/tree/HEAD/SECURITY.md Name: dulwich Version: 0.20.15 Bug-Database: https://github.com/dulwich/dulwich/issues Repository: https://www.dulwich.io/code/ Summary: Python Git Library Bug-Submit: https://github.com/dulwich/dulwich/issues/new ``` Alternatively, there is a Python API as part of the ``upstream_ontologist`` Python package. There are also ``autocodemeta`` and ``autodoap`` commands that can generate output in the [codemeta](https://codemeta.github.io/) and [DOAP](https://github.com/ewilderj/doap) formats, respectively. upstream-ontologist-0.2.4/build.rs000064400000000000000000000153661046102023000153460ustar 00000000000000use quote::{format_ident, quote}; use std::env; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; fn generate_upstream_tests(testdata_dir: &Path, dest_path: &Path) -> std::io::Result<()> { let mut w = fs::File::create(dest_path)?; write!( w, "{}", quote! { use std::path::PathBuf; use pretty_assertions::assert_eq; } )?; for entry in fs::read_dir(testdata_dir).unwrap() { let entry = entry.unwrap(); let path = entry.path(); if path.is_dir() { // Get the directory name to use in the test function name let dir_name = path.file_name().unwrap().to_str().unwrap(); let fn_name = format_ident!("test_{}", dir_name.replace(['.', '-'], "_")); let test = quote! 
{ #[tokio::test] async fn #fn_name() { let dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("testdata").join(#dir_name); let expected: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(dir.join("expected.yaml")).unwrap()).unwrap(); let actual: serde_yaml::Value = serde_yaml::to_value(crate::get_upstream_info(&dir, Some(true), Some(false), Some(false), Some(false)).await.unwrap()).unwrap(); assert_eq!(expected, actual); } }; writeln!(w, "{}", test)?; } } Ok(()) } fn generate_readme_tests(testdata_dir: &Path, dest_path: &Path) -> std::io::Result<()> { let mut w = fs::File::create(dest_path)?; let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); write!( w, "{}", quote! { use std::path::PathBuf; use pretty_assertions::assert_eq; use crate::readme::{description_from_readme_md, description_from_readme_rst, description_from_readme_plain}; } )?; for entry in fs::read_dir(testdata_dir).unwrap() { let entry = entry.unwrap(); let path = manifest_dir.join(entry.path()); if path.is_dir() { // Get the directory name to use in the test function name let dir_name = entry.file_name().to_str().unwrap().to_string(); if path.join("README.md").exists() { let fn_name = format_ident!("test_{}_readme_md", dir_name.replace(['.', '-'], "_")); let test = quote! 
{ #[test] fn #fn_name() { let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("readme_tests").join(#dir_name); let readme_md = std::fs::read_to_string(path.join("README.md")).unwrap(); let expected_description = if path.join("description").exists() { Some(std::fs::read_to_string(path.join("description")).unwrap()) } else { None }; let (actual_description, actual_md) = description_from_readme_md(&readme_md).unwrap(); let actual_md = serde_yaml::to_value(actual_md).unwrap(); assert_eq!(actual_description, expected_description); if path.join("expected.yaml").exists() { let expected_md: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(path.join("expected.yaml")).unwrap()).unwrap(); assert_eq!(actual_md, expected_md); } } }; write!(w, "{}", test)?; } else if path.join("README.rst").exists() { let fn_name = format_ident!("test_{}_readme_rst", dir_name.replace(['.', '-'], "_")); let test = quote! { #[test] fn #fn_name() { let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("readme_tests").join(#dir_name); let readme_rst = std::fs::read_to_string(path.join("README.rst")).unwrap(); let expected_description = if path.join("description").exists() { Some(std::fs::read_to_string(path.join("description")).unwrap()) } else { None }; let (actual_description, actual_md) = description_from_readme_rst(&readme_rst).unwrap(); let actual_md = serde_yaml::to_value(actual_md).unwrap(); assert_eq!(actual_description, expected_description); if path.join("expected.yaml").exists() { let expected_md: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(path.join("expected.yaml")).unwrap()).unwrap(); assert_eq!(actual_md, expected_md); } } }; write!(w, "{}", test)?; } else { let fn_name = format_ident!("test_{}_readme_plain", dir_name.replace(['.', '-'], "_")); let test = quote! 
{ #[test] fn #fn_name() { let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("readme_tests").join(#dir_name); let readme_plain = std::fs::read_to_string(path.join("README")).unwrap(); let expected_description = if path.join("description").exists() { Some(std::fs::read_to_string(path.join("description")).unwrap()) } else { None }; let (actual_description, actual_md) = description_from_readme_plain(&readme_plain).unwrap(); let actual_md = serde_yaml::to_value(actual_md).unwrap(); assert_eq!(actual_description, expected_description); if path.join("expected.yaml").exists() { let expected_md: serde_yaml::Value = serde_yaml::from_reader(std::fs::File::open(path.join("expected.yaml")).unwrap()).unwrap(); assert_eq!(actual_md, expected_md); } } }; write!(w, "{}", test)?; } } } Ok(()) } fn main() { let out_dir = env::var("OUT_DIR").unwrap(); generate_upstream_tests( Path::new("testdata"), &Path::new(&out_dir).join("upstream_tests.rs"), ) .unwrap(); generate_readme_tests( Path::new("readme_tests"), &Path::new(&out_dir).join("readme_tests.rs"), ) .unwrap(); } upstream-ontologist-0.2.4/man/autodoap.1000064400000000000000000000023471046102023000163450ustar 00000000000000.TH AUTODOAP 1 'September 2023' 'autodoap 0.1.36' 'User Commands' .SH NAME autodoap \- automatically write DOAP files for upstream projects .SH DESCRIPTION autodoap [\-h] [\-\-trust] [\-\-disable\-net\-access] [\-\-check] [\-\-consult\-external\-directory] [\-\-version] [path] This tool tries to guess upstream metadata (Homepage, Contact, VCS Repository) information for an upstream project. It does this by parsing various files in the package, and possibly calling out to external services (unless --disable-net-access is specified). Data is written to standard out in DOAP. .SS "positional arguments:" .IP path .SS "optional arguments:" .TP \fB\-h\fR, \fB\-\-help\fR show this help message and exit .TP \fB\-\-trust\fR Whether to allow running code from the package. 
.TP \fB\-\-disable\-net\-access\fR Do not probe external services. .TP \fB\-\-check\fR Check guessed metadata against external sources. .TP \fB\-\-consult\-external\-directory\fR Pull in external (not maintained by upstream) directory data .TP \fB\-\-version\fR show program's version number and exit .SH "SEE ALSO" \&\fIapply-multiarch-hints\fR\|(1) \&\fIguess-upstream-metadata\fR\|(1) \&\fIlintian-brush\fR\|(1) \&\fIlintian\fR\|(1) .SH AUTHORS Jelmer Vernooij upstream-ontologist-0.2.4/man/guess-upstream-metadata.1000064400000000000000000000023331046102023000212660ustar 00000000000000.TH GUESS-UPSTREAM-METADATA 1 'September 2023' 'guess-upstream-metadata 0.1.36' 'User Commands' .SH NAME guess-upstream-metadata \- guess upstream package metadata .SH DESCRIPTION guess\-upstream\-metadata [\-h] [\-\-trust] [\-\-disable\-net\-access] [\-\-check] [\-\-consult\-external\-directory] [\-\-version] [path] This tool tries to guess upstream metadata (Homepage, Contact, VCS Repository) for an upstream project. It does this by parsing various files in the package, and possibly calling out to external services (unless --disable-net-access is specified). .SS "positional arguments:" .IP path .SS "optional arguments:" .TP \fB\-h\fR, \fB\-\-help\fR show this help message and exit .TP \fB\-\-trust\fR Whether to allow running code from the package. .TP \fB\-\-disable\-net\-access\fR Do not probe external services. .TP \fB\-\-check\fR Check guessed metadata against external sources. .TP \fB\-\-consult\-external\-directory\fR Pull in external (not maintained by upstream) directory data .TP \fB\-\-version\fR show program's version number and exit .SH "SEE ALSO" \&\fIapply-multiarch-hints\fR\|(1) \&\fIguess-upstream-metadata\fR\|(1) \&\fIlintian-brush\fR\|(1) \&\fIlintian\fR\|(1) .SH AUTHORS Jelmer Vernooij upstream-ontologist-0.2.4/readme_tests/aiozipkin/README.rst000064400000000000000000000147421046102023000220410ustar 00000000000000aiozipkin ========= .. 
image:: https://github.com/aio-libs/aiozipkin/workflows/CI/badge.svg :target: https://github.com/aio-libs/aiozipkin/actions?query=workflow%3ACI .. image:: https://codecov.io/gh/aio-libs/aiozipkin/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/aiozipkin .. image:: https://api.codeclimate.com/v1/badges/1ff813d5cad2d702cbf1/maintainability :target: https://codeclimate.com/github/aio-libs/aiozipkin/maintainability :alt: Maintainability .. image:: https://img.shields.io/pypi/v/aiozipkin.svg :target: https://pypi.python.org/pypi/aiozipkin .. image:: https://readthedocs.org/projects/aiozipkin/badge/?version=latest :target: http://aiozipkin.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status .. image:: https://badges.gitter.im/Join%20Chat.svg :target: https://gitter.im/aio-libs/Lobby :alt: Chat on Gitter **aiozipkin** is Python 3.6+ module that adds distributed tracing capabilities from asyncio_ applications with zipkin (http://zipkin.io) server instrumentation. zipkin_ is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in microservice architectures. It manages both the collection and lookup of this data. Zipkin’s design is based on the Google Dapper paper. Applications are instrumented with **aiozipkin** report timing data to zipkin_. The Zipkin UI also presents a Dependency diagram showing how many traced requests went through each application. If you are troubleshooting latency problems or errors, you can filter or sort all traces based on the application, length of trace, annotation, or timestamp. .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/zipkin_animation2.gif :alt: zipkin ui animation Features ======== * Distributed tracing capabilities to **asyncio** applications. * Support zipkin_ ``v2`` protocol. * Easy to use API. * Explicit context handling, no thread local variables. * Can work with jaeger_ and stackdriver_ through zipkin compatible API. 
zipkin vocabulary ----------------- Before code lets learn important zipkin_ vocabulary, for more detailed information please visit https://zipkin.io/pages/instrumenting .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/zipkin_glossary.png :alt: zipkin ui glossary * **Span** represents one specific method (RPC) call * **Annotation** string data associated with a particular timestamp in span * **Tag** - key and value associated with given span * **Trace** - collection of spans, related to serving particular request Simple example -------------- .. code:: python import asyncio import aiozipkin as az async def run(): # setup zipkin client zipkin_address = 'http://127.0.0.1:9411/api/v2/spans' endpoint = az.create_endpoint( "simple_service", ipv4="127.0.0.1", port=8080) tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0) # create and setup new trace with tracer.new_trace(sampled=True) as span: # give a name for the span span.name("Slow SQL") # tag with relevant information span.tag("span_type", "root") # indicate that this is client span span.kind(az.CLIENT) # make timestamp and name it with START SQL query span.annotate("START SQL SELECT * FROM") # imitate long SQL query await asyncio.sleep(0.1) # make other timestamp and name it "END SQL" span.annotate("END SQL") await tracer.close() if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(run()) aiohttp example --------------- *aiozipkin* includes *aiohttp* server instrumentation, for this create `web.Application()` as usual and install aiozipkin plugin: .. code:: python import aiozipkin as az def init_app(): host, port = "127.0.0.1", 8080 app = web.Application() endpoint = az.create_endpoint("AIOHTTP_SERVER", ipv4=host, port=port) tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0) az.setup(app, tracer) That is it, plugin adds middleware that tries to fetch context from headers, and create/join new trace. 
Optionally on client side you can add propagation headers in order to force tracing and to see network latency between client and server. .. code:: python import aiozipkin as az endpoint = az.create_endpoint("AIOHTTP_CLIENT") tracer = await az.create(zipkin_address, endpoint) with tracer.new_trace() as span: span.kind(az.CLIENT) headers = span.context.make_headers() host = "http://127.0.0.1:8080/api/v1/posts/{}".format(i) resp = await session.get(host, headers=headers) await resp.text() Documentation ------------- http://aiozipkin.readthedocs.io/ Installation ------------ Installation process is simple, just:: $ pip install aiozipkin Support of other collectors =========================== **aiozipkin** can work with any other zipkin_ compatible service, currently we tested it with jaeger_ and stackdriver_. Jaeger support -------------- jaeger_ supports zipkin_ span format as result it is possible to use *aiozipkin* with jaeger_ server. You just need to specify *jaeger* server address and it should work out of the box. Not need to run local zipkin server. For more information see tests and jaeger_ documentation. .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/jaeger.png :alt: jaeger ui animation Stackdriver support ------------------- Google stackdriver_ supports zipkin_ span format as result it is possible to use *aiozipkin* with this google_ service. In order to make this work you need to setup zipkin service locally, that will send trace to the cloud. See google_ cloud documentation how to setup make zipkin collector: .. image:: https://raw.githubusercontent.com/aio-libs/aiozipkin/master/docs/stackdriver.png :alt: jaeger ui animation Requirements ------------ * Python_ 3.6+ * aiohttp_ .. _PEP492: https://www.python.org/dev/peps/pep-0492/ .. _Python: https://www.python.org .. _aiohttp: https://github.com/KeepSafe/aiohttp .. _asyncio: http://docs.python.org/3.5/library/asyncio.html .. _uvloop: https://github.com/MagicStack/uvloop .. 
_zipkin: http://zipkin.io .. _jaeger: http://jaeger.readthedocs.io/en/latest/ .. _stackdriver: https://cloud.google.com/stackdriver/ .. _google: https://cloud.google.com/trace/docs/zipkin upstream-ontologist-0.2.4/readme_tests/aiozipkin/description000064400000000000000000000013631046102023000226130ustar 00000000000000aiozipkin is Python 3.6+ module that adds distributed tracing capabilities from asyncio applications with zipkin (http://zipkin.io) server instrumentation. zipkin is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in microservice architectures. It manages both the collection and lookup of this data. Zipkin’s design is based on the Google Dapper paper. Applications are instrumented with aiozipkin report timing data to zipkin. The Zipkin UI also presents a Dependency diagram showing how many traced requests went through each application. If you are troubleshooting latency problems or errors, you can filter or sort all traces based on the application, length of trace, annotation, or timestamp. upstream-ontologist-0.2.4/readme_tests/argparse/README.rst000064400000000000000000000411161046102023000216430ustar 00000000000000ConfigArgParse -------------- .. image:: https://img.shields.io/pypi/v/ConfigArgParse.svg?style=flat :alt: PyPI version :target: https://pypi.python.org/pypi/ConfigArgParse .. image:: https://img.shields.io/pypi/pyversions/ConfigArgParse.svg :alt: Supported Python versions :target: https://pypi.python.org/pypi/ConfigArgParse .. image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master :alt: Travis CI build :target: https://travis-ci.org/bw2/ConfigArgParse Overview ~~~~~~~~ Applications with more than a handful of user-settable options are best configured through a combination of command line args, config files, hard-coded defaults, and in some cases, environment variables. 
Python's command line parsing modules such as argparse have very limited support for config files and environment variables, so this module extends argparse to add these features. Available on PyPI: http://pypi.python.org/pypi/ConfigArgParse .. image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master :target: https://travis-ci.org/bw2/ConfigArgParse Features ~~~~~~~~ - command-line, config file, env var, and default settings can now be defined, documented, and parsed in one go using a single API (if a value is specified in more than one way then: command line > environment variables > config file values > defaults) - config files can have .ini or .yaml style syntax (eg. key=value or key: value) - user can provide a config file via a normal-looking command line arg (eg. -c path/to/config.txt) rather than the argparse-style @config.txt - one or more default config file paths can be specified (eg. ['/etc/bla.conf', '~/.my_config'] ) - all argparse functionality is fully supported, so this module can serve as a drop-in replacement (verified by argparse unittests). - env vars and config file keys & syntax are automatically documented in the -h help message - new method :code:`print_values()` can report keys & values and where they were set (eg. command line, env var, config file, or default). - lite-weight (no 3rd-party library dependencies except (optionally) PyYAML) - extensible (:code:`ConfigFileParser` can be subclassed to define a new config file format) - unittested by running the unittests that came with argparse but on configargparse, and using tox to test with Python 2.7 and Python 3+ Example ~~~~~~~ *config_test.py*: Script that defines 4 options and a positional arg and then parses and prints the values. Also, it prints out the help message as well as the string produced by :code:`format_values()` to show what they look like. .. 
code:: py import configargparse p = configargparse.ArgParser(default_config_files=['/etc/app/conf.d/*.conf', '~/.my_settings']) p.add('-c', '--my-config', required=True, is_config_file=True, help='config file path') p.add('--genome', required=True, help='path to genome file') # this option can be set in a config file because it starts with '--' p.add('-v', help='verbose', action='store_true') p.add('-d', '--dbsnp', help='known variants .vcf', env_var='DBSNP_PATH') # this option can be set in a config file because it starts with '--' p.add('vcf', nargs='+', help='variant file(s)') options = p.parse_args() print(options) print("----------") print(p.format_help()) print("----------") print(p.format_values()) # useful for logging where different settings came from *config.txt:* Since the script above set the config file as required=True, lets create a config file to give it: .. code:: py # settings for config_test.py genome = HCMV # cytomegalovirus genome dbsnp = /data/dbsnp/variants.vcf *command line:* Now run the script and pass it the config file: .. code:: bash DBSNP_PATH=/data/dbsnp/variants_v2.vcf python config_test.py --my-config config.txt f1.vcf f2.vcf *output:* Here is the result: .. code:: bash Namespace(dbsnp='/data/dbsnp/variants_v2.vcf', genome='HCMV', my_config='config.txt', v=False, vcf=['f1.vcf', 'f2.vcf']) ---------- usage: config_test.py [-h] -c MY_CONFIG --genome GENOME [-v] [-d DBSNP] vcf [vcf ...] Args that start with '--' (eg. --genome) can also be set in a config file (/etc/app/conf.d/*.conf or ~/.my_settings or specified via -c). Config file syntax allows: key=value, flag=true, stuff=[a,b,c] (for details, see syntax at https://goo.gl/R74nmi). If an arg is specified in more than one place, then commandline values override environment variables which override config file values which override defaults. 
positional arguments: vcf variant file(s) optional arguments: -h, --help show this help message and exit -c MY_CONFIG, --my-config MY_CONFIG config file path --genome GENOME path to genome file -v verbose -d DBSNP, --dbsnp DBSNP known variants .vcf [env var: DBSNP_PATH] ---------- Command Line Args: --my-config config.txt f1.vcf f2.vcf Environment Variables: DBSNP_PATH: /data/dbsnp/variants_v2.vcf Config File (config.txt): genome: HCMV Special Values ~~~~~~~~~~~~~~ Under the hood, configargparse handles environment variables and config file values by converting them to their corresponding command line arg. For example, "key = value" will be processed as if "--key value" was specified on the command line. Also, the following special values (whether in a config file or an environment variable) are handled in a special way to support booleans and lists: - :code:`key = true` is handled as if "--key" was specified on the command line. In your python code this key must be defined as a boolean flag (eg. action="store_true" or similar). - :code:`key = [value1, value2, ...]` is handled as if "--key value1 --key value2" etc. was specified on the command line. In your python code this key must be defined as a list (eg. action="append"). Config File Syntax ~~~~~~~~~~~~~~~~~~ Only command line args that have a long version (eg. one that starts with '--') can be set in a config file. For example, "--color" can be set by putting "color=green" in a config file. The config file syntax depends on the constructor arg: :code:`config_file_parser_class` which can be set to one of the provided classes: :code:`DefaultConfigFileParser`, :code:`YAMLConfigFileParser`, :code:`ConfigparserConfigFileParser` or to your own subclass of the :code:`ConfigFileParser` abstract class. *DefaultConfigFileParser* - the full range of valid syntax is: .. 
code:: yaml # this is a comment ; this is also a comment (.ini style) --- # lines that start with --- are ignored (yaml style) ------------------- [section] # .ini-style section names are treated as comments # how to specify a key-value pair (all of these are equivalent): name value # key is case sensitive: "Name" isn't "name" name = value # (.ini style) (white space is ignored, so name = value same as name=value) name: value # (yaml style) --name value # (argparse style) # how to set a flag arg (eg. arg which has action="store_true") --name name name = True # "True" and "true" are the same # how to specify a list arg (eg. arg which has action="append") fruit = [apple, orange, lemon] indexes = [1, 12, 35 , 40] *YAMLConfigFileParser* - allows a subset of YAML syntax (http://goo.gl/VgT2DU) .. code:: yaml # a comment name1: value name2: true # "True" and "true" are the same fruit: [apple, orange, lemon] indexes: [1, 12, 35, 40] *ConfigparserConfigFileParser* - allows a subset of python's configparser module syntax (https://docs.python.org/3.7/library/configparser.html). In particular the following configparser options are set: .. code:: py config = configparser.ArgParser( delimiters=("=",":"), allow_no_value=False, comment_prefixes=("#",";"), inline_comment_prefixes=("#",";"), strict=True, empty_lines_in_values=False, ) Once configparser parses the config file all section names are removed, thus all keys must have unique names regardless of which INI section they are defined under. Also, any keys which have python list syntax are converted to lists by evaluating them as python code using ast.literal_eval (https://docs.python.org/3/library/ast.html#ast.literal_eval). To facilitate this all multi-line values are converted to single-line values. Thus multi-line string values will have all new-lines converted to spaces. 
Note, since key-value pairs that have python dictionary syntax are saved as single-line strings, even if formatted across multiple lines in the config file, dictionaries can be read in and converted to valid python dictionaries with PyYAML's safe_load. Example given below: .. code:: py # inside your config file (e.g. config.ini) [section1] # INI sections treated as comments system1_settings: { # start of multi-line dictionary 'a':True, 'b':[2, 4, 8, 16], 'c':{'start':0, 'stop':1000}, 'd':'experiment 32 testing simulation with parameter a on' } # end of multi-line dictionary value ....... # in your configargparse setup import configargparse import yaml parser = configargparse.ArgParser( config_file_parser_class=configargparse.ConfigparserConfigFileParser ) parser.add_argument('--system1_settings', type=yaml.safe_load) args = parser.parse_args() # now args.system1 is a valid python dict ArgParser Singletons ~~~~~~~~~~~~~~~~~~~~~~~~~ To make it easier to configure different modules in an application, configargparse provides globally-available ArgumentParser instances via configargparse.get_argument_parser('name') (similar to logging.getLogger('name')). Here is an example of an application with a utils module that also defines and retrieves its own command-line args. *main.py* .. code:: py import configargparse import utils p = configargparse.get_argument_parser() p.add_argument("-x", help="Main module setting") p.add_argument("--m-setting", help="Main module setting") options = p.parse_known_args() # using p.parse_args() here may raise errors. *utils.py* .. code:: py import configargparse p = configargparse.get_argument_parser() p.add_argument("--utils-setting", help="Config-file-settable option for utils") if __name__ == "__main__": options = p.parse_known_args() Help Formatters ~~~~~~~~~~~~~~~ :code:`ArgumentDefaultsRawHelpFormatter` is a new HelpFormatter that both adds default values AND disables line-wrapping. 
It can be passed to the constructor: :code:`ArgParser(.., formatter_class=ArgumentDefaultsRawHelpFormatter)` Aliases ~~~~~~~ The configargparse.ArgumentParser API inherits its class and method names from argparse and also provides the following shorter names for convenience: - p = configargparse.get_arg_parser() # get global singleton instance - p = configargparse.get_parser() - p = configargparse.ArgParser() # create a new instance - p = configargparse.Parser() - p.add_arg(..) - p.add(..) - options = p.parse(..) HelpFormatters: - RawFormatter = RawDescriptionHelpFormatter - DefaultsFormatter = ArgumentDefaultsHelpFormatter - DefaultsRawFormatter = ArgumentDefaultsRawHelpFormatter Design Notes ~~~~~~~~~~~~ Unit tests: tests/test_configargparse.py contains custom unittests for features specific to this module (such as config file and env-var support), as well as a hook to load and run argparse unittests (see the built-in test.test_argparse module) but on configargparse in place of argparse. This ensures that configargparse will work as a drop in replacement for argparse in all usecases. Previously existing modules (PyPI search keywords: config argparse): - argparse (built-in module Python v2.7+) - Good: - fully featured command line parsing - can read args from files using an easy to understand mechanism - Bad: - syntax for specifying config file path is unusual (eg. @file.txt)and not described in the user help message. - default config file syntax doesn't support comments and is unintuitive (eg. --namevalue) - no support for environment variables - ConfArgParse v1.0.15 (https://pypi.python.org/pypi/ConfArgParse) - Good: - extends argparse with support for config files parsed by ConfigParser - clear documentation in README - Bad: - config file values are processed using ArgumentParser.set_defaults(..) which means "required" and "choices" are not handled as expected. 
For example, if you specify a required value in a config file, you still have to specify it again on the command line. - doesn't work with Python 3 yet - no unit tests, code not well documented - appsettings v0.5 (https://pypi.python.org/pypi/appsettings) - Good: - supports config file (yaml format) and env_var parsing - supports config-file-only setting for specifying lists and dicts - Bad: - passes in config file and env settings via parse_args namespace param - tests not finished and don't work with Python 3 (import StringIO) - argparse_config v0.5.1 (https://pypi.python.org/pypi/argparse_config) - Good: - similar features to ConfArgParse v1.0.15 - Bad: - doesn't work with Python 3 (error during pip install) - yconf v0.3.2 - (https://pypi.python.org/pypi/yconf) - features and interface not that great - hieropt v0.3 - (https://pypi.python.org/pypi/hieropt) - doesn't appear to be maintained, couldn't find documentation - configurati v0.2.3 - (https://pypi.python.org/pypi/configurati) - Good: - JSON, YAML, or Python configuration files - handles rich data structures such as dictionaries - can group configuration names into sections (like .ini files) - Bad: - doesn't work with Python 3 - 2+ years since last release to PyPI - apparently unmaintained Design choices: 1. all options must be settable via command line. Having options that can only be set using config files or env. vars adds complexity to the API, and is not a useful enough feature since the developer can split up options into sections and call a section "config file keys", with command line args that are just "--" plus the config key. 2. config file and env. var settings should be processed by appending them to the command line (another benefit of #1). This is an easy-to-implement solution and implicitly takes care of checking that all "required" args are provided, etc., plus the behavior should be easy for users to understand. 3. 
configargparse shouldn't override argparse's convert_arg_line_to_args method so that all argparse unit tests can be run on configargparse. 4. in terms of what to allow for config file keys, the "dest" value of an option can't serve as a valid config key because many options can have the same dest. Instead, since multiple options can't use the same long arg (eg. "--long-arg-x"), let the config key be either "--long-arg-x" or "long-arg-x". This means the developer can allow only a subset of the command-line args to be specified via config file (eg. short args like -x would be excluded). Also, that way config keys are automatically documented whenever the command line args are documented in the help message. 5. don't force users to put config file settings in the right .ini [sections]. This doesn't have a clear benefit since all options are command-line settable, and so have a globally unique key anyway. Enforcing sections just makes things harder for the user and adds complexity to the implementation. 6. if necessary, config-file-only args can be added later by implementing a separate add method and using the namespace arg as in appsettings_v0.5 Relevant sites: - http://stackoverflow.com/questions/6133517/parse-config-file-environment-and-command-line-arguments-to-get-a-single-coll - http://tricksntweaks.blogspot.com/2013_05_01_archive.html - http://www.youtube.com/watch?v=vvCwqHgZJc8#t=35 .. |Travis CI Status for bw2/ConfigArgParse| image:: https://travis-ci.org/bw2/ConfigArgParse.svg?branch=master Versioning ~~~~~~~~~~ This software follows `Semantic Versioning`_ .. _Semantic Versioning: http://semver.org/ upstream-ontologist-0.2.4/readme_tests/argparse/description000064400000000000000000000006011046102023000224140ustar 00000000000000Applications with more than a handful of user-settable options are best configured through a combination of command line args, config files, hard-coded defaults, and in some cases, environment variables. 
Python's command line parsing modules such as argparse have very limited support for config files and environment variables, so this module extends argparse to add these features. upstream-ontologist-0.2.4/readme_tests/bitlbee/README.md000064400000000000000000000034761046102023000212440ustar 00000000000000# BitlBee ![](https://www.bitlbee.org/style/logo.png) [![Build Status](https://travis-ci.org/bitlbee/bitlbee.svg)](https://travis-ci.org/bitlbee/bitlbee) [![Coverity Scan Build Status](https://scan.coverity.com/projects/4028/badge.svg)](https://scan.coverity.com/projects/4028) An IRC to other chat networks gateway Main website: https://www.bitlbee.org/ Bug tracker: https://bugs.bitlbee.org/ Wiki: https://wiki.bitlbee.org/ License: GPLv2 ## Installation BitlBee is available in the package managers of most distros. For debian/ubuntu/etc you may use the nightly APT repository: https://code.bitlbee.org/debian/ You can also use a public server (such as `im.bitlbee.org`) instead of installing it: https://www.bitlbee.org/main.php/servers.html ## Compiling If you wish to compile it yourself, ensure you have the following packages and their headers: * glib 2.32 or newer (not to be confused with glibc) * gnutls * python 2 or 3 (for the user guide) Some optional features have additional dependencies, such as libpurple, libotr, libevent, etc. NSS and OpenSSL are also available but not as well supported as GnuTLS. Once you have the dependencies, building should be a matter of: ./configure make sudo make install ## Development tips * To enable debug symbols: `./configure --debug=1` * To get some additional debug output for some protocols: `BITLBEE_DEBUG=1 ./bitlbee -Dnv` * Use github pull requests against the 'develop' branch to submit patches. * The coding style based on K&R with tabs and 120 columns. See `./doc/uncrustify.cfg` for the parameters used to reformat the code. 
* Mappings of bzr revisions to git commits (for historical purposes) are available in `./doc/git-bzr-rev-map` * See also `./doc/README` and `./doc/HACKING` ## Help? Join **#BitlBee** on OFTC (**irc.oftc.net**) (OFTC, *not* freenode!) upstream-ontologist-0.2.4/readme_tests/bitlbee/description000064400000000000000000000000461046102023000222210ustar 00000000000000An IRC to other chat networks gateway upstream-ontologist-0.2.4/readme_tests/bup/README.md000064400000000000000000000617601046102023000204240ustar 00000000000000bup: It backs things up ======================= bup is a program that backs things up. It's short for "backup." Can you believe that nobody else has named an open source program "bup" after all this time? Me neither. Despite its unassuming name, bup is pretty cool. To give you an idea of just how cool it is, I wrote you this poem: Bup is teh awesome What rhymes with awesome? I guess maybe possum But that's irrelevant. Hmm. Did that help? Maybe prose is more useful after all. Reasons bup is awesome ---------------------- bup has a few advantages over other backup software: - It uses a rolling checksum algorithm (similar to rsync) to split large files into chunks. The most useful result of this is you can backup huge virtual machine (VM) disk images, databases, and XML files incrementally, even though they're typically all in one huge file, and not use tons of disk space for multiple versions. - It uses the packfile format from git (the open source version control system), so you can access the stored data even if you don't like bup's user interface. - Unlike git, it writes packfiles *directly* (instead of having a separate garbage collection / repacking stage) so it's fast even with gratuitously huge amounts of data. bup's improved index formats also allow you to track far more filenames than git (millions) and keep track of far more objects (hundreds or thousands of gigabytes). 
- Data is "automagically" shared between incremental backups without having to know which backup is based on which other one - even if the backups are made from two different computers that don't even know about each other. You just tell bup to back stuff up, and it saves only the minimum amount of data needed. - You can back up directly to a remote bup server, without needing tons of temporary disk space on the computer being backed up. And if your backup is interrupted halfway through, the next run will pick up where you left off. And it's easy to set up a bup server: just install bup on any machine where you have ssh access. - Bup can use "par2" redundancy to recover corrupted backups even if your disk has undetected bad sectors. - Even when a backup is incremental, you don't have to worry about restoring the full backup, then each of the incrementals in turn; an incremental backup *acts* as if it's a full backup, it just takes less disk space. - You can mount your bup repository as a FUSE filesystem and access the content that way, and even export it over Samba. - It's written in python (with some C parts to make it faster) so it's easy for you to extend and maintain. Reasons you might want to avoid bup ----------------------------------- - It's not remotely as well tested as something like tar, so it's more likely to eat your data. It's also missing some probably-critical features, though fewer than it used to be. - It requires python 3.7 or newer (or 2.7 for a bit longer), a C compiler, and an installed git version >= 1.5.6. It also requires par2 if you want fsck to be able to generate the information needed to recover from some types of corruption. While python 2.7 is still supported, please make plans to upgrade. Python 2 upstream support ended on 2020-01-01, and we plan to drop support soon too. - It currently only works on Linux, FreeBSD, NetBSD, OS X >= 10.4, Solaris, or Windows (with Cygwin, and WSL). Patches to support other platforms are welcome. 
- Until resolved, a [glibc bug](https://sourceware.org/bugzilla/show_bug.cgi?id=26034) might cause bup to crash on startup for some (unusual) command line argument values, when bup is configured to use Python 3. - Any items in "Things that are stupid" below. Notable changes introduced by a release ======================================= - Changes in 0.32 as compared to 0.31 - Changes in 0.31 as compared to 0.30.1 - Changes in 0.30.1 as compared to 0.30 - Changes in 0.30 as compared to 0.29.3 - Changes in 0.29.3 as compared to 0.29.2 - Changes in 0.29.2 as compared to 0.29.1 - Changes in 0.29.1 as compared to 0.29 - Changes in 0.29 as compared to 0.28.1 - Changes in 0.28.1 as compared to 0.28 - Changes in 0.28 as compared to 0.27.1 - Changes in 0.27.1 as compared to 0.27 Test status =========== | branch | Debian | FreeBSD | macOS | |--------|------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------| | master | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | [![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=master&task=macos)](https://cirrus-ci.com/github/bup/bup) | | 0.30.x | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | 
[![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.30.x&task=macos)](https://cirrus-ci.com/github/bup/bup) | | 0.29.x | [![Debian test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=debian)](https://cirrus-ci.com/github/bup/bup) | [![FreeBSD test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=freebsd)](https://cirrus-ci.com/github/bup/bup) | [![macOS test status](https://api.cirrus-ci.com/github/bup/bup.svg?branch=0.29.x&task=macos)](https://cirrus-ci.com/github/bup/bup) | Getting started =============== From source ----------- - Check out the bup source code using git: ```sh git clone https://github.com/bup/bup ``` - This will leave you on the master branch, which is perfect if you would like to help with development, but if you'd just like to use bup, please check out the latest stable release like this: ```sh git checkout 0.32 ``` You can see the latest stable release here: https://github.com/bup/bup/releases. - Install the required python libraries (including the development libraries). 
On very recent Debian/Ubuntu versions, this may be sufficient (run as root): ```sh apt-get build-dep bup ``` Otherwise try this: ```sh apt-get install python3.7-dev python3-fuse apt-get install python3-pyxattr python3-pytest apt-get install python3-distutils apt-get install pkg-config linux-libc-dev libacl1-dev apt-get install gcc make acl attr rsync apt-get install python3-pytest-xdist # optional (parallel tests) apt-get install par2 # optional (error correction) apt-get install libreadline-dev # optional (bup ftp) apt-get install python3-tornado # optional (bup web) ``` Or, if you can't yet migrate to Python 3 (please try to soon): ```sh apt-get install python2.7-dev python-fuse apt-get install python-pyxattr python-pytest apt-get install pkg-config linux-libc-dev libacl1-dev apt-get install gcc make acl attr rsync apt-get install python-pytest-xdist # optional (parallel tests) apt-get install par2 # optional (error correction) apt-get install libreadline-dev # optional (bup ftp) apt-get install python-tornado # optional (bup web) ``` On CentOS (for CentOS 6, at least), this should be sufficient (run as root): ```sh yum groupinstall "Development Tools" yum install python2 python2-devel libacl-devel pylibacl yum install fuse-python pyxattr yum install perl-Time-HiRes yum install readline-devel # optional (bup ftp) yum install python-tornado # optional (bup web) ``` In addition to the default CentOS repositories, you may need to add RPMForge (for fuse-python) and EPEL (for pyxattr). On Cygwin, install python, make, rsync, and gcc4. 
If you would like to use the optional bup web server on systems without a tornado package, you may want to try this: ```sh pip install tornado ``` - Build the python module and symlinks: ```sh make ``` - Run the tests: ```sh make long-check ``` or if you're in a bit more of a hurry: ```sh make check ``` If you have the Python xdist module installed, then you can probably run the tests faster by adding the make -j option (see ./HACKING for additional information): ```sh make -j check ``` The tests should pass. If they don't pass for you, stop here and send an email to bup-list@googlegroups.com. Though if there are symbolic links along the current working directory path, the tests may fail. Running something like this before "make test" should sidestep the problem: ```sh cd "$(pwd -P)" ``` - You can install bup via "make install", and override the default destination with DESTDIR and PREFIX. Files are normally installed to "$DESTDIR/$PREFIX" where DESTDIR is empty by default, and PREFIX is set to /usr/local. So if you wanted to install bup to /opt/bup, you might do something like this: ```sh make install DESTDIR=/opt/bup PREFIX='' ``` - The Python executable that bup will use is chosen by ./configure, which will search for a reasonable version unless PYTHON is set in the environment, in which case, bup will use that path. You can see which Python executable was chosen by looking at the configure output, or examining cmd/python-cmd.sh, and you can change the selection by re-running ./configure. 
From binary packages -------------------- Binary packages of bup are known to be built for the following OSes: - Debian: http://packages.debian.org/search?searchon=names&keywords=bup - Ubuntu: http://packages.ubuntu.com/search?searchon=names&keywords=bup - pkgsrc (NetBSD, Dragonfly, and others) http://pkgsrc.se/sysutils/bup http://cvsweb.netbsd.org/bsdweb.cgi/pkgsrc/sysutils/bup/ - Arch Linux: https://www.archlinux.org/packages/?sort=&q=bup - Fedora: https://apps.fedoraproject.org/packages/bup - macOS (Homebrew): https://formulae.brew.sh/formula/bup Using bup --------- - Get help for any bup command: ```sh bup help bup help init bup help index bup help save bup help restore ... ``` - Initialize the default BUP_DIR (~/.bup -- you can choose another by either specifying `bup -d DIR ...` or setting the `BUP_DIR` environment variable for a command): ```sh bup init ``` - Make a local backup (-v or -vv will increase the verbosity): ```sh bup index /etc bup save -n local-etc /etc ``` - Restore a local backup to ./dest: ```sh bup restore -C ./dest local-etc/latest/etc ls -l dest/etc ``` - Look at how much disk space your backup took: ```sh du -s ~/.bup ``` - Make another backup (which should be mostly identical to the last one; notice that you don't have to *specify* that this backup is incremental, it just saves space automatically): ```sh bup index /etc bup save -n local-etc /etc ``` - Look how little extra space your second backup used (on top of the first): ```sh du -s ~/.bup ``` - Get a list of your previous backups: ```sh bup ls local-etc ``` - Restore your first backup again: ```sh bup restore -C ./dest-2 local-etc/2013-11-23-11195/etc ``` - Make a backup to a remote server which must already have the 'bup' command somewhere in its PATH (see /etc/profile, etc/environment, ~/.profile, or ~/.bashrc), and be accessible via ssh. 
Make sure to replace SERVERNAME with the actual hostname of your server: ```sh bup init -r SERVERNAME:path/to/remote-bup-dir bup index /etc bup save -r SERVERNAME:path/to/remote-bup-dir -n local-etc /etc ``` - Make a remote backup to ~/.bup on SERVER: ```sh bup index /etc bup save -r SERVER: -n local-etc /etc ``` - See what saves are available in ~/.bup on SERVER: ```sh bup ls -r SERVER: ``` - Restore the remote backup to ./dest: ```sh bup restore -r SERVER: -C ./dest local-etc/latest/etc ls -l dest/etc ``` - Defend your backups from death rays (OK fine, more likely from the occasional bad disk block). This writes parity information (currently via par2) for all of the existing data so that bup may be able to recover from some amount of repository corruption: ```sh bup fsck -g ``` - Use split/join instead of index/save/restore. Try making a local backup using tar: ```sh tar -cvf - /etc | bup split -n local-etc -vv ``` - Try restoring the tarball: ```sh bup join local-etc | tar -tf - ``` - Look at how much disk space your backup took: ```sh du -s ~/.bup ``` - Make another tar backup: ```sh tar -cvf - /etc | bup split -n local-etc -vv ``` - Look at how little extra space your second backup used on top of the first: ```sh du -s ~/.bup ``` - Restore the first tar backup again (the ~1 is git notation for "one older than the most recent"): ```sh bup join local-etc~1 | tar -tf - ``` - Get a list of your previous split-based backups: ```sh GIT_DIR=~/.bup git log local-etc ``` - Save a tar archive to a remote server (without tar -z to facilitate deduplication): ```sh tar -cvf - /etc | bup split -r SERVERNAME: -n local-etc -vv ``` - Restore the archive: ```sh bup join -r SERVERNAME: local-etc | tar -tf - ``` That's all there is to it! Notes on FreeBSD ---------------- - FreeBSD's default 'make' command doesn't like bup's Makefile. 
In order to compile the code, run tests and install bup, you need to install GNU Make from the port named 'gmake' and use its executable instead in the commands seen above. (i.e. 'gmake test' runs bup's test suite) - Python's development headers are automatically installed with the 'python' port so there's no need to install them separately. - To use the 'bup fuse' command, you need to install the fuse kernel module from the 'fusefs-kmod' port in the 'sysutils' section and the libraries from the port named 'py-fusefs' in the 'devel' section. - The 'par2' command can be found in the port named 'par2cmdline'. - In order to compile the documentation, you need pandoc which can be found in the port named 'hs-pandoc' in the 'textproc' section. Notes on NetBSD/pkgsrc ---------------------- - See pkgsrc/sysutils/bup, which should be the most recent stable release and includes man pages. It also has a reasonable set of dependencies (git, par2, py-fuse-bindings). - The "fuse-python" package referred to is hard to locate, and is a separate tarball for the python language binding distributed by the fuse project on sourceforge. It is available as pkgsrc/filesystems/py-fuse-bindings and on NetBSD 5, "bup fuse" works with it. - "bup fuse" presents every directory/file as inode 0. The directory traversal code ("fts") in NetBSD's libc will interpret this as a cycle and error out, so "ls -R" and "find" will not work. - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. Notes on Cygwin --------------- - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. - In test/ext/test-misc, two tests have been disabled. These tests check to see that repeated saves produce identical trees and that an intervening index doesn't change the SHA1. Apparently Cygwin has some unusual behaviors with respect to access times (that probably warrant further investigation). 
Possibly related: http://cygwin.com/ml/cygwin/2007-06/msg00436.html Notes on OS X ------------- - There is no support for ACLs. If/when some enterprising person fixes this, adjust dev/compare-trees. How it works ============ Basic storage: -------------- bup stores its data in a git-formatted repository. Unfortunately, git itself doesn't actually behave very well for bup's use case (huge numbers of files, files with huge sizes, retaining file permissions/ownership are important), so we mostly don't use git's *code* except for a few helper programs. For example, bup has its own git packfile writer written in python. Basically, 'bup split' reads the data on stdin (or from files specified on the command line), breaks it into chunks using a rolling checksum (similar to rsync), and saves those chunks into a new git packfile. There is at least one git packfile per backup. When deciding whether to write a particular chunk into the new packfile, bup first checks all the other packfiles that exist to see if they already have that chunk. If they do, the chunk is skipped. git packs come in two parts: the pack itself (*.pack) and the index (*.idx). The index is pretty small, and contains a list of all the objects in the pack. Thus, when generating a remote backup, we don't have to have a copy of the packfiles from the remote server: the local end just downloads a copy of the server's *index* files, and compares objects against those when generating the new pack, which it sends directly to the server. The "-n" option to 'bup split' and 'bup save' is the name of the backup you want to create, but it's actually implemented as a git branch. So you can do cute things like checkout a particular branch using git, and receive a bunch of chunk files corresponding to the file you split. If you use '-b' or '-t' or '-c' instead of '-n', bup split will output a list of blobs, a tree containing that list of blobs, or a commit containing that tree, respectively, to stdout. 
You can use this to construct your own scripts that do something with those values. The bup index: -------------- 'bup index' walks through your filesystem and updates a file (whose name is, by default, ~/.bup/bupindex) to contain the name, attributes, and an optional git SHA1 (blob id) of each file and directory. 'bup save' basically just runs the equivalent of 'bup split' a whole bunch of times, once per file in the index, and assembles a git tree that contains all the resulting objects. Among other things, that makes 'git diff' much more useful (compared to splitting a tarball, which is essentially a big binary blob). However, since bup splits large files into smaller chunks, the resulting tree structure doesn't *exactly* correspond to what git itself would have stored. Also, the tree format used by 'bup save' will probably change in the future to support storing file ownership, more complex file permissions, and so on. If a file has previously been written by 'bup save', then its git blob/tree id is stored in the index. This lets 'bup save' avoid reading that file to produce future incremental backups, which means it can go *very* fast unless a lot of files have changed. Things that are stupid for now but which we'll fix later ======================================================== Help with any of these problems, or others, is very welcome. Join the mailing list (see below) if you'd like to help. - 'bup save' and 'bup restore' have immature metadata support. On the plus side, they actually do have support now, but it's new, and not remotely as well tested as tar/rsync/whatever's. However, you have to start somewhere, and as of 0.25, we think it's ready for more general use. Please let us know if you have any trouble. Also, if any strip or graft-style options are specified to 'bup save', then no metadata will be written for the root directory. That's obviously less than ideal. - bup is overly optimistic about mmap. 
Right now bup just assumes that it can mmap as large a block as it likes, and that mmap will never fail. Yeah, right... If nothing else, this has failed on 32-bit architectures (and 31-bit is even worse -- looking at you, s390). To fix this, we might just implement a FakeMmap[1] class that uses normal file IO and handles all of the mmap methods[2] that bup actually calls. Then we'd swap in one of those whenever mmap fails. This would also require implementing some of the methods needed to support "[]" array access, probably at a minimum __getitem__, __setitem__, and __setslice__ [3]. [1] http://comments.gmane.org/gmane.comp.sysutils.backup.bup/613 [2] http://docs.python.org/2/library/mmap.html [3] http://docs.python.org/2/reference/datamodel.html#emulating-container-types - 'bup index' is slower than it should be. It's still rather fast: it can iterate through all the filenames on my 600,000 file filesystem in a few seconds. But it still needs to rewrite the entire index file just to add a single filename, which is pretty nasty; it should just leave the new files in a second "extra index" file or something. - bup could use inotify for *really* efficient incremental backups. You could even have your system doing "continuous" backups: whenever a file changes, we immediately send an image of it to the server. We could give the continuous-backup process a really low CPU and I/O priority so you wouldn't even know it was running. - bup only has experimental support for pruning old backups. While you should now be able to drop old saves and branches with `bup rm`, and reclaim the space occupied by data that's no longer needed by other backups with `bup gc`, these commands are experimental, and should be handled with great care. See the man pages for more information. Unless you want to help test the new commands, one possible workaround is to just start a new BUP_DIR occasionally, i.e. bup-2013, bup-2014... 
- bup has never been tested on anything but Linux, FreeBSD, NetBSD, OS X, and Windows+Cygwin. There's nothing that makes it *inherently* non-portable, though, so that's mostly a matter of someone putting in some effort. (For a "native" Windows port, the most annoying thing is the absence of ssh in a default Windows installation.) - bup needs better documentation. According to an article about bup in Linux Weekly News (https://lwn.net/Articles/380983/), "it's a bit short on examples and a user guide would be nice." Documentation is the sort of thing that will never be great unless someone from outside contributes it (since the developers can never remember which parts are hard to understand). - bup is "relatively speedy" and has "pretty good" compression. ...according to the same LWN article. Clearly neither of those is good enough. We should have awe-inspiring speed and crazy-good compression. Must work on that. Writing more parts in C might help with the speed. - bup has no GUI. Actually, that's not stupid, but you might consider it a limitation. See the ["Related Projects"](https://bup.github.io/) list for some possible options. More Documentation ================== bup has an extensive set of man pages. Try using 'bup help' to get started, or use 'bup help SUBCOMMAND' for any bup subcommand (like split, join, index, save, etc.) to get details on that command. For further technical details, please see ./DESIGN. How you can help ================ bup is a work in progress and there are many ways it can still be improved. If you'd like to contribute patches, ideas, or bug reports, please join the bup mailing list. You can find the mailing list archives here: http://groups.google.com/group/bup-list and you can subscribe by sending a message to: bup-list+subscribe@googlegroups.com Please see ./HACKING for additional information, i.e. how to submit patches (hint - no pull requests), how we handle branches, etc. 
Have fun, Avery upstream-ontologist-0.2.4/readme_tests/bup/description000064400000000000000000000010261046102023000214000ustar 00000000000000bup is a program that backs things up. It's short for "backup." Can you believe that nobody else has named an open source program "bup" after all this time? Me neither. Despite its unassuming name, bup is pretty cool. To give you an idea of just how cool it is, I wrote you this poem: Bup is teh awesome What rhymes with awesome? I guess maybe possum But that's irrelevant. Hmm. Did that help? Maybe prose is more useful after all. upstream-ontologist-0.2.4/readme_tests/cbor2/README.rst000064400000000000000000000073231046102023000210500ustar 00000000000000.. image:: https://travis-ci.com/agronholm/cbor2.svg?branch=master :target: https://travis-ci.com/agronholm/cbor2 :alt: Build Status .. image:: https://coveralls.io/repos/github/agronholm/cbor2/badge.svg?branch=master :target: https://coveralls.io/github/agronholm/cbor2?branch=master :alt: Code Coverage .. image:: https://readthedocs.org/projects/cbor2/badge/?version=latest :target: https://cbor2.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status About ===== This library provides encoding and decoding for the Concise Binary Object Representation (CBOR) (`RFC 7049`_) serialization format. `Read the docs `_ to learn more. It is implemented in pure python with an optional C backend. On PyPy, cbor2 runs with almost identical performance to the C backend. .. _RFC 7049: https://tools.ietf.org/html/rfc7049 Features -------- * Simple api like ``json`` or ``pickle`` modules. * Support many `CBOR tags`_ with `stdlib objects`_. * Generic tag decoding. * `Shared value`_ references including cyclic references. * Optional C module backend tested on big- and little-endian architectures. * Extensible `tagged value handling`_ using ``tag_hook`` and ``object_hook`` on decode and ``default`` on encode. 
* Command-line diagnostic tool, converting CBOR file or stream to JSON ``python -m cbor2.tool`` (This is a lossy conversion, for diagnostics only) * Thorough test suite. .. _CBOR tags: https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml .. _stdlib objects: https://cbor2.readthedocs.io/en/latest/usage.html#tag-support .. _Shared value: http://cbor.schmorp.de/value-sharing .. _tagged value handling: https://cbor2.readthedocs.io/en/latest/customizing.html#using-the-cbor-tags-for-custom-types Installation ============ :: pip install cbor2 Requirements ------------ * Python >= 3.6 (or `PyPy3`_ 3.6+) * C-extension: Any C compiler that can build Python extensions. Any modern libc with the exception of Glibc<2.9 .. _PyPy3: https://www.pypy.org/ Building the C-Extension ------------------------ To force building of the optional C-extension, set OS env ``CBOR2_BUILD_C_EXTENSION=1``. To disable building of the optional C-extension, set OS env ``CBOR2_BUILD_C_EXTENSION=0``. If this environment variable is unset, setup.py will default to auto detecting a compatible C library and attempt to compile the extension. Usage ===== `Basic Usage `_ Command-line Usage ================== ``python -m cbor2.tool`` converts CBOR data in raw binary or base64 encoding into a representation that allows printing as JSON. This is a lossy transformation as each datatype is converted into something that can be represented as a JSON value. Usage:: # Pass hexadecimal through xxd. $ echo a16568656c6c6f65776f726c64 | xxd -r -ps | python -m cbor2.tool --pretty { "hello": "world" } # Decode Base64 directly $ echo ggEC | python -m cbor2.tool --decode [1, 2] # Read from a file encoded in Base64 $ python -m cbor2.tool -d tests/examples.cbor.b64 {...} It can be used in a pipeline with json processing tools like `jq`_ to allow syntax coloring, field extraction and more. 
CBOR data items concatenated into a sequence can be decoded also:: $ echo ggECggMEggUG | python -m cbor2.tool -d --sequence [1, 2] [3, 4] [5, 6] Multiple files can also be sent to a single output file:: $ python -m cbor2.tool -o all_files.json file1.cbor file2.cbor ... fileN.cbor .. _jq: https://stedolan.github.io/jq/ Security ======== This library has not been tested against malicious input. In theory it should be as safe as JSON, since unlike ``pickle`` the decoder does not execute any code. upstream-ontologist-0.2.4/readme_tests/cbor2/description000064400000000000000000000004451046102023000216250ustar 00000000000000This library provides encoding and decoding for the Concise Binary Object Representation (CBOR) (RFC 7049) serialization format. Read the docs to learn more. It is implemented in pure python with an optional C backend. On PyPy, cbor2 runs with almost identical performance to the C backend. upstream-ontologist-0.2.4/readme_tests/django-ical/README.rst000064400000000000000000000032631046102023000222100ustar 00000000000000django-ical =========== |pypi| |docs| |build| |coverage| |jazzband| django-ical is a simple library/framework for creating `iCal `_ feeds based in Django's `syndication feed framework `_. This documentation is modeled after the documentation for the syndication feed framework so you can think of it as a simple extension. If you are familiar with the Django syndication feed framework you should be able to be able to use django-ical fairly quickly. It works the same way as the Django syndication framework but adds a few extension properties to support iCalendar feeds. django-ical uses the `icalendar `_ library under the hood to generate iCalendar feeds. Documentation ------------- Documentation is hosted on Read the Docs: https://django-ical.readthedocs.io/en/latest/ .. |pypi| image:: https://img.shields.io/pypi/v/django-ical.svg :alt: PyPI :target: https://pypi.org/project/django-ical/ .. 
|docs| image:: https://readthedocs.org/projects/django-ical/badge/?version=latest :alt: Documentation Status :scale: 100% :target: http://django-ical.readthedocs.io/en/latest/?badge=latest .. |build| image:: https://github.com/jazzband/django-ical/workflows/Test/badge.svg :target: https://github.com/jazzband/django-ical/actions :alt: GitHub Actions .. |coverage| image:: https://codecov.io/gh/jazzband/django-ical/branch/master/graph/badge.svg :target: https://codecov.io/gh/jazzband/django-ical :alt: Coverage .. |jazzband| image:: https://jazzband.co/static/img/badge.svg :target: https://jazzband.co/ :alt: Jazzband upstream-ontologist-0.2.4/readme_tests/django-ical/description000064400000000000000000000011071046102023000227620ustar 00000000000000django-ical is a simple library/framework for creating iCal feeds based in Django's syndication feed framework. This documentation is modeled after the documentation for the syndication feed framework so you can think of it as a simple extension. If you are familiar with the Django syndication feed framework you should be able to be able to use django-ical fairly quickly. It works the same way as the Django syndication framework but adds a few extension properties to support iCalendar feeds. django-ical uses the icalendar library under the hood to generate iCalendar feeds. upstream-ontologist-0.2.4/readme_tests/dulwich/README.rst000064400000000000000000000056041046102023000215000ustar 00000000000000Dulwich ======= This is the Dulwich project. It aims to provide an interface to git repos (both local and remote) that doesn't call out to git directly but instead uses pure Python. **Main website**: **License**: Apache License, version 2 or GNU General Public License, version 2 or later. The project is named after the part of London that Mr. and Mrs. Git live in in the particular Monty Python sketch. Installation ------------ By default, Dulwich' setup.py will attempt to build and install the optional C extensions. 
The reason for this is that they significantly improve the performance since some low-level operations that are executed often are much slower in CPython. If you don't want to install the C bindings, specify the --pure argument to setup.py:: $ python setup.py --pure install or if you are installing from pip:: $ pip install dulwich --global-option="--pure" Note that you can also specify --global-option in a `requirements.txt `_ file, e.g. like this:: dulwich --global-option=--pure Getting started --------------- Dulwich comes with both a lower-level API and higher-level plumbing ("porcelain"). For example, to use the lower level API to access the commit message of the last commit:: >>> from dulwich.repo import Repo >>> r = Repo('.') >>> r.head() '57fbe010446356833a6ad1600059d80b1e731e15' >>> c = r[r.head()] >>> c >>> c.message 'Add note about encoding.\n' And to print it using porcelain:: >>> from dulwich import porcelain >>> porcelain.log('.', max_entries=1) -------------------------------------------------- commit: 57fbe010446356833a6ad1600059d80b1e731e15 Author: Jelmer Vernooij Date: Sat Apr 29 2017 23:57:34 +0000 Add note about encoding. Further documentation --------------------- The dulwich documentation can be found in docs/ and built by running ``make doc``. It can also be found `on the web `_. Help ---- There is a *#dulwich* IRC channel on the `Freenode `_, and `dulwich-announce `_ and `dulwich-discuss `_ mailing lists. Contributing ------------ For a full list of contributors, see the git logs or `AUTHORS `_. If you'd like to contribute to Dulwich, see the `CONTRIBUTING `_ file and `list of open issues `_. Supported versions of Python ---------------------------- At the moment, Dulwich supports (and is tested on) CPython 3.5 and later and Pypy. The latest release series to support Python 2.x was the 0.19 series. See the 0.19 branch in the Dulwich git repository. 
upstream-ontologist-0.2.4/readme_tests/dulwich/description000064400000000000000000000002471046102023000222550ustar 00000000000000This is the Dulwich project. It aims to provide an interface to git repos (both local and remote) that doesn't call out to git directly but instead uses pure Python. upstream-ontologist-0.2.4/readme_tests/empty/README.md000064400000000000000000000000001046102023000207500ustar 00000000000000upstream-ontologist-0.2.4/readme_tests/erbium/README.md000064400000000000000000000010101046102023000210770ustar 00000000000000Erbium ====== Erbium[^0] provides networking services for use on small/home networks. Erbium currently supports both DNS and DHCP, with other protocols hopefully coming soon. Erbium is in early development. * DNS is still in early development, and not ready for use. * DHCP is beta quality. Should be ready for test use. * Router Advertisements are alpha quality. Should be ready for limited testing. [^0]: Erbium is the 68th element in the periodic table, the same as the client port number for DHCP. upstream-ontologist-0.2.4/readme_tests/erbium/description000064400000000000000000000007561046102023000221060ustar 00000000000000Erbium[^0] provides networking services for use on small/home networks. Erbium currently supports both DNS and DHCP, with other protocols hopefully coming soon. Erbium is in early development. * DNS is still in early development, and not ready for use. * DHCP is beta quality. Should be ready for test use. * Router Advertisements are alpha quality. Should be ready for limited testing. [^0]: Erbium is the 68th element in the periodic table, the same as the client port number for DHCP. upstream-ontologist-0.2.4/readme_tests/isso/README.md000064400000000000000000000006071046102023000206040ustar 00000000000000Isso – a commenting server similar to Disqus ============================================ Isso – *Ich schrei sonst* – is a lightweight commenting server written in Python and JavaScript. 
It aims to be a drop-in replacement for [Disqus](http://disqus.com). ![Isso in Action](http://posativ.org/~tmp/isso-sample.png) See [posativ.org/isso](http://posativ.org/isso/) for more details. upstream-ontologist-0.2.4/readme_tests/isso/description000064400000000000000000000002231046102023000215650ustar 00000000000000Isso – Ich schrei sonst – is a lightweight commenting server written in Python and JavaScript. It aims to be a drop-in replacement for Disqus. upstream-ontologist-0.2.4/readme_tests/jadx/README.md000064400000000000000000000123351046102023000205560ustar 00000000000000## JADX [![Build Status](https://travis-ci.org/skylot/jadx.png?branch=master)](https://travis-ci.org/skylot/jadx) [![Code Coverage](https://codecov.io/gh/skylot/jadx/branch/master/graph/badge.svg)](https://codecov.io/gh/skylot/jadx) [![SonarQube Bugs](https://sonarcloud.io/api/project_badges/measure?project=jadx&metric=bugs)](https://sonarcloud.io/dashboard?id=jadx) [![License](http://img.shields.io/:license-apache-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0.html) [![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release) **jadx** - Dex to Java decompiler Command line and GUI tools for produce Java source code from Android Dex and Apk files ![jadx-gui screenshot](https://i.imgur.com/h917IBZ.png) ### Downloads - latest [unstable build: ![Download](https://api.bintray.com/packages/skylot/jadx/unstable/images/download.svg) ](https://bintray.com/skylot/jadx/unstable/_latestVersion#files) - release from [github: ![Latest release](https://img.shields.io/github/release/skylot/jadx.svg)](https://github.com/skylot/jadx/releases/latest) - release from [bintray: ![Download](https://api.bintray.com/packages/skylot/jadx/releases/images/download.svg) ](https://bintray.com/skylot/jadx/releases/_latestVersion#files) After download unpack zip file go to `bin` directory and run: - `jadx` - 
command line version - `jadx-gui` - graphical version On Windows run `.bat` files with double-click\ **Note:** ensure you have installed Java 8 64-bit version ### Related projects: - [PyJadx](https://github.com/romainthomas/pyjadx) - python binding for jadx by [@romainthomas](https://github.com/romainthomas) ### Building jadx from source JDK 8 or higher must be installed: git clone https://github.com/skylot/jadx.git cd jadx ./gradlew dist (on Windows, use `gradlew.bat` instead of `./gradlew`) Scripts for run jadx will be placed in `build/jadx/bin` and also packed to `build/jadx-.zip` ### macOS You can install using brew: brew install jadx ### Run Run **jadx** on itself: cd build/jadx/ bin/jadx -d out lib/jadx-core-*.jar # or bin/jadx-gui lib/jadx-core-*.jar ### Usage ``` jadx[-gui] [options] (.apk, .dex, .jar, .class, .smali, .zip, .aar, .arsc) options: -d, --output-dir - output directory -ds, --output-dir-src - output directory for sources -dr, --output-dir-res - output directory for resources -j, --threads-count - processing threads count -r, --no-res - do not decode resources -s, --no-src - do not decompile source code --single-class - decompile a single class --output-format - can be 'java' or 'json' (default: java) -e, --export-gradle - save as android gradle project --show-bad-code - show inconsistent code (incorrectly decompiled) --no-imports - disable use of imports, always write entire package name --no-debug-info - disable debug info --no-inline-anonymous - disable anonymous classes inline --no-replace-consts - don't replace constant value with matching constant field --escape-unicode - escape non latin characters in strings (with \u) --respect-bytecode-access-modifiers - don't change original access modifiers --deobf - activate deobfuscation --deobf-min - min length of name, renamed if shorter (default: 3) --deobf-max - max length of name, renamed if longer (default: 64) --deobf-rewrite-cfg - force to save deobfuscation map --deobf-use-sourcename - use 
source file name as class name alias --rename-flags - what to rename, comma-separated, 'case' for system case sensitivity, 'valid' for java identifiers, 'printable' characters, 'none' or 'all' --fs-case-sensitive - treat filesystem as case sensitive, false by default --cfg - save methods control flow graph to dot file --raw-cfg - save methods control flow graph (use raw instructions) -f, --fallback - make simple dump (using goto instead of 'if', 'for', etc) -v, --verbose - verbose output --version - print jadx version -h, --help - print this help Example: jadx -d out classes.dex jadx --rename-flags "none" classes.dex jadx --rename-flags "valid,printable" classes.dex ``` These options also worked on jadx-gui running from command line and override options from preferences dialog ### Troubleshooting ##### Out of memory error: - Reduce processing threads count (`-j` option) - Increase maximum java heap size: * command line (example for linux): `JAVA_OPTS="-Xmx4G" jadx -j 1 some.apk` * edit 'jadx' script (jadx.bat on Windows) and setup bigger heap size: `DEFAULT_JVM_OPTS="-Xmx2500M"` --------------------------------------- *Licensed under the Apache 2.0 License* *Copyright 2018 by Skylot* upstream-ontologist-0.2.4/readme_tests/jadx/description000064400000000000000000000001271046102023000215410ustar 00000000000000Command line and GUI tools for produce Java source code from Android Dex and Apk files upstream-ontologist-0.2.4/readme_tests/jupyter-client/README.md000064400000000000000000000044341046102023000226070ustar 00000000000000# Jupyter Client [![Build Status](https://github.com/jupyter/jupyter_client/workflows/CI/badge.svg)](https://github.com/jupyter/jupyter_client/actions) [![Code Health](https://landscape.io/github/jupyter/jupyter_client/master/landscape.svg?style=flat)](https://landscape.io/github/jupyter/jupyter_client/master) `jupyter_client` contains the reference implementation of the [Jupyter protocol][]. 
It also provides client and kernel management APIs for working with kernels. It also provides the `jupyter kernelspec` entrypoint for installing kernelspecs for use with Jupyter frontends. [Jupyter protocol]: https://jupyter-client.readthedocs.io/en/latest/messaging.html # Development Setup The [Jupyter Contributor Guides](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html) provide extensive information on contributing code or documentation to Jupyter projects. The limited instructions below for setting up a development environment are for your convenience. ## Coding You'll need Python and `pip` on the search path. Clone the Jupyter Client git repository to your computer, for example in `/my/project/jupyter_client`. Now create an [editable install](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs) and download the dependencies of code and test suite by executing: cd /my/projects/jupyter_client/ pip install -e .[test] py.test The last command runs the test suite to verify the setup. During development, you can pass filenames to `py.test`, and it will execute only those tests. ## Documentation The documentation of Jupyter Client is generated from the files in `docs/` using Sphinx. Instructions for setting up Sphinx with a selection of optional modules are in the [Documentation Guide](https://jupyter.readthedocs.io/en/latest/contributing/docs-contributions/index.html). You'll also need the `make` command. 
For a minimal Sphinx installation to process the Jupyter Client docs, execute: pip install ipykernel sphinx sphinx_rtd_theme The following commands build the documentation in HTML format and check for broken links: cd /my/projects/jupyter_client/docs/ make html linkcheck Point your browser to the following URL to access the generated documentation: _file:///my/projects/jupyter\_client/docs/\_build/html/index.html_ upstream-ontologist-0.2.4/readme_tests/jupyter-client/description000064400000000000000000000004121046102023000235660ustar 00000000000000jupyter_client contains the reference implementation of the Jupyter protocol. It also provides client and kernel management APIs for working with kernels. It also provides the jupyter kernelspec entrypoint for installing kernelspecs for use with Jupyter frontends. upstream-ontologist-0.2.4/readme_tests/libtrace/README000064400000000000000000000032431046102023000210140ustar 00000000000000libtrace 4.0.7 --------------------------------------------------------------------------- Copyright (c) 2007-2019 The University of Waikato, Hamilton, New Zealand. All rights reserved. This code has been developed by the University of Waikato WAND research group. For further information please see http://www.wand.net.nz/. --------------------------------------------------------------------------- See INSTALL for instructions on how to install libtrace. This directory contains source code for libtrace, a userspace library for processing of network traffic capture from live interfaces or from offline traces. libtrace was primarily designed for use with the real-time interface to the Waikato DAG Capture Point software running at The University of Waikato, and has been since extended to a range of other trace and interface formats. In version 4.0, we have introduced an API for processing packets in parallel using multiple threads. See libtrace_parallel.h for a detailed description of the API. 
Further information about libtrace, see http://research.wand.net.nz/software/libtrace.php Bugs should be reported by either emailing contact@wand.net.nz or filing an issue at https://github.com/LibtraceTeam/libtrace It is licensed under the GNU Lesser General Public License (GPL) version 3. Please see the included files COPYING and COPYING.LESSER for details of this license. A detailed ChangeLog can be found on the libtrace wiki: https://github.com/LibtraceTeam/libtrace/wiki/ChangeLog Documentation, usage instructions and a detailed tutorial can also found on the libtrace wiki. For further information, please contact the WAND group. See http://www.wand.net.nz/ for details. upstream-ontologist-0.2.4/readme_tests/libtrace/description000064400000000000000000000010451046102023000224000ustar 00000000000000This directory contains source code for libtrace, a userspace library for processing of network traffic capture from live interfaces or from offline traces. libtrace was primarily designed for use with the real-time interface to the Waikato DAG Capture Point software running at The University of Waikato, and has been since extended to a range of other trace and interface formats. In version 4.0, we have introduced an API for processing packets in parallel using multiple threads. See libtrace_parallel.h for a detailed description of the API. upstream-ontologist-0.2.4/readme_tests/perl-timedate/README000064400000000000000000000014531046102023000217640ustar 00000000000000This is the perl5 TimeDate distribution. It requires perl version 5.003 or later This distribution replaces my earlier GetDate distribution, which was only a date parser. The date parser contained in this distribution is far superior to the yacc based parser, and a *lot* faster. The parser contained here will only parse absolute dates, if you want a date parser that can parse relative dates then take a look at the Time modules by David Muir on CPAN. 
You install the library by running these commands: perl Makefile.PL make make test make install Please report any bugs/suggestions to Copyright 1995-2009 Graham Barr. This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. Share and Enjoy! Graham upstream-ontologist-0.2.4/readme_tests/perl-timedate/description000064400000000000000000000007101046102023000233450ustar 00000000000000This is the perl5 TimeDate distribution. It requires perl version 5.003 or later This distribution replaces my earlier GetDate distribution, which was only a date parser. The date parser contained in this distribution is far superior to the yacc based parser, and a *lot* faster. The parser contained here will only parse absolute dates, if you want a date parser that can parse relative dates then take a look at the Time modules by David Muir on CPAN. upstream-ontologist-0.2.4/readme_tests/perl5-xml-compile-cache/README.md000064400000000000000000000042201046102023000241360ustar 00000000000000# distribution XML-Compile-Cache * My extended documentation: * Development via GitHub: * Download from CPAN: * Indexed from CPAN: and The XML-Compile suite is a large set of modules for various XML related standards. This optional component is very useful: it manages compiled handlers and helps you define prefixes. ## Development → Release Important to know, is that I use an extension on POD to write the manuals. The "raw" unprocessed version is visible on GitHub. It will run without problems, but does not contain manual-pages. Releases to CPAN are different: "raw" documentation gets removed from the code and translated into real POD and clean HTML. This reformatting is implemented with the OODoc distribution (A name I chose before OpenOffice existed, sorry for the confusion) Clone from github for the "raw" version. For instance, when you want to contribute a new feature. On github, you can find the processed version for each release. 
But the better source is CPAN; to get it installed simply run: ```sh cpan -i XML::Compile::Cache ``` ## Contributing When you want to contribute to this module, you do not need to provide a perfect patch... actually: it is nearly impossible to create a patch which I will merge without modification. Usually, I need to adapt the style of code and documentation to my own strict rules. When you submit an extension, please contribute a set with 1. code 2. code documentation 3. regression tests in t/ **Please note:** When you contribute in any way, you agree to transfer the copyrights to Mark Overmeer (you will get the honors in the code and/or ChangeLog). You also automatically agree that your contribution is released under the same license as this project: licensed as perl itself. ## Copyright and License This project is free software; you can redistribute it and/or modify it under the same terms as Perl itself. See upstream-ontologist-0.2.4/readme_tests/perl5-xml-compile-cache/description000064400000000000000000000002701046102023000251260ustar 00000000000000The XML-Compile suite is a large set of modules for various XML related standards. This optional component is very useful: it manages compiled handlers and helps you define prefixes. 
upstream-ontologist-0.2.4/readme_tests/pylint-flask/README.md000064400000000000000000000041431046102023000222430ustar 00000000000000pylint-flask =============== [![Build Status](https://travis-ci.org/jschaf/pylint-flask.svg?branch=master)](https://travis-ci.org/jschaf/pylint-flask) [![Coverage Status](https://coveralls.io/repos/jschaf/pylint-flask/badge.svg?branch=master)](https://coveralls.io/r/jschaf/pylint-flask?branch=master) [![PyPI](https://img.shields.io/pypi/v/pylint-flask.svg)](https://pypi.python.org/pypi/pylint-flask) [![License](https://img.shields.io/badge/license-GPLv2%20License-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html) ## About `pylint-flask` is [Pylint](http://pylint.org) plugin for improving code analysis when editing code using [Flask](http://flask.pocoo.org/). Inspired by [pylint-django](https://github.com/landscapeio/pylint-django). ### Problems pylint-flask solves: 1. Recognize `flask.ext.*` style imports. Say you have the following code: ```python from flask.ext import wtf from flask.ext.wtf import validators class PostForm(wtf.Form): content = wtf.TextAreaField('Content', validators=[validators.Required()]) ``` Normally, pylint will throw errors like: ``` E: 1,0: No name 'wtf' in module 'flask.ext' E: 2,0: No name 'wtf' in module 'flask.ext' F: 2,0: Unable to import 'flask.ext.wtf' ``` As pylint builds it's own abstract syntax tree, `pylint-flask` will translate the `flask.ext` imports into the actual module name, so pylint can continue checking your code. ## Usage Ensure `pylint-flask` is installed and on your path, and then run pylint using pylint-flask as a plugin. ``` pip install pylint-flask pylint --load-plugins pylint_flask [..your module..] ``` ## Contributing Pull requests are always welcome. Here's an outline of the steps you need to prepare your code. 1. git clone https://github.com/jschaf/pylint-flask.git 2. cd pylint-flask 3. mkvirtualenv pylint-flask 4. pip install -r dev-requirements.txt 5. 
git checkout -b MY-NEW-FIX 6. Hack away 7. Make sure everything is green by running `tox` 7. git push origin MY-NEW-FIX 8. Create a pull request ## License pylint-flask is available under the GPLv2 license.upstream-ontologist-0.2.4/readme_tests/pylint-flask/description000064400000000000000000000001641046102023000232310ustar 00000000000000pylint-flask is Pylint plugin for improving code analysis when editing code using Flask. Inspired by pylint-django. upstream-ontologist-0.2.4/readme_tests/python-icalendar/README.rst000064400000000000000000000022431046102023000232760ustar 00000000000000========================================================== Internet Calendaring and Scheduling (iCalendar) for Python ========================================================== The `icalendar`_ package is a `RFC 5545`_ compatible parser/generator for iCalendar files. ---- :Homepage: https://icalendar.readthedocs.io :Code: https://github.com/collective/icalendar :Mailing list: https://github.com/collective/icalendar/issues :Dependencies: `python-dateutil`_ and `pytz`_. :Compatible with: Python 2.7 and 3.4+ :License: `BSD`_ ---- .. image:: https://travis-ci.org/collective/icalendar.svg?branch=master :target: https://travis-ci.org/collective/icalendar .. _`icalendar`: https://pypi.org/project/icalendar/ .. _`RFC 5545`: https://www.ietf.org/rfc/rfc5545.txt .. _`python-dateutil`: https://github.com/dateutil/dateutil/ .. _`pytz`: https://pypi.org/project/pytz/ .. _`BSD`: https://github.com/collective/icalendar/issues/2 Related projects ================ * `icalevents `_. It is built on top of icalendar and allows you to query iCal files and get the events happening on specific dates. It manages recurrent events as well. upstream-ontologist-0.2.4/readme_tests/python-icalendar/description000064400000000000000000000001251046102023000240520ustar 00000000000000The icalendar package is a RFC 5545 compatible parser/generator for iCalendar files. 
upstream-ontologist-0.2.4/readme_tests/python-rsa/README.md000064400000000000000000000036101046102023000217300ustar 00000000000000# Pure Python RSA implementation [![PyPI](https://img.shields.io/pypi/v/rsa.svg)](https://pypi.org/project/rsa/) [![Build Status](https://travis-ci.org/sybrenstuvel/python-rsa.svg?branch=master)](https://travis-ci.org/sybrenstuvel/python-rsa) [![Coverage Status](https://coveralls.io/repos/github/sybrenstuvel/python-rsa/badge.svg?branch=master)](https://coveralls.io/github/sybrenstuvel/python-rsa?branch=master) [![Code Climate](https://api.codeclimate.com/v1/badges/a99a88d28ad37a79dbf6/maintainability)](https://codeclimate.com/github/codeclimate/codeclimate/maintainability) [Python-RSA](https://stuvel.eu/rsa) is a pure-Python RSA implementation. It supports encryption and decryption, signing and verifying signatures, and key generation according to PKCS#1 version 1.5. It can be used as a Python library as well as on the commandline. The code was mostly written by Sybren A. Stüvel. Documentation can be found at the [Python-RSA homepage](https://stuvel.eu/rsa). For all changes, check [the changelog](https://github.com/sybrenstuvel/python-rsa/blob/master/CHANGELOG.md). Download and install using: pip install rsa or download it from the [Python Package Index](https://pypi.org/project/rsa/). The source code is maintained at [GitHub](https://github.com/sybrenstuvel/python-rsa/) and is licensed under the [Apache License, version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ## Security Because of how Python internally stores numbers, it is very hard (if not impossible) to make a pure-Python program secure against timing attacks. This library is no exception, so use it with care. See https://securitypitfalls.wordpress.com/2018/08/03/constant-time-compare-in-python/ for more info. ## Setup of Development Environment ``` python3 -m venv .venv . ./.venv/bin/activate pip install poetry poetry install ``` ## Publishing a New Release ``` . 
./.venv/bin/activate poetry publish --build ``` upstream-ontologist-0.2.4/readme_tests/python-rsa/description000064400000000000000000000004421046102023000227170ustar 00000000000000Python-RSA is a pure-Python RSA implementation. It supports encryption and decryption, signing and verifying signatures, and key generation according to PKCS#1 version 1.5. It can be used as a Python library as well as on the commandline. The code was mostly written by Sybren A. Stüvel. upstream-ontologist-0.2.4/readme_tests/ruby-columnize/README.md000064400000000000000000000051541046102023000226150ustar 00000000000000[![Build Status](https://travis-ci.org/rocky/columnize.png)](https://travis-ci.org/rocky/columnize) [![Gem Version](https://badge.fury.io/rb/columnize.svg)](http://badge.fury.io/rb/columnize) Columnize - Format an Array as a Column-aligned String ============================================================================ In showing a long lists, sometimes one would prefer to see the value arranged aligned in columns. Some examples include listing methods of an object, listing debugger commands, or showing a numeric array with data aligned. 
Setup ----- $ irb >> require 'columnize' => true With numeric data ----------------- >> a = (1..10).to_a => [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] >> a.columnize => "1 2 3 4 5 6 7 8 9 10" >> puts a.columnize :arrange_array => true, :displaywidth => 10 [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] => nil >> puts a.columnize :arrange_array => true, :displaywidth => 20 [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] => nil With String data ---------------- >> g = %w(bibrons golden madascar leopard mourning suras tokay) => ["bibrons", "golden", "madascar", "leopard", "mourning", "suras", "tokay"] >> puts g.columnize :displaywidth => 15 bibrons suras golden tokay madascar leopard mourning => nil >> puts g.columnize :displaywidth => 19, :colsep => ' | ' bibrons | suras golden | tokay madascar leopard mourning => nil >> puts g.columnize :displaywidth => 18, :colsep => ' | ', :ljust => false bibrons | mourning golden | suras madascar | tokay leopard => nil Using Columnize.columnize ------------------------- >> Columnize.columnize(a) => "1 2 3 4 5 6 7 8 9 10" >> puts Columnize.columnize(a, :displaywidth => 10) 1 5 9 2 6 10 3 7 4 8 => nil >> Columnize.columnize(g) => "bibrons golden madascar leopard mourning suras tokay" >> puts Columnize.columnize(g, :displaywidth => 19, :colsep => ' | ') bibrons | mourning golden | suras madascar | tokay leopard => nil Credits ------- This is adapted from a method of the same name from Python's cmd module. 
Other stuff ----------- Authors: Rocky Bernstein [![endorse](https://api.coderwall.com/rocky/endorsecount.png)](https://coderwall.com/rocky) and [Martin Davis](https://github.com/waslogic) License: Copyright (c) 2011,2013 Rocky Bernstein Warranty -------- You can redistribute it and/or modify it under either the terms of the GPL version 2 or the conditions listed in COPYING upstream-ontologist-0.2.4/readme_tests/ruby-columnize/description000064400000000000000000000003371046102023000236020ustar 00000000000000In showing a long lists, sometimes one would prefer to see the value arranged aligned in columns. Some examples include listing methods of an object, listing debugger commands, or showing a numeric array with data aligned. upstream-ontologist-0.2.4/readme_tests/ruby-sha3/README.md000064400000000000000000000076751046102023000214600ustar 00000000000000# sha3 [![Gem Version](https://badge.fury.io/rb/sha3.svg)](https://badge.fury.io/rb/sha3) [![CI](https://secure.travis-ci.org/johanns/sha3.png)](https://secure.travis-ci.org/johanns/sha3) [![Dependencies](https://gemnasium.com/johanns/sha3.png)](https://gemnasium.com/johanns/sha3) [![CodeClimate](https://codeclimate.com/github/johanns/sha3.png)](https://codeclimate.com/github/johanns/sha3) **SHA3 for Ruby** is a native (C) binding to SHA3 (Keccak FIPS 202) cryptographic hashing algorithm. - Home :: [https://github.com/johanns/sha3#readme]() - Issues :: [https://github.com/johanns/sha3/issues]() - Documentation :: [http://rubydoc.info/gems/sha3/frames]() ## Warnings - Version 1.0+ breaks compatibility with previous versions of this gem. - Do NOT use SHA3 to hash passwords; use either ```bcrypt``` or ```scrypt``` instead! ## Module details **SHA3::Digest**: A standard *Digest* _subclass_. The interface, and operation of this class are parallel to digest classes bundled with MRI-based Rubies (e.g.: **Digest::SHA2**, and **OpenSSL::Digest**). 
See [documentation for Ruby's **Digest** class for additional details](http://www.ruby-doc.org/stdlib-2.2.3/libdoc/digest/rdoc/Digest.html). ## Installation ```shell gem install sha3 ``` ## Usage ```ruby require 'sha3' ``` Valid hash bit-lengths are: *224*, *256*, *384*, *512*. ```ruby :sha224 :sha256 :sha384 :sha512 # SHA3::Digest.new(224) is SHA3::Digest.new(:sha224) ``` Alternatively, you can instantiate using one of four sub-classes: ```ruby SHA3::Digest::SHA224.new() # 224 bits SHA3::Digest::SHA256.new() # 256 bits SHA3::Digest::SHA384.new() # 384 bits SHA3::Digest::SHA512.new() # 512 bits ``` ### Basics ```ruby # Instantiate a new SHA3::Digest class with 256 bit length s = SHA3::Digest.new(:sha256) # OR # s = SHA3::Digest::SHA256.new() # Update hash state, and compute new value s.update "Compute Me" # << is an .update() alias s << "Me too" # Returns digest value in bytes s.digest # => "\xBE\xDF\r\xD9\xA1..." # Returns digest value as hex string s.hexdigest # => "bedf0dd9a15b647..." ### Digest class-methods: ### SHA3::Digest.hexdigest(:sha224, "Hash me, please") # => "200e7bc18cd613..." SHA3::Digest::SHA384.digest("Hash me, please") # => "\xF5\xCEpC\xB0eV..." ``` ### Hashing a file ```ruby # Compute the hash value for given file, and return the result as hex s = SHA3::Digest::SHA224.file("my_fantastical_file.bin").hexdigest # Calling SHA3::Digest.file(...) defaults to SHA256 s = SHA3::Digest.file("tests.sh") # => # ``` ## Development * Native build tools (e.g., GCC, Minigw, etc.) * Gems: rubygems-tasks, rake, rspec, yard ### Testing + RSpec Call ```rake``` to run the included RSpec tests. Only a small subset of test vectors are included in the source repository; however, the complete test vectors suite is available for download. Simply run the ```tests.sh``` shell script (available in the root of source directory) to generate full byte-length RSpec test files. 
```sh tests.sh``` ### Rubies Tested with Rubies: - MRI Ruby-Head - MRI 2.1.0 - MRI 2.0.0 - MRI 1.9.3 - MRI 1.9.2 - MRI 1.8.7 - Rubinius 2 On: - Ubuntu 12.04, 12.10, 13.04, 14.04, 15.04 - Windows 7, 8, 8.1, 10 - Mac OS X 10.6 - 10.11 ## Releases - *1.0.1* :: FIPS 202 compliance (breaks compatibility with earlier releases) - *0.2.6* :: Fixed bug #4 - *0.2.5* :: Bug fixes. (See ChangeLog.rdoc) - *0.2.4* :: Bug fixes. (YANKED) - *0.2.3* :: Added documentation file (decoupled form C source); refactored C source. - *0.2.2* :: Added sub-class for each SHA3 supported bit-lengths (example: SHA3::Digest::SHA256). Minor bug fix. - *0.2.0* :: Production worthy, but breaks API compatibility with 0.1.x. Backward-compatibility will be maintained henceforth. - *0.1.x* :: Alpha code, and not suitable for production. ## TO DO - Add SHAKE128/256 support ## Copyright Copyright (c) 2012 - 2015 Johanns Gregorian (https://github.com/johanns) **See LICENSE.txt for details.** upstream-ontologist-0.2.4/readme_tests/ruby-sha3/description000064400000000000000000000001411046102023000224240ustar 00000000000000SHA3 for Ruby is a native (C) binding to SHA3 (Keccak FIPS 202) cryptographic hashing algorithm. upstream-ontologist-0.2.4/readme_tests/samba/README.md000064400000000000000000000114341046102023000207120ustar 00000000000000About Samba =========== Samba is the standard Windows interoperability suite of programs for Linux and Unix. Samba is Free Software licensed under the GNU General Public License and the Samba project is a member of the Software Freedom Conservancy. Since 1992, Samba has provided secure, stable and fast file and print services for all clients using the SMB/CIFS protocol, such as all versions of DOS and Windows, OS/2, Linux and many others. Samba is an important component to seamlessly integrate Linux/Unix Servers and Desktops into Active Directory environments. It can function both as a domain controller or as a regular domain member. 
For the AD DC implementation a full HOWTO is provided at: https://wiki.samba.org/index.php/Samba4/HOWTO Community guidelines can be read at: https://wiki.samba.org/index.php/How_to_do_Samba:_Nicely This software is freely distributable under the GNU public license, a copy of which you should have received with this software (in a file called COPYING). CONTRIBUTIONS ============= Please see https://wiki.samba.org/index.php/Contribute for detailed set-by-step instructions on how to submit a patch for Samba via GitLab. Samba's GitLab mirror is at https://gitlab.com/samba-team/samba OUR CONTRIBUTORS ================ See https://www.samba.org/samba/team/ for details of the Samba Team, as well as details of all those currently active in Samba development. If you like a particular feature then look through the git change-log (on the web at https://gitweb.samba.org/?p=samba.git;a=summary) and see who added it, then send them an email. Remember that free software of this kind lives or dies by the response we get. If no one tells us they like it then we'll probably move onto something else. MORE INFO ========= DOCUMENTATION ------------- There is quite a bit of documentation included with the package, including man pages and the wiki at https://wiki.samba.org If you would like to help with our documentation, please contribute that improved content to the wiki, we are moving as much content there as possible. MAILING LIST ------------ Please do NOT send subscription/unsubscription requests to the lists! There is a mailing list for discussion of Samba. For details go to or send mail to There is also an announcement mailing list where new versions are announced. To subscribe go to or send mail to . All announcements also go to the samba list, so you only need to be on one. For details of other Samba mailing lists and for access to archives, see MAILING LIST ETIQUETTE ---------------------- A few tips when submitting to this or any mailing list. 1. 
Make your subject short and descriptive. Avoid the words "help" or "Samba" in the subject. The readers of this list already know that a) you need help, and b) you are writing about samba (of course, you may need to distinguish between Samba PDC and other file sharing software). Avoid phrases such as "what is" and "how do i". Some good subject lines might look like "Slow response with Excel files" or "Migrating from Samba PDC to NT PDC". 2. If you include the original message in your reply, trim it so that only the relevant lines, enough to establish context, are included. Chances are (since this is a mailing list) we've already read the original message. 3. Trim irrelevant headers from the original message in your reply. All we need to see is a) From, b) Date, and c) Subject. We don't even really need the Subject, if you haven't changed it. Better yet is to just preface the original message with "On [date] [someone] wrote:". 4. Please don't reply to or argue about spam, spam filters or viruses on any Samba lists. We do have a spam filtering system that is working quite well thank you very much but occasionally unwanted messages slip through. Deal with it. 5. Never say "Me too." It doesn't help anyone solve the problem. Instead, if you ARE having the same problem, give more information. Have you seen something that the other writer hasn't mentioned, which may be helpful? 6. If you ask about a problem, then come up with the solution on your own or through another source, by all means post it. Someone else may have the same problem and is waiting for an answer, but never hears of it. 7. Give as much *relevant* information as possible such as Samba release number, OS, kernel version, etc... 8. RTFM. Google. WEBSITE ------- A Samba website has been setup with lots of useful info. Connect to: https://www.samba.org/ As well as general information and documentation, this also has searchable archives of the mailing list and links to other useful resources such as the wiki. 
upstream-ontologist-0.2.4/readme_tests/samba/description000064400000000000000000000014621046102023000217010ustar 00000000000000Samba is the standard Windows interoperability suite of programs for Linux and Unix. Samba is Free Software licensed under the GNU General Public License and the Samba project is a member of the Software Freedom Conservancy. Since 1992, Samba has provided secure, stable and fast file and print services for all clients using the SMB/CIFS protocol, such as all versions of DOS and Windows, OS/2, Linux and many others. Samba is an important component to seamlessly integrate Linux/Unix Servers and Desktops into Active Directory environments. It can function both as a domain controller or as a regular domain member. For the AD DC implementation a full HOWTO is provided at: https://wiki.samba.org/index.php/Samba4/HOWTO Community guidelines can be read at: https://wiki.samba.org/index.php/How_to_do_Samba:_Nicely upstream-ontologist-0.2.4/readme_tests/saneyaml/README.rst000064400000000000000000000026101046102023000216440ustar 00000000000000======== saneyaml ======== This micro library is a PyYaml wrapper with sane behaviour to read and write readable YAML safely, typically when used with configuration files. With saneyaml you can dump readable and clean YAML and load safely any YAML preserving ordering and avoiding surprises of type conversions by loading everything except booleans as strings. Optionally you can check for duplicated map keys when loading YAML. Works with Python 2 and 3. Requires PyYAML. License: apache-2.0 Homepage_url: https://github.com/nexB/saneyaml Usage:: pip install saneyaml >>> from saneyaml import load as l >>> from saneyaml import dump as d >>> a=l('''version: 3.0.0.dev6 ... ... description: | ... AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file ... provides a way to document a software component. ... 
''') >>> a OrderedDict([ (u'version', u'3.0.0.dev6'), (u'description', u'AboutCode Toolkit is a tool to process ABOUT files. ' 'An ABOUT file\nprovides a way to document a software component.\n')]) >>> pprint(a.items()) [(u'version', u'3.0.0.dev6'), (u'description', u'AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file\nprovides a way to document a software component.\n')] >>> print(d(a)) version: 3.0.0.dev6 description: | AboutCode Toolkit is a tool to process ABOUT files. An ABOUT file provides a way to document a software component. upstream-ontologist-0.2.4/readme_tests/saneyaml/description000064400000000000000000000007001046102023000224210ustar 00000000000000This micro library is a PyYaml wrapper with sane behaviour to read and write readable YAML safely, typically when used with configuration files. With saneyaml you can dump readable and clean YAML and load safely any YAML preserving ordering and avoiding surprises of type conversions by loading everything except booleans as strings. Optionally you can check for duplicated map keys when loading YAML. Works with Python 2 and 3. Requires PyYAML. upstream-ontologist-0.2.4/readme_tests/sfcgal/README.md000064400000000000000000000004411046102023000210620ustar 00000000000000SFCGAL ====== SFCGAL is a C++ wrapper library around [CGAL](http://www.cgal.org) with the aim of supporting ISO 191007:2013 and OGC Simple Features for 3D operations. Please refer to the project page for an updated installation procedure. upstream-ontologist-0.2.4/readme_tests/sfcgal/description000064400000000000000000000002031046102023000220450ustar 00000000000000SFCGAL is a C++ wrapper library around CGAL with the aim of supporting ISO 191007:2013 and OGC Simple Features for 3D operations. 
upstream-ontologist-0.2.4/readme_tests/statuscake/README.md000064400000000000000000000006071046102023000217760ustar 00000000000000# statuscake [![Build Status](https://travis-ci.org/DreamItGetIT/statuscake.svg?branch=master)](https://travis-ci.org/DreamItGetIT/statuscake) `statuscake` is a Go pkg that implements a client for the [statuscake]("https://statuscake.com") API. More documentation and examples at [http://godoc.org/github.com/DreamItGetIT/statuscake](http://godoc.org/github.com/DreamItGetIT/statuscake). upstream-ontologist-0.2.4/readme_tests/statuscake/description000064400000000000000000000001101046102023000227520ustar 00000000000000statuscake is a Go pkg that implements a client for the statuscake API. upstream-ontologist-0.2.4/readme_tests/text-worddif/README.md000064400000000000000000000022451046102023000222470ustar 00000000000000Text/WordDiff version 0.09 ========================== This library's module, Text::WordDiff, is a variation on the lovely [Text::Diff](http://search.cpan.org/perldoc?Text::Diff) module. Rather than generating traditional line-oriented diffs, however, it generates word-oriented diffs. This can be useful for tracking changes in narrative documents or documents with very long lines. To diff source code, one is still best off using Text::Diff. But if you want to see how a short story changed from one version to the next, this module will do the job very nicely. INSTALLATION To install this module, type the following: perl Build.PL ./Build ./Build test ./Build install Or, if you don't have Module::Build installed, type the following: perl Makefile.PL make make test make install Dependencies ------------ Text::WordDiff requires the following modules: * Algorithm::Diff '1.19', * Term::ANSIColor '0', * HTML::Entities '0', Copyright and License --------------------- Copyright (c) 2005-2011 David E. Wheeler. Some Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
upstream-ontologist-0.2.4/readme_tests/text-worddif/description000064400000000000000000000007211046102023000232330ustar 00000000000000This library's module, Text::WordDiff, is a variation on the lovely Text::Diff module. Rather than generating traditional line-oriented diffs, however, it generates word-oriented diffs. This can be useful for tracking changes in narrative documents or documents with very long lines. To diff source code, one is still best off using Text::Diff. But if you want to see how a short story changed from one version to the next, this module will do the job very nicely. upstream-ontologist-0.2.4/readme_tests/wandio/README000064400000000000000000000024441046102023000205120ustar 00000000000000WANDIO 4.2.1 --------------------------------------------------------------------------- Copyright (c) 2007-2019 The University of Waikato, Hamilton, New Zealand. All rights reserved. This code has been developed by the University of Waikato WAND research group. For further information please see http://www.wand.net.nz/. --------------------------------------------------------------------------- See INSTALL for instructions on how to install WANDIO. This directory contains source code for WANDIO, a library for reading from, and writing to, files. Depending on libraries available at compile time, WANDIO provides transparent compression/decompression for the following formats: - zlib (gzip) - bzip2 - lzo (write-only) - lzma - zstd - lz4 - Intel QAT (write-only) - http (read-only) WANDIO also improves IO performance by performing compression/decompression in a separate thread (if pthreads are available). Documentation for WANDIO and its included tools can be found at https://github.com/wanduow/wandio/wiki Bugs should be reported by either emailing contact@wand.net.nz or filing an issue at https://github.com/wanduow/wandio It is licensed under the Lesser GNU General Public License (LGPL) version 3. 
Please see the included files COPYING and COPYING.LESSER for details of this license. upstream-ontologist-0.2.4/readme_tests/wandio/description000064400000000000000000000007261046102023000221010ustar 00000000000000This directory contains source code for WANDIO, a library for reading from, and writing to, files. Depending on libraries available at compile time, WANDIO provides transparent compression/decompression for the following formats: - zlib (gzip) - bzip2 - lzo (write-only) - lzma - zstd - lz4 - Intel QAT (write-only) - http (read-only) WANDIO also improves IO performance by performing compression/decompression in a separate thread (if pthreads are available). upstream-ontologist-0.2.4/src/bin/autocodemeta.rs000064400000000000000000000110751046102023000202510ustar 00000000000000use clap::Parser; use serde::Serialize; use std::collections::HashSet; use std::io::Write; use std::path::PathBuf; use upstream_ontologist::UpstreamDatum; #[derive(Serialize, Default)] struct SoftwareSourceCode { name: Option, version: Option, #[serde(rename = "codeRepository")] code_repository: Option, #[serde(rename = "issueTracker")] issue_tracker: Option, license: Option, description: Option, // TODO(jelmer): Support setting contIntegration // TODO(jelmer): Support keywords // TODO(jelmer): Support funder // TODO(jelmer): Support funding // TODO(jelmer): Support creation date // TODO(jelmer): Support first release date // TODO(jelmer): Support unique identifier // TODO(jelmer): Support runtime platform // TODO(jelmer): Support other software requirements // TODO(jelmer): Support operating system // TODO(jelmer): Support development status // TODO(jelmer): Support reference publication // TODO(jelmer): Support part of // TODO(jelmer): Support Author #[serde(rename = "downloadUrl")] download_url: Option, #[serde(rename = "relatedLink")] related_link: HashSet, } fn valid_spdx_identifier(name: &str) -> bool { name.chars() .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '+') } fn 
codemeta_file_from_upstream_info(data: Vec) -> SoftwareSourceCode { let mut result = SoftwareSourceCode { ..Default::default() }; for upstream_datum in data { match upstream_datum { UpstreamDatum::Name(n) => { result.name = Some(n); } UpstreamDatum::Homepage(h) => { result.related_link.insert(h); } UpstreamDatum::Description(d) => { result.description = Some(d); } UpstreamDatum::Download(d) => { result.download_url = Some(d); } UpstreamDatum::MailingList(ml) => { result.related_link.insert(ml); } UpstreamDatum::BugDatabase(bd) => { result.issue_tracker = Some(bd); } UpstreamDatum::Screenshots(us) => { for u in us { result.related_link.insert(u); } } UpstreamDatum::Wiki(r) => { result.related_link.insert(r); } UpstreamDatum::Repository(r) => { result.code_repository = Some(r); } UpstreamDatum::RepositoryBrowse(r) => { result.related_link.insert(r); } UpstreamDatum::License(l) => { if valid_spdx_identifier(&l) { result.license = Some(format!("https://spdx.org/licenses/{}", l)); } } UpstreamDatum::Version(v) => { result.version = Some(v); } UpstreamDatum::Documentation(a) => { result.related_link.insert(a); } _ => {} } } result } #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Do not probe external services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Consult external directory for metadata #[clap(long)] consult_external_directory: bool, } #[tokio::main] async fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); let path = args.path.canonicalize().unwrap(); let upstream_info = 
upstream_ontologist::get_upstream_info( path.as_path(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .await .unwrap(); let codemeta = codemeta_file_from_upstream_info(upstream_info.into()); std::io::stdout() .write_all(serde_json::to_string_pretty(&codemeta).unwrap().as_bytes()) .unwrap(); } upstream-ontologist-0.2.4/src/bin/autodoap.rs000064400000000000000000000223671046102023000174210ustar 00000000000000use clap::Parser; use maplit::hashmap; use std::io::Write; use std::path::PathBuf; use upstream_ontologist::UpstreamDatum; use xmltree::{Element, Namespace, XMLNode}; const DOAP_NS: &str = "http://usefulinc.com/ns/doap"; const RDF_NS: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns"; const FOAF_NS: &str = "http://xmlns.com/foaf/0.1/"; fn rdf_resource(namespace: &Namespace, url: String) -> XMLNode { XMLNode::Element(Element { prefix: Some("rdf".to_string()), namespaces: Some(namespace.clone()), namespace: Some(RDF_NS.to_string()), name: "resource".to_string(), attributes: hashmap! {"rdf:resource".to_string() => url}, children: vec![], }) } fn doap_file_from_upstream_info(data: Vec) -> Element { let mut namespace = Namespace::empty(); namespace.put("doap", DOAP_NS); namespace.put("rdf", RDF_NS); namespace.put("foaf", FOAF_NS); let mut repository = None; let mut repository_browse = None; let mut children = vec![]; for upstream_datum in data { match upstream_datum { UpstreamDatum::Name(n) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "name".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(n)], })); } UpstreamDatum::Homepage(h) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "homepage".to_string(), attributes: hashmap! 
{}, children: vec![rdf_resource(&namespace, h)], })); } UpstreamDatum::Summary(s) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "shortdesc".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(s)], })); } UpstreamDatum::Description(d) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "description".to_string(), attributes: hashmap! {}, children: vec![XMLNode::Text(d)], })); } UpstreamDatum::Download(d) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "download-page".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, d)], })); } UpstreamDatum::MailingList(ml) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "mailing-list".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, ml)], })); } UpstreamDatum::BugDatabase(bd) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "bug-database".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, bd)], })); } UpstreamDatum::Screenshots(us) => { for u in us { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "screenshots".to_string(), attributes: hashmap! 
{}, children: vec![rdf_resource(&namespace, u)], })); } } UpstreamDatum::SecurityContact(sc) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "security-contact".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, sc)], })); } UpstreamDatum::Wiki(r) => { children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "wiki".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, r)], })); } UpstreamDatum::Repository(r) => { repository = Some(r); } UpstreamDatum::RepositoryBrowse(r) => { repository_browse = Some(r); } _ => {} } } if repository.is_some() || repository_browse.is_some() { let mut git_repo_el = Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "GitRepository".to_string(), attributes: hashmap! {}, children: vec![], }; if let Some(r) = repository { git_repo_el.children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "location".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, r)], })); } if let Some(b) = repository_browse { git_repo_el.children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "browse".to_string(), attributes: hashmap! {}, children: vec![rdf_resource(&namespace, b)], })); } children.push(XMLNode::Element(Element { prefix: Some("doap".to_string()), namespaces: Some(namespace.clone()), namespace: Some(DOAP_NS.to_string()), name: "repository".to_string(), attributes: hashmap! 
{}, children: vec![XMLNode::Element(git_repo_el)], })); } Element { prefix: Some("doap".to_string()), namespaces: Some(namespace), namespace: Some(DOAP_NS.to_string()), name: "Project".to_string(), attributes: hashmap! {}, children, } } #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Do not probe external services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Consult external directory #[clap(long)] consult_external_directory: bool, } #[tokio::main] async fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); let path = args.path.canonicalize().unwrap(); let upstream_info = upstream_ontologist::get_upstream_info( path.as_path(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .await .unwrap(); let el = doap_file_from_upstream_info(upstream_info.into()); use xmltree::EmitterConfig; let config = EmitterConfig::new() .perform_indent(true) .normalize_empty_elements(true); el.write_with_config(&mut std::io::stdout(), config) .unwrap(); } upstream-ontologist-0.2.4/src/bin/guess-upstream-metadata.rs000064400000000000000000000110401046102023000223310ustar 00000000000000use clap::Parser; use std::io::Write; use futures::stream::StreamExt; use std::path::PathBuf; #[derive(Parser, Debug)] #[command(author, version)] struct Args { /// Whether to allow running code from the package #[clap(long)] trust: bool, /// Whether to enable debug logging #[clap(long)] debug: bool, /// Whether to enable trace logging #[clap(long)] trace: bool, /// Do not probe external 
services #[clap(long)] disable_net_access: bool, /// Check guesssed metadata against external sources #[clap(long)] check: bool, /// Path to sources #[clap(default_value = ".")] path: PathBuf, /// Scan for metadata rather than printing results #[clap(long)] scan: bool, /// Scan specified homepage rather than current directory #[clap(long)] from_homepage: Option, /// Find data based on specified repology id #[clap(long)] from_repology: Option, /// Pull in external (not maintained by upstream) directory data #[clap(long)] consult_external_directory: bool, } #[tokio::main] async fn main() { let args = Args::parse(); env_logger::builder() .format(|buf, record| writeln!(buf, "{}", record.args())) .filter( None, if args.trace { log::LevelFilter::Trace } else if args.debug { log::LevelFilter::Debug } else { log::LevelFilter::Info }, ) .init(); if let Some(from_homepage) = args.from_homepage { for d in upstream_ontologist::homepage::guess_from_homepage(&from_homepage) .await .unwrap() { println!( "{}: {:?} - certainty {} (from {:?})", d.datum.field(), d.datum, d.certainty .map_or_else(|| "unknown".to_string(), |d| d.to_string()), d.origin ); } } else if let Some(id) = args.from_repology { for d in upstream_ontologist::repology::find_upstream_from_repology(&id) .await .unwrap() { println!( "{}: {:?} - certainty {} (from {:?})", d.datum.field(), d.datum, d.certainty .map_or_else(|| "unknown".to_string(), |d| d.to_string()), d.origin ); } } else if args.scan { let mut stream = upstream_ontologist::upstream_metadata_stream( &args.path.canonicalize().unwrap(), Some(args.trust), ); while let Some(entry) = stream.next().await { let entry = entry.unwrap(); println!( "{}: {:?} - certainty {}{}", entry.datum.field(), entry.datum, entry .certainty .map_or("unknown".to_string(), |c| c.to_string()), entry .origin .map_or_else(|| "".to_string(), |o| format!(" (from {:?})", o)) ); } } else { let metadata = match upstream_ontologist::guess_upstream_metadata( 
&args.path.canonicalize().unwrap(), Some(args.trust), Some(!args.disable_net_access), Some(args.consult_external_directory), Some(args.check), ) .await { Ok(m) => m, Err(upstream_ontologist::ProviderError::ParseError(e)) => { eprintln!("Error parsing metadata: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::IoError(e)) => { eprintln!("I/O Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::Other(e)) => { eprintln!("Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::HttpJsonError(e)) => { eprintln!("Error: {}", e); std::process::exit(1); } Err(upstream_ontologist::ProviderError::ExtrapolationLimitExceeded(l)) => { eprintln!("Extraoplation limit exceeded: {}", l); std::process::exit(1); } }; let out = serde_yaml::to_value(&metadata).unwrap(); std::io::stdout() .write_all(serde_yaml::to_string(&out).unwrap().as_bytes()) .unwrap(); } } upstream-ontologist-0.2.4/src/extrapolate.rs000064400000000000000000000470111046102023000173560ustar 00000000000000use crate::{Certainty, UpstreamDatum, UpstreamDatumWithMetadata}; use crate::{ProviderError, UpstreamMetadata}; use log::warn; const DEFAULT_ITERATION_LIMIT: usize = 10; type ExtrapolationCallback = fn( UpstreamMetadata, bool, ) -> std::pin::Pin< Box< dyn std::future::Future, ProviderError>> + Send, >, >; struct Extrapolation { from_fields: &'static [&'static str], to_fields: &'static [&'static str], cb: ExtrapolationCallback, } async fn extrapolate_repository_from_homepage( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let homepage = upstream_metadata.get("Homepage").unwrap(); let url = match homepage.datum.to_url() { Some(url) => url, None => { return { warn!("Homepage field is not a URL"); Ok(vec![]) } } }; if let Some(repo) = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: 
Some( std::cmp::min(homepage.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: homepage.origin.clone(), }); } Ok(ret) } async fn extrapolate_homepage_from_repository_browse( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let browse_url = upstream_metadata.get("Repository-Browse").unwrap(); let url = match browse_url.datum.to_url() { Some(url) => url, None => { return { warn!("Repository-Browse field is not a URL"); Ok(vec![]) } } }; // Some hosting sites are commonly used as Homepage // TODO(jelmer): Maybe check that there is a README file that // can serve as index? let forge = crate::find_forge(&url, Some(net_access)).await; if forge.is_some() && forge.unwrap().repository_browse_can_be_homepage() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(browse_url.datum.as_str().unwrap().to_string()), certainty: Some( std::cmp::min(browse_url.certainty, Some(Certainty::Possible)) .unwrap_or(Certainty::Possible), ), origin: browse_url.origin.clone(), }); } Ok(ret) } async fn copy_bug_db_field( upstream_metadata: &UpstreamMetadata, _net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let old_bug_db = upstream_metadata.get("Bugs-Database").unwrap(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(old_bug_db.datum.as_str().unwrap().to_string()), certainty: old_bug_db.certainty, origin: old_bug_db.origin.clone(), }); Ok(ret) } async fn extrapolate_repository_from_bug_db( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Database").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Bug-Database field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), 
certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_repository_browse_from_repository( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; let browse_url = crate::vcs::browse_url_from_repo_url( &crate::vcs::VcsLocation { url, branch: None, subpath: None, }, Some(net_access), ) .await; Ok(if let Some(browse_url) = browse_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(browse_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_repository_from_repository_browse( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository-Browse").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository-Browse field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_bug_database_from_repository( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; Ok( if let Some(bug_db_url) = crate::guess_bug_database_url_from_repo_url(&url, Some(net_access)).await { vec![UpstreamDatumWithMetadata { datum: 
UpstreamDatum::BugDatabase(bug_db_url.to_string()), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }, ) } async fn extrapolate_bug_submit_from_bug_db( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Database").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Bug-Database field is not a URL"); Ok(vec![]) } } }; let bug_submit_url = crate::bug_submit_url_from_bug_database_url(&url, Some(net_access)).await; Ok(if let Some(bug_submit_url) = bug_submit_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::BugSubmit(bug_submit_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_bug_db_from_bug_submit( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Bug-Submit").unwrap(); let old_value_url = match old_value.datum.to_url() { Some(url) => url, None => return Ok(vec![]), }; let bug_db_url = crate::bug_database_url_from_bug_submit_url(&old_value_url, Some(net_access)).await; Ok(if let Some(bug_db_url) = bug_db_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_db_url.to_string()), certainty: old_value.certainty, origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_repository_from_download( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let old_value = upstream_metadata.get("Download").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Download field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; Ok(if let Some(repo) = repo { vec![UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Repository(repo), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }] } else { vec![] }) } async fn extrapolate_name_from_repository( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let mut ret = vec![]; let old_value = upstream_metadata.get("Repository").unwrap(); let url = match old_value.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; let repo = crate::vcs::guess_repo_from_url(&url, Some(net_access)).await; if let Some(repo) = repo { let parsed: url::Url = repo.parse().unwrap(); let name = parsed.path_segments().unwrap().last().unwrap(); let name = name.strip_suffix(".git").unwrap_or(name); if !name.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some( std::cmp::min(old_value.certainty, Some(Certainty::Likely)) .unwrap_or(Certainty::Likely), ), origin: old_value.origin.clone(), }); } } Ok(ret) } async fn extrapolate_security_contact_from_security_md( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { let repository_url = upstream_metadata.get("Repository").unwrap(); let security_md_path = upstream_metadata.get("Security-MD").unwrap(); let url = match repository_url.datum.to_url() { Some(url) => url, None => { return { warn!("Repository field is not a URL"); Ok(vec![]) } } }; let security_url = crate::vcs::browse_url_from_repo_url( &crate::vcs::VcsLocation { url, branch: None, subpath: security_md_path.datum.as_str().map(|x| x.to_string()), }, Some(net_access), ) .await; Ok(if let Some(security_url) = security_url { vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::SecurityContact(security_url.to_string()), certainty: std::cmp::min(repository_url.certainty, security_md_path.certainty), origin: repository_url.origin.clone(), }] } else { vec![] }) } async fn 
extrapolate_contact_from_maintainer( upstream_metadata: &UpstreamMetadata, _net_access: bool, ) -> Result, ProviderError> { let maintainer = upstream_metadata.get("Maintainer").unwrap(); Ok(vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(maintainer.datum.as_person().unwrap().to_string()), certainty: maintainer.certainty, origin: maintainer.origin.clone(), }]) } async fn consult_homepage( upstream_metadata: &UpstreamMetadata, net_access: bool, ) -> Result, ProviderError> { if !net_access { return Ok(vec![]); } let homepage = upstream_metadata.get("Homepage").unwrap(); let url = match homepage.datum.to_url() { Some(url) => url, None => { return { warn!("Homepage field is not a URL"); Ok(vec![]) } } }; let mut ret = vec![]; for mut entry in crate::homepage::guess_from_homepage(&url).await? { entry.certainty = std::cmp::min(homepage.certainty, entry.certainty); ret.push(entry); } Ok(ret) } const EXTRAPOLATIONS: &[Extrapolation] = &[ Extrapolation { from_fields: &["Homepage"], to_fields: &["Repository"], cb: |us, na| Box::pin(async move { extrapolate_repository_from_homepage(&us, na).await }), }, Extrapolation { from_fields: &["Repository-Browse"], to_fields: &["Homepage"], cb: |us, na| { Box::pin(async move { extrapolate_homepage_from_repository_browse(&us, na).await }) }, }, Extrapolation { from_fields: &["Bugs-Database"], to_fields: &["Bug-Database"], cb: |us, na| Box::pin(async move { copy_bug_db_field(&us, na).await }), }, Extrapolation { from_fields: &["Bug-Database"], to_fields: &["Repository"], cb: |us, na| Box::pin(async move { extrapolate_repository_from_bug_db(&us, na).await }), }, Extrapolation { from_fields: &["Repository"], to_fields: &["Repository-Browse"], cb: |us, na| { Box::pin(async move { extrapolate_repository_browse_from_repository(&us, na).await }) }, }, Extrapolation { from_fields: &["Repository-Browse"], to_fields: &["Repository"], cb: |us, na| { Box::pin(async move { extrapolate_repository_from_repository_browse(&us, na).await }) 
}, }, Extrapolation { from_fields: &["Repository"], to_fields: &["Bug-Database"], cb: |us, na| { Box::pin(async move { extrapolate_bug_database_from_repository(&us, na).await }) }, }, Extrapolation { from_fields: &["Bug-Database"], to_fields: &["Bug-Submit"], cb: |us, na| Box::pin(async move { extrapolate_bug_submit_from_bug_db(&us, na).await }), }, Extrapolation { from_fields: &["Bug-Submit"], to_fields: &["Bug-Database"], cb: |us, na| Box::pin(async move { extrapolate_bug_db_from_bug_submit(&us, na).await }), }, Extrapolation { from_fields: &["Download"], to_fields: &["Repository"], cb: |us, na| Box::pin(async move { extrapolate_repository_from_download(&us, na).await }), }, Extrapolation { from_fields: &["Repository"], to_fields: &["Name"], cb: |us, na| Box::pin(async move { extrapolate_name_from_repository(&us, na).await }), }, Extrapolation { from_fields: &["Repository", "Security-MD"], to_fields: &["Security-Contact"], cb: |us, na| { Box::pin(async move { extrapolate_security_contact_from_security_md(&us, na).await }) }, }, Extrapolation { from_fields: &["Maintainer"], to_fields: &["Contact"], cb: |us, na| Box::pin(async move { extrapolate_contact_from_maintainer(&us, na).await }), }, Extrapolation { from_fields: &["Homepage"], to_fields: &["Bug-Database", "Repository"], cb: |us, na| Box::pin(async move { consult_homepage(&us, na).await }), }, ]; pub async fn extrapolate_fields( upstream_metadata: &mut UpstreamMetadata, net_access: bool, iteration_limit: Option, ) -> Result<(), ProviderError> { let iteration_limit = iteration_limit.unwrap_or(DEFAULT_ITERATION_LIMIT); let mut changed = true; let mut iterations = 0; while changed { changed = false; iterations += 1; if iterations > iteration_limit { return Err(ProviderError::ExtrapolationLimitExceeded(iteration_limit)); } for extrapolation in EXTRAPOLATIONS { let from_fields = extrapolation.from_fields; let to_fields = extrapolation.to_fields; let cb = extrapolation.cb; let from_values = from_fields .iter() 
.map(|f| upstream_metadata.get(f)) .collect::>(); if !from_values.iter().all(|v| v.is_some()) { log::trace!( "Not enough values for extrapolation from {:?} to {:?}", from_fields, to_fields ); continue; } let from_values = from_values .iter() .map(|v| v.unwrap().clone()) .collect::>(); let from_certainties = from_fields .iter() .map(|f| upstream_metadata.get(f).unwrap().certainty) .collect::>(); let from_certainty = *from_certainties.iter().min().unwrap(); let old_to_values: std::collections::HashMap<_, _> = to_fields .iter() .filter_map(|f| upstream_metadata.get(f).map(|v| (f, v.clone()))) .collect(); assert!(old_to_values.values().all(|v| v.certainty.is_some())); // If any of the to_fields already exist in old_to_values with a better or same // certainty, then we don't need to extrapolate. if to_fields.iter().all(|f| { old_to_values .get(f) .map(|v| v.certainty >= from_certainty) .unwrap_or(false) }) { log::trace!( "Not extrapolating from {:?} to {:?} because of certainty ({:?} >= {:?})", from_fields, to_fields, old_to_values .values() .map(|v| v.certainty) .collect::>(), from_certainty ); continue; } let extra_upstream_metadata = cb(upstream_metadata.clone(), net_access).await?; let changes = upstream_metadata.update(extra_upstream_metadata.into_iter()); if !changes.is_empty() { log::debug!( "Extrapolating ({:?} ⇒ {:?}) from ({:?})", old_to_values .iter() .map(|(k, v)| format!("{}: {}", k, v.datum)) .collect::>(), changes .iter() .map(|d| format!("{}: {}", d.datum.field(), d.datum)) .collect::>(), from_values .iter() .map(|v| format!( "{}: {} ({})", v.datum.field(), v.datum, v.certainty .map_or_else(|| "unknown".to_string(), |c| c.to_string()) )) .collect::>() ); changed = true; } } } Ok(()) } upstream-ontologist-0.2.4/src/forges/mod.rs000064400000000000000000000000251046102023000170640ustar 00000000000000pub mod sourceforge; upstream-ontologist-0.2.4/src/forges/sourceforge.rs000064400000000000000000000211071046102023000206340ustar 00000000000000use 
crate::check_bug_database_canonical; use crate::UpstreamDatum; use crate::{load_json_url, HTTPJSONError}; use lazy_regex::regex; use log::{debug, error, warn}; use reqwest::Url; async fn get_sf_metadata(project: &str) -> Option { let url = format!("https://sourceforge.net/rest/p/{}", project); match load_json_url(&Url::parse(url.as_str()).unwrap(), None).await { Ok(data) => Some(data), Err(HTTPJSONError::Error { status, .. }) if status == reqwest::StatusCode::NOT_FOUND => { None } r => panic!("Unexpected result from {}: {:?}", url, r), } } async fn parse_sf_json( data: serde_json::Value, project: &str, subproject: Option<&str>, ) -> Vec { let mut results = Vec::new(); if let Some(name) = data.get("name").and_then(|name| name.as_str()) { results.push(UpstreamDatum::Name(name.to_string())); } if let Some(external_homepage) = data.get("external_homepage").and_then(|url| url.as_str()) { results.push(UpstreamDatum::Homepage(external_homepage.to_string())); } if let Some(preferred_support_url) = data .get("preferred_support_url") .and_then(|url| url.as_str()) .filter(|x| !x.is_empty()) { let preferred_support_url = Url::parse(preferred_support_url).expect("preferred_support_url is not a valid URL"); match check_bug_database_canonical(&preferred_support_url, Some(true)).await { Ok(canonical_url) => { results.push(UpstreamDatum::BugDatabase(canonical_url.to_string())); } Err(_) => { results.push(UpstreamDatum::BugDatabase( preferred_support_url.to_string(), )); } } } let vcs_names = ["hg", "git", "svn", "cvs", "bzr"]; let mut vcs_tools = data.get("tools").map_or_else(Vec::new, |tools| { tools .as_array() .unwrap() .iter() .filter(|tool| vcs_names.contains(&tool.get("name").unwrap().as_str().unwrap())) .map(|tool| { ( tool.get("name").map_or("", |n| n.as_str().unwrap()), tool.get("mount_label").map(|l| l.as_str().unwrap()), tool.clone(), ) }) .collect::, serde_json::Value)>>() }); if vcs_tools.len() > 1 { vcs_tools.retain(|tool| { if let Some(url) = tool .2 .get("url") 
.and_then(|x| x.as_str()) .and_then(|url| url.strip_suffix('/')) { !["www", "web", "homepage"].contains(&url.rsplit('/').next().unwrap_or("")) } else { true } }); } if vcs_tools.len() > 1 && subproject.is_some() { let new_vcs_tools = vcs_tools .iter() .filter(|tool| tool.1 == subproject) .cloned() .collect::>(); if !new_vcs_tools.is_empty() { vcs_tools = new_vcs_tools; } } if vcs_tools.iter().any(|tool| tool.0 == "cvs") { vcs_tools.retain(|tool| tool.0 != "cvs"); } match vcs_tools.len().cmp(&1) { std::cmp::Ordering::Equal => { let (kind, _, data) = &vcs_tools[0]; match *kind { "git" => { if let Some(url) = data.get("clone_url_https_anon").and_then(|x| x.as_str()) { results.push(UpstreamDatum::Repository(url.to_owned())); } } "svn" => { if let Some(url) = data.get("clone_url_https_anon").and_then(|x| x.as_str()) { results.push(UpstreamDatum::Repository(url.to_owned())); } } "hg" => { if let Some(url) = data.get("clone_url_ro").and_then(|x| x.as_str()) { results.push(UpstreamDatum::Repository(url.to_owned())); } } "cvs" => { let url = format!( "cvs+pserver://anonymous@{}.cvs.sourceforge.net/cvsroot/{}", project, data.get("url") .unwrap() .as_str() .unwrap() .strip_suffix('/') .unwrap_or("") .rsplit('/') .nth(1) .unwrap_or("") ); results.push(UpstreamDatum::Repository(url)); } "bzr" => { // TODO: Implement Bazaar (BZR) handling } _ => { error!("Unknown VCS kind: {}", kind); } } } std::cmp::Ordering::Greater => { warn!("Multiple possible VCS URLs found"); } _ => {} } results } pub async fn guess_from_sf(sf_project: &str, subproject: Option<&str>) -> Vec { let mut results = Vec::new(); match get_sf_metadata(sf_project).await { Some(data) => { results.extend(parse_sf_json(data, sf_project, subproject).await); } None => { debug!("No SourceForge metadata found for {}", sf_project); } } results } pub fn extract_sf_project_name(url: &str) -> Option { let projects_regex = regex!(r"https?://sourceforge\.net/(projects|p)/([^/]+)"); if let Some(captures) = 
projects_regex.captures(url) { return captures.get(2).map(|m| m.as_str().to_string()); } let sf_regex = regex!(r"https?://(.*).(sf|sourceforge).(net|io)/.*"); if let Some(captures) = sf_regex.captures(url) { return captures.get(1).map(|m| m.as_str().to_string()); } None } #[cfg(test)] mod tests { use super::*; #[tokio::test] async fn test_parse_sf_json_svn() { // From https://sourceforge.net/rest/p/gtab let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/gtab.json")).unwrap(); assert_eq!( parse_sf_json(data, "gtab", Some("gtab")).await, vec![ UpstreamDatum::Name("gtab".to_string()), UpstreamDatum::Homepage("http://gtab.sourceforge.net".to_string()), UpstreamDatum::Repository("https://svn.code.sf.net/p/gtab/svn/trunk".to_string()), ] ); } #[tokio::test] async fn test_parse_sf_json_git() { // From https://sourceforge.net/rest/p/zsh let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/zsh.json")).unwrap(); assert_eq!( parse_sf_json(data, "zsh", Some("zsh")).await, vec![ UpstreamDatum::Name("zsh".to_string()), UpstreamDatum::Homepage("http://zsh.sourceforge.net/".to_string()), UpstreamDatum::Repository("https://git.code.sf.net/p/zsh/code".to_string()), ] ); } #[tokio::test] async fn test_parse_sf_json_hg_diff() { // From https://sourceforge.net/rest/p/hg-diff let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/hg-diff.json")).unwrap(); assert_eq!( parse_sf_json(data, "hg-diff", Some("hg-diff")).await, vec![ UpstreamDatum::Name("hg-diff".to_string()), UpstreamDatum::Homepage("http://hg-diff.sourceforge.net/".to_string()), UpstreamDatum::Repository("http://hg.code.sf.net/p/hg-diff/code".to_string()) ] ); } #[tokio::test] async fn test_parse_sf_json_docdb_v() { // From https://sourceforge.net/rest/p/docdb-v let data: serde_json::Value = serde_json::from_str(include_str!("../testdata/docdb-v.json")).unwrap(); assert_eq!( parse_sf_json(data, "docdb-v", Some("docdb-v")).await, vec![ 
UpstreamDatum::Name("DocDB".to_string()), UpstreamDatum::Homepage("http://docdb-v.sourceforge.net".to_string()), UpstreamDatum::BugDatabase( "http://sourceforge.net/tracker/?func=add&group_id=164024&atid=830064" .to_string() ), UpstreamDatum::Repository("https://git.code.sf.net/p/docdb-v/git".to_string()) ] ); } } upstream-ontologist-0.2.4/src/homepage.rs000064400000000000000000000072001046102023000166070ustar 00000000000000use crate::{Certainty, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use select::document::Document; use select::predicate::Name; pub async fn guess_from_homepage( url: &url::Url, ) -> Result, ProviderError> { let client = crate::http::build_client().build().unwrap(); let response = client.get(url.clone()).send().await?; let body = response.text().await?; Ok(guess_from_page(&body, url)) } fn guess_from_page(text: &str, basehref: &url::Url) -> Vec { let fragment = Document::from(text); let mut result = Vec::new(); for element in fragment.find(Name("a")) { if let Some(href) = element.attr("href") { let labels: Vec> = vec![ element.attr("aria-label").map(|s| s.to_string()), Some(element.text().trim().to_string()), ]; for label in labels.iter().filter_map(|x| x.as_ref()) { match label.to_lowercase().as_str() { "github" | "git" | "repository" | "github repository" => { result.push(UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::Repository( basehref.join(href).unwrap().to_string(), ), certainty: Some(Certainty::Possible), }); } "github bug tracking" | "bug tracker" => { result.push(UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::BugDatabase( basehref.join(href).unwrap().to_string(), ), certainty: Some(Certainty::Possible), }); } _ => {} } } } } result } #[cfg(test)] mod tests { use super::*; #[test] fn test_guess_from_page() { let basehref = url::Url::parse("https://example.com").unwrap(); let text = r#" GitHub repository And here is a 
link with an aria-label: Debian bug tracker "#; let result = guess_from_page(text, &basehref); assert_eq!( result, vec![ UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::Repository("https://github.com/owner/repo".to_string()), certainty: Some(Certainty::Possible), }, UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::Repository("https://git.samba.org/samba.org".to_string()), certainty: Some(Certainty::Possible), }, UpstreamDatumWithMetadata { origin: Some(Origin::Url(basehref.clone())), datum: UpstreamDatum::BugDatabase("https://bugs.debian.org/123".to_string()), certainty: Some(Certainty::Possible), }, ] ); } } upstream-ontologist-0.2.4/src/http.rs000064400000000000000000000003761046102023000160100ustar 00000000000000// Too aggressive? const DEFAULT_URLLIB_TIMEOUT: u64 = 3; pub fn build_client() -> reqwest::ClientBuilder { reqwest::Client::builder() .user_agent(crate::USER_AGENT) .timeout(std::time::Duration::from_secs(DEFAULT_URLLIB_TIMEOUT)) } upstream-ontologist-0.2.4/src/lib.rs000064400000000000000000004024171046102023000156010ustar 00000000000000// pyo3 macros use a gil-refs feature #![allow(unexpected_cfgs)] use futures::stream::StreamExt; use futures::Stream; use lazy_regex::regex; use log::{debug, warn}; use percent_encoding::utf8_percent_encode; #[cfg(feature = "pyo3")] use pyo3::{ exceptions::{PyRuntimeError, PyTypeError, PyValueError}, prelude::*, types::PyDict, }; use reqwest::header::HeaderMap; use serde::ser::SerializeSeq; use std::cmp::Ordering; use std::pin::Pin; use std::str::FromStr; use std::fs::File; use std::io::Read; use std::path::{Path, PathBuf}; use url::Url; static USER_AGENT: &str = concat!("upstream-ontologist/", env!("CARGO_PKG_VERSION")); pub mod extrapolate; pub mod forges; pub mod homepage; pub mod http; pub mod providers; pub mod readme; pub mod repology; pub mod vcs; pub mod vcs_command; #[cfg(test)] mod upstream_tests { 
include!(concat!(env!("OUT_DIR"), "/upstream_tests.rs")); } #[cfg(test)] mod readme_tests { include!(concat!(env!("OUT_DIR"), "/readme_tests.rs")); } #[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)] pub enum Certainty { Possible, Likely, Confident, Certain, } #[derive(Clone, Debug, PartialEq, Eq)] pub enum Origin { Path(PathBuf), Url(url::Url), Other(String), } impl std::fmt::Display for Origin { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Origin::Path(path) => write!(f, "{}", path.display()), Origin::Url(url) => write!(f, "{}", url), Origin::Other(s) => write!(f, "{}", s), } } } impl From<&std::path::Path> for Origin { fn from(path: &std::path::Path) -> Self { Origin::Path(path.to_path_buf()) } } impl From for Origin { fn from(path: std::path::PathBuf) -> Self { Origin::Path(path) } } impl From for Origin { fn from(url: url::Url) -> Self { Origin::Url(url) } } #[cfg(feature = "pyo3")] impl ToPyObject for Origin { fn to_object(&self, py: Python) -> PyObject { match self { Origin::Path(path) => path.to_str().unwrap().to_object(py), Origin::Url(url) => url.to_string().to_object(py), Origin::Other(s) => s.to_object(py), } } } #[cfg(feature = "pyo3")] impl IntoPy for Origin { fn into_py(self, py: Python) -> PyObject { match self { Origin::Path(path) => path.to_str().unwrap().to_object(py), Origin::Url(url) => url.to_string().to_object(py), Origin::Other(s) => s.to_object(py), } } } #[cfg(feature = "pyo3")] impl FromPyObject<'_> for Origin { fn extract_bound(ob: &Bound) -> PyResult { if let Ok(path) = ob.extract::() { Ok(Origin::Path(path)) } else if let Ok(s) = ob.extract::() { Ok(Origin::Other(s)) } else { Err(PyTypeError::new_err("expected str or Path")) } } } impl FromStr for Certainty { type Err = String; fn from_str(s: &str) -> Result { match s { "certain" => Ok(Certainty::Certain), "confident" => Ok(Certainty::Confident), "likely" => Ok(Certainty::Likely), "possible" => Ok(Certainty::Possible), _ => 
Err(format!("unknown certainty: {}", s)), } } } impl std::fmt::Display for Certainty { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Certainty::Certain => write!(f, "certain"), Certainty::Confident => write!(f, "confident"), Certainty::Likely => write!(f, "likely"), Certainty::Possible => write!(f, "possible"), } } } #[cfg(feature = "pyo3")] impl FromPyObject<'_> for Certainty { fn extract_bound(ob: &Bound) -> PyResult { let o: String = ob.extract::()?; o.parse().map_err(PyValueError::new_err) } } #[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct Person { pub name: Option, pub email: Option, pub url: Option, } impl serde::ser::Serialize for Person { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { let mut map = serde_yaml::Mapping::new(); if let Some(name) = &self.name { map.insert( serde_yaml::Value::String("name".to_string()), serde_yaml::Value::String(name.to_string()), ); } if let Some(email) = &self.email { map.insert( serde_yaml::Value::String("email".to_string()), serde_yaml::Value::String(email.to_string()), ); } if let Some(url) = &self.url { map.insert( serde_yaml::Value::String("url".to_string()), serde_yaml::Value::String(url.to_string()), ); } let tag = serde_yaml::value::TaggedValue { tag: serde_yaml::value::Tag::new("!Person"), value: serde_yaml::Value::Mapping(map), }; tag.serialize(serializer) } } impl<'a> serde::de::Deserialize<'a> for Person { fn deserialize(deserializer: D) -> Result where D: serde::de::Deserializer<'a>, { let value = serde_yaml::Value::deserialize(deserializer)?; if let serde_yaml::Value::Mapping(map) = value { let mut name = None; let mut email = None; let mut url = None; for (k, v) in map { match k { serde_yaml::Value::String(k) => match k.as_str() { "name" => { if let serde_yaml::Value::String(s) = v { name = Some(s); } } "email" => { if let serde_yaml::Value::String(s) = v { email = Some(s); } } "url" => { if let serde_yaml::Value::String(s) = v { 
url = Some(s); } } n => { return Err(serde::de::Error::custom(format!("unknown key: {}", n))); } }, n => { return Err(serde::de::Error::custom(format!( "expected string key, got {:?}", n ))); } } } Ok(Person { name, email, url }) } else { Err(serde::de::Error::custom("expected mapping")) } } } impl std::fmt::Display for Person { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.name.as_ref().unwrap_or(&"".to_string()))?; if let Some(email) = &self.email { write!(f, " <{}>", email)?; } if let Some(url) = &self.url { write!(f, " ({})", url)?; } Ok(()) } } impl From<&str> for Person { fn from(text: &str) -> Self { let mut text = text.replace(" at ", "@"); text = text.replace(" -at- ", "@"); text = text.replace(" -dot- ", "."); text = text.replace("[AT]", "@"); if text.contains('(') && text.ends_with(')') { if let Some((p1, p2)) = text[..text.len() - 1].split_once('(') { if p2.starts_with("https://") || p2.starts_with("http://") { let url = p2.to_string(); if let Some((name, email)) = parseaddr(p1) { Person { name: Some(name), email: Some(email), url: Some(url), } } else { Person { name: Some(p1.to_string()), url: Some(url), ..Default::default() } } } else if p2.contains('@') { Person { name: Some(p1.to_string()), email: Some(p2.to_string()), ..Default::default() } } else { Person { name: Some(text.to_string()), ..Default::default() } } } else { Person { name: Some(text.to_string()), ..Default::default() } } } else if text.contains('<') { if let Some((name, email)) = parseaddr(text.as_str()) { return Person { name: Some(name), email: Some(email), ..Default::default() }; } else { Person { name: Some(text.to_string()), ..Default::default() } } } else if text.contains('@') && !text.contains(' ') { return Person { email: Some(text), ..Default::default() }; } else { Person { name: Some(text), ..Default::default() } } } } #[cfg(feature = "pyo3")] impl ToPyObject for Person { fn to_object(&self, py: Python) -> PyObject { let m = 
PyModule::import_bound(py, "upstream_ontologist").unwrap(); let person_cls = m.getattr("Person").unwrap(); person_cls .call1((self.name.as_ref(), self.email.as_ref(), self.url.as_ref())) .unwrap() .into_py(py) } } fn parseaddr(text: &str) -> Option<(String, String)> { let re = regex!(r"(.*?)\s*<([^<>]+)>"); if let Some(captures) = re.captures(text) { let name = captures.get(1).map(|m| m.as_str().trim().to_string()); let email = captures.get(2).map(|m| m.as_str().trim().to_string()); if let (Some(name), Some(email)) = (name, email) { return Some((name, email)); } } None } #[cfg(feature = "pyo3")] impl FromPyObject<'_> for Person { fn extract_bound(ob: &Bound) -> PyResult { let name = ob.getattr("name")?.extract::>()?; let email = ob.getattr("email")?.extract::>()?; let url = ob.getattr("url")?.extract::>()?; Ok(Person { name, email, url }) } } #[derive(Clone, Debug, PartialEq, Eq)] pub enum UpstreamDatum { /// Name of the project Name(String), /// URL to project homepage Homepage(String), /// URL to the project's source code repository Repository(String), /// URL to browse the project's source code repository RepositoryBrowse(String), /// Long description of the project Description(String), /// Short summary of the project (one line) Summary(String), /// License name or SPDX identifier License(String), /// List of authors Author(Vec), /// List of maintainers Maintainer(Person), /// URL of the project's issue tracker BugDatabase(String), /// URL to submit a new bug BugSubmit(String), /// URL to the project's contact page or email address Contact(String), /// Cargo crate name CargoCrate(String), /// Name of the security page name SecurityMD(String), /// URL to the security page or email address SecurityContact(String), /// Last version of the project Version(String), /// List of keywords Keywords(Vec), /// Copyright notice Copyright(String), /// URL to the project's documentation Documentation(String), /// URL to the project's API documentation 
APIDocumentation(String), /// Go import path GoImportPath(String), /// URL to the project's download page Download(String), /// URL to the project's wiki Wiki(String), /// URL to the project's mailing list MailingList(String), /// SourceForge project name SourceForgeProject(String), Archive(String), /// URL to a demo instance Demo(String), /// PHP PECL package name PeclPackage(String), /// URL to the funding page Funding(String), /// URL to the changelog Changelog(String), /// Haskell package name HaskellPackage(String), /// Debian ITP (Intent To Package) bug number DebianITP(i32), /// List of URLs to screenshots Screenshots(Vec), /// Name of registry Registry(Vec<(String, String)>), /// Recommended way to cite the software CiteAs(String), /// Link for donations (e.g. Paypal, Libera, etc) Donation(String), /// Link to a life instance of the webservice Webservice(String), /// Name of the buildsystem used BuildSystem(String), /// FAQ FAQ(String), } #[derive(PartialEq, Eq, Debug, Clone)] pub struct UpstreamDatumWithMetadata { pub datum: UpstreamDatum, pub origin: Option, pub certainty: Option, } fn known_bad_url(value: &str) -> bool { if value.contains("${") { return true; } false } impl UpstreamDatum { pub fn field(&self) -> &'static str { match self { UpstreamDatum::Summary(..) => "Summary", UpstreamDatum::Description(..) => "Description", UpstreamDatum::Name(..) => "Name", UpstreamDatum::Homepage(..) => "Homepage", UpstreamDatum::Repository(..) => "Repository", UpstreamDatum::RepositoryBrowse(..) => "Repository-Browse", UpstreamDatum::License(..) => "License", UpstreamDatum::Author(..) => "Author", UpstreamDatum::BugDatabase(..) => "Bug-Database", UpstreamDatum::BugSubmit(..) => "Bug-Submit", UpstreamDatum::Contact(..) => "Contact", UpstreamDatum::CargoCrate(..) => "Cargo-Crate", UpstreamDatum::SecurityMD(..) => "Security-MD", UpstreamDatum::SecurityContact(..) => "Security-Contact", UpstreamDatum::Version(..) => "Version", UpstreamDatum::Keywords(..) 
=> "Keywords", UpstreamDatum::Maintainer(..) => "Maintainer", UpstreamDatum::Copyright(..) => "Copyright", UpstreamDatum::Documentation(..) => "Documentation", UpstreamDatum::APIDocumentation(..) => "API-Documentation", UpstreamDatum::GoImportPath(..) => "Go-Import-Path", UpstreamDatum::Download(..) => "Download", UpstreamDatum::Wiki(..) => "Wiki", UpstreamDatum::MailingList(..) => "MailingList", UpstreamDatum::SourceForgeProject(..) => "SourceForge-Project", UpstreamDatum::Archive(..) => "Archive", UpstreamDatum::Demo(..) => "Demo", UpstreamDatum::PeclPackage(..) => "Pecl-Package", UpstreamDatum::HaskellPackage(..) => "Haskell-Package", UpstreamDatum::Funding(..) => "Funding", UpstreamDatum::Changelog(..) => "Changelog", UpstreamDatum::DebianITP(..) => "Debian-ITP", UpstreamDatum::Screenshots(..) => "Screenshots", UpstreamDatum::Registry(..) => "Registry", UpstreamDatum::CiteAs(..) => "Cite-As", UpstreamDatum::Donation(..) => "Donation", UpstreamDatum::Webservice(..) => "Webservice", UpstreamDatum::BuildSystem(..) => "BuildSystem", UpstreamDatum::FAQ(..) 
=> "FAQ", } } pub fn as_str(&self) -> Option<&str> { match self { UpstreamDatum::Name(s) => Some(s), UpstreamDatum::Homepage(s) => Some(s), UpstreamDatum::Repository(s) => Some(s), UpstreamDatum::RepositoryBrowse(s) => Some(s), UpstreamDatum::Description(s) => Some(s), UpstreamDatum::Summary(s) => Some(s), UpstreamDatum::License(s) => Some(s), UpstreamDatum::BugDatabase(s) => Some(s), UpstreamDatum::BugSubmit(s) => Some(s), UpstreamDatum::Contact(s) => Some(s), UpstreamDatum::CargoCrate(s) => Some(s), UpstreamDatum::SecurityMD(s) => Some(s), UpstreamDatum::SecurityContact(s) => Some(s), UpstreamDatum::Version(s) => Some(s), UpstreamDatum::Documentation(s) => Some(s), UpstreamDatum::APIDocumentation(s) => Some(s), UpstreamDatum::GoImportPath(s) => Some(s), UpstreamDatum::Download(s) => Some(s), UpstreamDatum::Wiki(s) => Some(s), UpstreamDatum::MailingList(s) => Some(s), UpstreamDatum::SourceForgeProject(s) => Some(s), UpstreamDatum::Archive(s) => Some(s), UpstreamDatum::Demo(s) => Some(s), UpstreamDatum::PeclPackage(s) => Some(s), UpstreamDatum::HaskellPackage(s) => Some(s), UpstreamDatum::Author(..) => None, UpstreamDatum::Maintainer(..) => None, UpstreamDatum::Keywords(..) => None, UpstreamDatum::Copyright(c) => Some(c), UpstreamDatum::Funding(f) => Some(f), UpstreamDatum::Changelog(c) => Some(c), UpstreamDatum::Screenshots(..) => None, UpstreamDatum::DebianITP(_c) => None, UpstreamDatum::CiteAs(c) => Some(c), UpstreamDatum::Registry(_) => None, UpstreamDatum::Donation(d) => Some(d), UpstreamDatum::Webservice(w) => Some(w), UpstreamDatum::BuildSystem(b) => Some(b), UpstreamDatum::FAQ(f) => Some(f), } } pub fn to_url(&self) -> Option { match self { UpstreamDatum::Name(..) => None, UpstreamDatum::Homepage(s) => Some(s.parse().ok()?), UpstreamDatum::Repository(s) => Some(s.parse().ok()?), UpstreamDatum::RepositoryBrowse(s) => Some(s.parse().ok()?), UpstreamDatum::Description(..) => None, UpstreamDatum::Summary(..) => None, UpstreamDatum::License(..) 
=> None, UpstreamDatum::BugDatabase(s) => Some(s.parse().ok()?), UpstreamDatum::BugSubmit(s) => Some(s.parse().ok()?), UpstreamDatum::Contact(..) => None, UpstreamDatum::CargoCrate(s) => Some(s.parse().ok()?), UpstreamDatum::SecurityMD(..) => None, UpstreamDatum::SecurityContact(..) => None, UpstreamDatum::Version(..) => None, UpstreamDatum::Documentation(s) => Some(s.parse().ok()?), UpstreamDatum::APIDocumentation(s) => Some(s.parse().ok()?), UpstreamDatum::GoImportPath(_s) => None, UpstreamDatum::Download(s) => Some(s.parse().ok()?), UpstreamDatum::Wiki(s) => Some(s.parse().ok()?), UpstreamDatum::MailingList(s) => Some(s.parse().ok()?), UpstreamDatum::SourceForgeProject(s) => Some(s.parse().ok()?), UpstreamDatum::Archive(s) => Some(s.parse().ok()?), UpstreamDatum::Demo(s) => Some(s.parse().ok()?), UpstreamDatum::PeclPackage(_s) => None, UpstreamDatum::HaskellPackage(_s) => None, UpstreamDatum::Author(..) => None, UpstreamDatum::Maintainer(..) => None, UpstreamDatum::Keywords(..) => None, UpstreamDatum::Copyright(..) => None, UpstreamDatum::Funding(s) => Some(s.parse().ok()?), UpstreamDatum::Changelog(s) => Some(s.parse().ok()?), UpstreamDatum::Screenshots(..) 
=> None, UpstreamDatum::DebianITP(_c) => None, UpstreamDatum::Registry(_r) => None, UpstreamDatum::CiteAs(_c) => None, UpstreamDatum::Donation(_d) => None, UpstreamDatum::Webservice(w) => Some(w.parse().ok()?), UpstreamDatum::BuildSystem(_) => None, UpstreamDatum::FAQ(f) => Some(f.parse().ok()?), } } pub fn as_person(&self) -> Option<&Person> { match self { UpstreamDatum::Maintainer(p) => Some(p), _ => None, } } pub fn known_bad_guess(&self) -> bool { match self { UpstreamDatum::BugDatabase(s) | UpstreamDatum::BugSubmit(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("bugzilla.gnome.org") { return true; } if url.host_str() == Some("bugs.freedesktop.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Repository(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("anongit.kde.org") { return true; } if url.host_str() == Some("git.gitorious.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Homepage(s) => { let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("pypi.org") { return true; } if url.host_str() == Some("rubygems.org") { return true; } } UpstreamDatum::RepositoryBrowse(s) => { if known_bad_url(s) { return true; } let url = match Url::parse(s) { Ok(url) => url, Err(_) => return false, }; if url.host_str() == Some("cgit.kde.org") { return true; } if url.path().ends_with("/sign_in") { return true; } } UpstreamDatum::Author(authors) => { for a in authors { if let Some(name) = &a.name { let lc = name.to_lowercase(); if lc.contains("unknown") { return true; } if lc.contains("maintainer") { return true; } if lc.contains("contributor") { return true; } } } } UpstreamDatum::Name(s) => { let lc = s.to_lowercase(); if lc.contains("unknown") { return 
true; } if lc == "package" { return true; } } UpstreamDatum::Version(s) => { let lc = s.to_lowercase(); if ["devel", "unknown"].contains(&lc.as_str()) { return true; } } _ => {} } false } } impl std::fmt::Display for UpstreamDatum { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { UpstreamDatum::Name(s) => write!(f, "Name: {}", s), UpstreamDatum::Homepage(s) => write!(f, "Homepage: {}", s), UpstreamDatum::Repository(s) => write!(f, "Repository: {}", s), UpstreamDatum::RepositoryBrowse(s) => write!(f, "RepositoryBrowse: {}", s), UpstreamDatum::Description(s) => write!(f, "Description: {}", s), UpstreamDatum::Summary(s) => write!(f, "Summary: {}", s), UpstreamDatum::License(s) => write!(f, "License: {}", s), UpstreamDatum::BugDatabase(s) => write!(f, "BugDatabase: {}", s), UpstreamDatum::BugSubmit(s) => write!(f, "BugSubmit: {}", s), UpstreamDatum::Contact(s) => write!(f, "Contact: {}", s), UpstreamDatum::CargoCrate(s) => write!(f, "CargoCrate: {}", s), UpstreamDatum::SecurityMD(s) => write!(f, "SecurityMD: {}", s), UpstreamDatum::SecurityContact(s) => write!(f, "SecurityContact: {}", s), UpstreamDatum::Version(s) => write!(f, "Version: {}", s), UpstreamDatum::Documentation(s) => write!(f, "Documentation: {}", s), UpstreamDatum::APIDocumentation(s) => write!(f, "API-Documentation: {}", s), UpstreamDatum::GoImportPath(s) => write!(f, "GoImportPath: {}", s), UpstreamDatum::Download(s) => write!(f, "Download: {}", s), UpstreamDatum::Wiki(s) => write!(f, "Wiki: {}", s), UpstreamDatum::MailingList(s) => write!(f, "MailingList: {}", s), UpstreamDatum::SourceForgeProject(s) => write!(f, "SourceForgeProject: {}", s), UpstreamDatum::Archive(s) => write!(f, "Archive: {}", s), UpstreamDatum::Demo(s) => write!(f, "Demo: {}", s), UpstreamDatum::PeclPackage(s) => write!(f, "PeclPackage: {}", s), UpstreamDatum::Author(authors) => { write!( f, "Author: {}", authors .iter() .map(|a| a.to_string()) .collect::>() .join(", ") ) } 
UpstreamDatum::Maintainer(maintainer) => { write!(f, "Maintainer: {}", maintainer) } UpstreamDatum::Keywords(keywords) => { write!( f, "Keywords: {}", keywords .iter() .map(|a| a.to_string()) .collect::>() .join(", ") ) } UpstreamDatum::Copyright(s) => { write!(f, "Copyright: {}", s) } UpstreamDatum::Funding(s) => { write!(f, "Funding: {}", s) } UpstreamDatum::Changelog(s) => { write!(f, "Changelog: {}", s) } UpstreamDatum::DebianITP(s) => { write!(f, "DebianITP: {}", s) } UpstreamDatum::HaskellPackage(p) => { write!(f, "HaskellPackage: {}", p) } UpstreamDatum::Screenshots(s) => { write!(f, "Screenshots: {}", s.join(", ")) } UpstreamDatum::Registry(r) => { write!(f, "Registry:")?; for (k, v) in r { write!(f, " - Name: {}", k)?; write!(f, " Entry: {}", v)?; } Ok(()) } UpstreamDatum::CiteAs(c) => { write!(f, "Cite-As: {}", c) } UpstreamDatum::Donation(d) => { write!(f, "Donation: {}", d) } UpstreamDatum::Webservice(w) => { write!(f, "Webservice: {}", w) } UpstreamDatum::BuildSystem(bs) => { write!(f, "BuildSystem: {}", bs) } UpstreamDatum::FAQ(faq) => { write!(f, "FAQ: {}", faq) } } } } impl serde::ser::Serialize for UpstreamDatum { fn serialize(&self, serializer: S) -> Result { match self { UpstreamDatum::Name(s) => serializer.serialize_str(s), UpstreamDatum::Homepage(s) => serializer.serialize_str(s), UpstreamDatum::Repository(s) => serializer.serialize_str(s), UpstreamDatum::RepositoryBrowse(s) => serializer.serialize_str(s), UpstreamDatum::Description(s) => serializer.serialize_str(s), UpstreamDatum::Summary(s) => serializer.serialize_str(s), UpstreamDatum::License(s) => serializer.serialize_str(s), UpstreamDatum::BugDatabase(s) => serializer.serialize_str(s), UpstreamDatum::BugSubmit(s) => serializer.serialize_str(s), UpstreamDatum::Contact(s) => serializer.serialize_str(s), UpstreamDatum::CargoCrate(s) => serializer.serialize_str(s), UpstreamDatum::SecurityMD(s) => serializer.serialize_str(s), UpstreamDatum::SecurityContact(s) => serializer.serialize_str(s), 
UpstreamDatum::Version(s) => serializer.serialize_str(s), UpstreamDatum::Documentation(s) => serializer.serialize_str(s), UpstreamDatum::APIDocumentation(s) => serializer.serialize_str(s), UpstreamDatum::GoImportPath(s) => serializer.serialize_str(s), UpstreamDatum::Download(s) => serializer.serialize_str(s), UpstreamDatum::Wiki(s) => serializer.serialize_str(s), UpstreamDatum::MailingList(s) => serializer.serialize_str(s), UpstreamDatum::SourceForgeProject(s) => serializer.serialize_str(s), UpstreamDatum::Archive(s) => serializer.serialize_str(s), UpstreamDatum::Demo(s) => serializer.serialize_str(s), UpstreamDatum::PeclPackage(s) => serializer.serialize_str(s), UpstreamDatum::Author(authors) => { let mut seq = serializer.serialize_seq(Some(authors.len()))?; for a in authors { seq.serialize_element(a)?; } seq.end() } UpstreamDatum::Maintainer(maintainer) => maintainer.serialize(serializer), UpstreamDatum::Keywords(keywords) => { let mut seq = serializer.serialize_seq(Some(keywords.len()))?; for a in keywords { seq.serialize_element(a)?; } seq.end() } UpstreamDatum::Copyright(s) => serializer.serialize_str(s), UpstreamDatum::Funding(s) => serializer.serialize_str(s), UpstreamDatum::Changelog(s) => serializer.serialize_str(s), UpstreamDatum::DebianITP(s) => serializer.serialize_i32(*s), UpstreamDatum::HaskellPackage(p) => serializer.serialize_str(p), UpstreamDatum::Screenshots(s) => { let mut seq = serializer.serialize_seq(Some(s.len()))?; for s in s { seq.serialize_element(s)?; } seq.end() } UpstreamDatum::CiteAs(c) => serializer.serialize_str(c), UpstreamDatum::Registry(r) => { let mut l = serializer.serialize_seq(Some(r.len()))?; for (k, v) in r { let mut m = serde_yaml::Mapping::new(); m.insert( serde_yaml::Value::String("Name".to_string()), serde_yaml::to_value(k).unwrap(), ); m.insert( serde_yaml::Value::String("Entry".to_string()), serde_yaml::to_value(v).unwrap(), ); l.serialize_element(&m)?; } l.end() } UpstreamDatum::Donation(d) => 
serializer.serialize_str(d), UpstreamDatum::Webservice(w) => serializer.serialize_str(w), UpstreamDatum::BuildSystem(bs) => serializer.serialize_str(bs), UpstreamDatum::FAQ(faq) => serializer.serialize_str(faq), } } } #[derive(PartialEq, Eq, Debug, Clone)] pub struct UpstreamMetadata(Vec); impl UpstreamMetadata { pub fn new() -> Self { UpstreamMetadata(Vec::new()) } pub fn is_empty(&self) -> bool { self.0.is_empty() } pub fn len(&self) -> usize { self.0.len() } pub fn sort(&mut self) { self.0.sort_by(|a, b| a.datum.field().cmp(b.datum.field())); } pub fn from_data(data: Vec) -> Self { Self(data) } pub fn mut_items(&mut self) -> &mut Vec { &mut self.0 } pub fn iter(&self) -> impl Iterator { self.0.iter() } pub fn mut_iter(&mut self) -> impl Iterator { self.0.iter_mut() } pub fn get(&self, field: &str) -> Option<&UpstreamDatumWithMetadata> { self.0.iter().find(|d| d.datum.field() == field) } pub fn get_mut(&mut self, field: &str) -> Option<&mut UpstreamDatumWithMetadata> { self.0.iter_mut().find(|d| d.datum.field() == field) } pub fn insert(&mut self, datum: UpstreamDatumWithMetadata) { self.0.push(datum); } pub fn contains_key(&self, field: &str) -> bool { self.get(field).is_some() } pub fn discard_known_bad(&mut self) { self.0.retain(|d| !d.datum.known_bad_guess()); } pub fn update( &mut self, new_items: impl Iterator, ) -> Vec { update_from_guesses(&mut self.0, new_items) } pub fn remove(&mut self, field: &str) -> Option { let index = self.0.iter().position(|d| d.datum.field() == field)?; Some(self.0.remove(index)) } pub fn name(&self) -> Option<&str> { self.get("Name").and_then(|d| d.datum.as_str()) } pub fn homepage(&self) -> Option<&str> { self.get("Homepage").and_then(|d| d.datum.as_str()) } pub fn repository(&self) -> Option<&str> { self.get("Repository").and_then(|d| d.datum.as_str()) } pub fn repository_browse(&self) -> Option<&str> { self.get("Repository-Browse").and_then(|d| d.datum.as_str()) } pub fn description(&self) -> Option<&str> { 
self.get("Description").and_then(|d| d.datum.as_str()) } pub fn summary(&self) -> Option<&str> { self.get("Summary").and_then(|d| d.datum.as_str()) } pub fn license(&self) -> Option<&str> { self.get("License").and_then(|d| d.datum.as_str()) } pub fn author(&self) -> Option<&Vec> { self.get("Author").map(|d| match &d.datum { UpstreamDatum::Author(authors) => authors, _ => unreachable!(), }) } pub fn maintainer(&self) -> Option<&Person> { self.get("Maintainer").map(|d| match &d.datum { UpstreamDatum::Maintainer(maintainer) => maintainer, _ => unreachable!(), }) } pub fn bug_database(&self) -> Option<&str> { self.get("Bug-Database").and_then(|d| d.datum.as_str()) } pub fn bug_submit(&self) -> Option<&str> { self.get("Bug-Submit").and_then(|d| d.datum.as_str()) } pub fn contact(&self) -> Option<&str> { self.get("Contact").and_then(|d| d.datum.as_str()) } pub fn cargo_crate(&self) -> Option<&str> { self.get("Cargo-Crate").and_then(|d| d.datum.as_str()) } pub fn security_md(&self) -> Option<&str> { self.get("Security-MD").and_then(|d| d.datum.as_str()) } pub fn security_contact(&self) -> Option<&str> { self.get("Security-Contact").and_then(|d| d.datum.as_str()) } pub fn version(&self) -> Option<&str> { self.get("Version").and_then(|d| d.datum.as_str()) } pub fn keywords(&self) -> Option<&Vec> { self.get("Keywords").map(|d| match &d.datum { UpstreamDatum::Keywords(keywords) => keywords, _ => unreachable!(), }) } pub fn documentation(&self) -> Option<&str> { self.get("Documentation").and_then(|d| d.datum.as_str()) } pub fn api_documentation(&self) -> Option<&str> { self.get("API-Documentation").and_then(|d| d.datum.as_str()) } pub fn go_import_path(&self) -> Option<&str> { self.get("Go-Import-Path").and_then(|d| d.datum.as_str()) } pub fn download(&self) -> Option<&str> { self.get("Download").and_then(|d| d.datum.as_str()) } pub fn wiki(&self) -> Option<&str> { self.get("Wiki").and_then(|d| d.datum.as_str()) } pub fn mailing_list(&self) -> Option<&str> { 
self.get("MailingList").and_then(|d| d.datum.as_str()) } pub fn sourceforge_project(&self) -> Option<&str> { self.get("SourceForge-Project") .and_then(|d| d.datum.as_str()) } pub fn archive(&self) -> Option<&str> { self.get("Archive").and_then(|d| d.datum.as_str()) } pub fn demo(&self) -> Option<&str> { self.get("Demo").and_then(|d| d.datum.as_str()) } pub fn pecl_package(&self) -> Option<&str> { self.get("Pecl-Package").and_then(|d| d.datum.as_str()) } pub fn haskell_package(&self) -> Option<&str> { self.get("Haskell-Package").and_then(|d| d.datum.as_str()) } pub fn funding(&self) -> Option<&str> { self.get("Funding").and_then(|d| d.datum.as_str()) } pub fn changelog(&self) -> Option<&str> { self.get("Changelog").and_then(|d| d.datum.as_str()) } pub fn debian_itp(&self) -> Option { self.get("Debian-ITP").and_then(|d| match &d.datum { UpstreamDatum::DebianITP(itp) => Some(*itp), _ => unreachable!(), }) } pub fn screenshots(&self) -> Option<&Vec> { self.get("Screenshots").map(|d| match &d.datum { UpstreamDatum::Screenshots(screenshots) => screenshots, _ => unreachable!(), }) } pub fn donation(&self) -> Option<&str> { self.get("Donation").and_then(|d| d.datum.as_str()) } pub fn cite_as(&self) -> Option<&str> { self.get("Cite-As").and_then(|d| d.datum.as_str()) } pub fn registry(&self) -> Option<&Vec<(String, String)>> { self.get("Registry").map(|d| match &d.datum { UpstreamDatum::Registry(registry) => registry, _ => unreachable!(), }) } pub fn webservice(&self) -> Option<&str> { self.get("Webservice").and_then(|d| d.datum.as_str()) } pub fn buildsystem(&self) -> Option<&str> { self.get("BuildSystem").and_then(|d| d.datum.as_str()) } pub fn copyright(&self) -> Option<&str> { self.get("Copyright").and_then(|d| d.datum.as_str()) } pub fn faq(&self) -> Option<&str> { self.get("FAQ").and_then(|d| d.datum.as_str()) } } impl std::ops::Index<&str> for UpstreamMetadata { type Output = UpstreamDatumWithMetadata; fn index(&self, index: &str) -> &Self::Output { 
self.get(index).unwrap() } } impl Default for UpstreamMetadata { fn default() -> Self { UpstreamMetadata::new() } } impl Iterator for UpstreamMetadata { type Item = UpstreamDatumWithMetadata; fn next(&mut self) -> Option { self.0.pop() } } impl From for UpstreamDatumWithMetadata { fn from(d: UpstreamDatum) -> Self { UpstreamDatumWithMetadata { datum: d, certainty: None, origin: None, } } } impl From> for UpstreamMetadata { fn from(v: Vec) -> Self { UpstreamMetadata(v) } } impl From> for UpstreamMetadata { fn from(v: Vec) -> Self { UpstreamMetadata( v.into_iter() .map(|d| UpstreamDatumWithMetadata { datum: d, certainty: None, origin: None, }) .collect(), ) } } impl From for Vec { fn from(v: UpstreamMetadata) -> Self { v.0 } } impl From for Vec { fn from(v: UpstreamMetadata) -> Self { v.0.into_iter().map(|d| d.datum).collect() } } impl serde::ser::Serialize for UpstreamMetadata { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { let mut map = serde_yaml::Mapping::new(); for datum in &self.0 { map.insert( serde_yaml::Value::String(datum.datum.field().to_string()), serde_yaml::to_value(datum).unwrap(), ); } map.serialize(serializer) } } #[cfg(feature = "pyo3")] impl ToPyObject for UpstreamDatumWithMetadata { fn to_object(&self, py: Python) -> PyObject { let m = PyModule::import_bound(py, "upstream_ontologist.guess").unwrap(); let cls = m.getattr("UpstreamDatum").unwrap(); let (field, py_datum) = self .datum .to_object(py) .extract::<(String, PyObject)>(py) .unwrap(); let kwargs = pyo3::types::PyDict::new_bound(py); kwargs .set_item("certainty", self.certainty.map(|x| x.to_string())) .unwrap(); kwargs.set_item("origin", self.origin.as_ref()).unwrap(); let datum = cls.call((field, py_datum), Some(&kwargs)).unwrap(); datum.to_object(py) } } impl serde::ser::Serialize for UpstreamDatumWithMetadata { fn serialize(&self, serializer: S) -> Result where S: serde::ser::Serializer, { UpstreamDatum::serialize(&self.datum, serializer) } } pub trait 
UpstreamDataProvider {
    // Restored: the iterator item type was stripped from this copy of the
    // file — assumed to yield UpstreamDatumWithMetadata; TODO confirm.
    fn provide(
        path: &std::path::Path,
        trust_package: bool,
    ) -> dyn Iterator<Item = UpstreamDatumWithMetadata>;
}

/// Errors returned by `load_json_url`.
#[derive(Debug)]
pub enum HTTPJSONError {
    HTTPError(reqwest::Error),
    Timeout(tokio::time::Duration),
    Error {
        url: reqwest::Url,
        status: u16,
        response: reqwest::Response,
    },
}

impl std::fmt::Display for HTTPJSONError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            HTTPJSONError::HTTPError(e) => write!(f, "{}", e),
            HTTPJSONError::Timeout(timeout) => write!(f, "Timeout after {:?}", timeout),
            HTTPJSONError::Error {
                url,
                status,
                response: _,
            } => write!(f, "HTTP error {} for {}:", status, url,),
        }
    }
}

/// Fetches `http_url` and parses the response body as JSON.
///
/// Sends `Accept: application/json`; for github.com hosts, a `GITHUB_TOKEN`
/// environment variable (when set) is forwarded as a bearer token.
/// `timeout` defaults to 30 seconds.
pub async fn load_json_url(
    http_url: &Url,
    timeout: Option<std::time::Duration>,
) -> Result<serde_json::Value, HTTPJSONError> {
    let mut headers = HeaderMap::new();
    headers.insert(reqwest::header::ACCEPT, "application/json".parse().unwrap());
    if let Some(hostname) = http_url.host_str() {
        if hostname == "github.com" || hostname == "raw.githubusercontent.com" {
            if let Ok(token) = std::env::var("GITHUB_TOKEN") {
                // NOTE(review): the token is placed in the WWW-Authenticate
                // request header; Authorization is the conventional header
                // for bearer tokens — confirm upstream intent.
                headers.insert(
                    reqwest::header::WWW_AUTHENTICATE,
                    format!("Bearer {}", token).parse().unwrap(),
                );
            }
        }
    }
    let client = crate::http::build_client()
        .default_headers(headers)
        .build()
        .map_err(HTTPJSONError::HTTPError)?;
    // Round-trip through String to convert url::Url into reqwest::Url.
    // Restored: the turbofish target type was stripped from this copy.
    let http_url: reqwest::Url = Into::<String>::into(http_url.clone()).parse().unwrap();
    let request = client
        .get(http_url)
        .build()
        .map_err(HTTPJSONError::HTTPError)?;
    let timeout = timeout.unwrap_or(std::time::Duration::from_secs(30));
    let response = tokio::time::timeout(timeout, client.execute(request))
        .await
        .map_err(|_| HTTPJSONError::Timeout(timeout))?
.map_err(HTTPJSONError::HTTPError)?; if !response.status().is_success() { return Err(HTTPJSONError::Error { url: response.url().clone(), status: response.status().as_u16(), response, }); } let json_contents: serde_json::Value = response.json().await.map_err(HTTPJSONError::HTTPError)?; Ok(json_contents) } fn xmlparse_simplify_namespaces(path: &Path, namespaces: &[&str]) -> Option { let namespaces = namespaces .iter() .map(|ns| format!("{{{}{}}}", ns, ns)) .collect::>(); let mut f = std::fs::File::open(path).unwrap(); let mut buf = Vec::new(); f.read_to_end(&mut buf).ok()?; let mut tree = xmltree::Element::parse(std::io::Cursor::new(buf)).ok()?; simplify_namespaces(&mut tree, &namespaces); Some(tree) } fn simplify_namespaces(element: &mut xmltree::Element, namespaces: &[String]) { use xmltree::XMLNode; element.prefix = None; if let Some(namespace) = namespaces.iter().find(|&ns| element.name.starts_with(ns)) { element.name = element.name[namespace.len()..].to_string(); } for child in &mut element.children { if let XMLNode::Element(ref mut child_element) = child { simplify_namespaces(child_element, namespaces); } } } pub enum CanonicalizeError { InvalidUrl(Url, String), Unverifiable(Url, String), RateLimited(Url), } pub async fn check_url_canonical(url: &Url) -> Result { if url.scheme() != "http" && url.scheme() != "https" { return Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unsupported scheme {}", url.scheme()), )); } let client = crate::http::build_client() .build() .map_err(|e| CanonicalizeError::Unverifiable(url.clone(), format!("HTTP error {}", e)))?; let response = client.get(url.clone()).send().await.map_err(|e| { CanonicalizeError::Unverifiable(url.clone(), format!("HTTP error {}", e)) })?; match response.status() { status if status.is_success() => Ok(response.url().clone()), status if status == reqwest::StatusCode::TOO_MANY_REQUESTS => { Err(CanonicalizeError::RateLimited(url.clone())) } status if status == reqwest::StatusCode::NOT_FOUND => 
Err(CanonicalizeError::InvalidUrl( url.clone(), format!("Not found: {}", response.status()), )), status if status.is_server_error() => Err(CanonicalizeError::Unverifiable( url.clone(), format!("Server down: {}", response.status()), )), _ => Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unknown HTTP error {}", response.status()), )), } } pub fn with_path_segments(url: &Url, path_segments: &[&str]) -> Result { let mut url = url.clone(); url.path_segments_mut()? .clear() .extend(path_segments.iter()); Ok(url) } #[async_trait::async_trait] pub trait Forge: Send + Sync { fn repository_browse_can_be_homepage(&self) -> bool; fn name(&self) -> &'static str; fn bug_database_url_from_bug_submit_url(&self, _url: &Url) -> Option { None } fn bug_submit_url_from_bug_database_url(&self, _url: &Url) -> Option { None } async fn check_bug_database_canonical(&self, url: &Url) -> Result { Err(CanonicalizeError::Unverifiable( url.clone(), "Not implemented".to_string(), )) } async fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { Err(CanonicalizeError::Unverifiable( url.clone(), "Not implemented".to_string(), )) } fn bug_database_from_issue_url(&self, _url: &Url) -> Option { None } fn bug_database_url_from_repo_url(&self, _url: &Url) -> Option { None } fn repo_url_from_merge_request_url(&self, _url: &Url) -> Option { None } async fn extend_metadata( &self, _metadata: &mut Vec, _project: &str, _max_certainty: Option, ) { } } pub struct GitHub; impl Default for GitHub { fn default() -> Self { Self::new() } } impl GitHub { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl Forge for GitHub { fn name(&self) -> &'static str { "GitHub" } fn repository_browse_can_be_homepage(&self) -> bool { true } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 && path_elements.len() != 4 { 
return None; } if path_elements[2] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); Some(with_path_segments(&url, &path_elements[0..3]).unwrap()) } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 { return None; } if path_elements[2] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); url.path_segments_mut().unwrap().push("new"); Some(url) } async fn check_bug_database_canonical(&self, url: &Url) -> Result { assert_eq!(url.host(), Some(url::Host::Domain("github.com"))); let path_elements = url.path_segments().unwrap().collect::>(); if path_elements.len() != 3 { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitHub URL with missing path elements".to_string(), )); } if path_elements[2] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitHub URL with missing path elements".to_string(), )); } let api_url = Url::parse(&format!( "https://api.github.com/repos/{}/{}", path_elements[0], path_elements[1] )) .unwrap(); let response = match reqwest::get(api_url).await { Ok(response) => response, Err(e) if e.status() == Some(reqwest::StatusCode::NOT_FOUND) => { return Err(CanonicalizeError::InvalidUrl( url.clone(), format!("Project does not exist {}", e), )); } Err(e) if e.status() == Some(reqwest::StatusCode::FORBIDDEN) => { // Probably rate limited warn!("Unable to verify bug database URL {}: {}", url, e); return Err(CanonicalizeError::RateLimited(url.clone())); } Err(e) => { return Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {}", e), )); } }; let data = response.json::().await.map_err(|e| { CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {}", e), ) })?; if 
data["has_issues"].as_bool() != Some(true) { return Err(CanonicalizeError::InvalidUrl( url.clone(), "Project does not have issues enabled".to_string(), )); } if data.get("archived").unwrap_or(&serde_json::Value::Null) == &serde_json::Value::Bool(true) { return Err(CanonicalizeError::InvalidUrl( url.clone(), "Project is archived".to_string(), )); } let mut url = Url::parse(data["html_url"].as_str().ok_or_else(|| { CanonicalizeError::Unverifiable( url.clone(), "Unable to verify bug database URL: no html_url".to_string(), ) })?) .map_err(|e| { CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {}", e), ) })?; url.set_scheme("https").expect("valid scheme"); url.path_segments_mut() .expect("path segments") .push("issues"); Ok(url) } async fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { let mut path_segments = url.path_segments().unwrap().collect::>(); path_segments.pop(); let db_url = with_path_segments(url, &path_segments).unwrap(); let mut canonical_db_url = self.check_bug_database_canonical(&db_url).await?; canonical_db_url.set_scheme("https").expect("valid scheme"); canonical_db_url .path_segments_mut() .expect("path segments") .push("new"); Ok(canonical_db_url) } fn bug_database_from_issue_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 || path_elements[1] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").unwrap(); Some(with_path_segments(&url, &path_elements[0..3]).unwrap()) } fn bug_database_url_from_repo_url(&self, url: &Url) -> Option { let mut path = url .path_segments() .into_iter() .take(2) .flatten() .collect::>(); path[1] = path[1].strip_suffix(".git").unwrap_or(path[1]); path.push("issues"); let mut url = url.clone(); url.set_scheme("https").unwrap(); Some(with_path_segments(&url, path.as_slice()).unwrap()) } fn repo_url_from_merge_request_url(&self, url: &Url) -> Option { let 
path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 || path_elements[1] != "issues" { return None; } let mut url = url.clone(); url.set_scheme("https").expect("valid scheme"); Some(with_path_segments(&url, &path_elements[0..2]).unwrap()) } } static DEFAULT_ASCII_SET: percent_encoding::AsciiSet = percent_encoding::CONTROLS .add(b'/') .add(b'?') .add(b'#') .add(b'%'); pub struct GitLab; impl Default for GitLab { fn default() -> Self { Self::new() } } impl GitLab { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl Forge for GitLab { fn name(&self) -> &'static str { "GitLab" } fn repository_browse_can_be_homepage(&self) -> bool { true } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { let mut path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 { return None; } if path_elements[path_elements.len() - 2] != "issues" { return None; } if path_elements[path_elements.len() - 1] != "new" { path_elements.pop(); } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 3]).unwrap()) } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 { return None; } if path_elements[path_elements.len() - 1] != "issues" { return None; } let mut url = url.clone(); url.path_segments_mut().expect("path segments").push("new"); Some(url) } async fn check_bug_database_canonical(&self, url: &Url) -> Result { let host = url .host() .ok_or_else(|| CanonicalizeError::InvalidUrl(url.clone(), "no host".to_string()))?; let mut path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 1] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } path_elements.pop(); let proj = 
path_elements.join("/"); let proj_segment = utf8_percent_encode(proj.as_str(), &DEFAULT_ASCII_SET); let api_url = Url::parse(&format!( "https://{}/api/v4/projects/{}", host, proj_segment )) .map_err(|_| { CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with invalid project path".to_string(), ) })?; match load_json_url(&api_url, None).await { Ok(data) => { // issues_enabled is only provided when the user is authenticated, // so if we're not then we just fall back to checking the canonical URL let issues_enabled = data .get("issues_enabled") .unwrap_or(&serde_json::Value::Null); if issues_enabled.as_bool() == Some(false) { return Err(CanonicalizeError::InvalidUrl( url.clone(), "Project does not have issues enabled".to_string(), )); } let mut canonical_url = Url::parse(data["web_url"].as_str().unwrap()).unwrap(); canonical_url .path_segments_mut() .unwrap() .extend(&["-", "issues"]); if issues_enabled.as_bool() == Some(true) { return Ok(canonical_url); } check_url_canonical(&canonical_url).await } Err(HTTPJSONError::Error { status, .. 
}) if status == reqwest::StatusCode::NOT_FOUND => { Err(CanonicalizeError::InvalidUrl( url.clone(), "Project not found".to_string(), )) } Err(e) => Err(CanonicalizeError::Unverifiable( url.clone(), format!("Unable to verify bug database URL: {:?}", e), )), } } async fn check_bug_submit_url_canonical(&self, url: &Url) -> Result { let path_elements = url .path_segments() .expect("valid segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 2] != "issues" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } if path_elements[path_elements.len() - 1] != "new" { return Err(CanonicalizeError::InvalidUrl( url.clone(), "GitLab URL with missing path elements".to_string(), )); } let db_url = with_path_segments(url, &path_elements[0..path_elements.len() - 1]).unwrap(); let mut canonical_db_url = self.check_bug_database_canonical(&db_url).await?; canonical_db_url .path_segments_mut() .expect("valid segments") .push("new"); Ok(canonical_db_url) } fn bug_database_from_issue_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("valid segments") .collect::>(); if path_elements.len() < 2 || path_elements[path_elements.len() - 2] != "issues" || path_elements[path_elements.len() - 1] .parse::() .is_err() { return None; } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 1]).unwrap()) } fn bug_database_url_from_repo_url(&self, url: &Url) -> Option { let mut url = url.clone(); let last = url .path_segments() .expect("valid segments") .last() .unwrap() .to_string(); url.path_segments_mut() .unwrap() .pop() .push(last.trim_end_matches(".git")) .push("issues"); Some(url) } fn repo_url_from_merge_request_url(&self, url: &Url) -> Option { let path_elements = url .path_segments() .expect("path segments") .collect::>(); if path_elements.len() < 3 || path_elements[path_elements.len() - 2] != "merge_requests" || path_elements[path_elements.len() - 
1] .parse::() .is_err() { return None; } Some(with_path_segments(url, &path_elements[0..path_elements.len() - 2]).unwrap()) } } pub fn guess_from_travis_yml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_yaml::Value = serde_yaml::from_str(&contents).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut ret = Vec::new(); if let Some(go_import_path) = data.get("go_import_path") { if let Some(go_import_path) = go_import_path.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(go_import_path.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(ret) } pub fn guess_from_environment() -> std::result::Result, ProviderError> { let mut results = Vec::new(); if let Ok(url) = std::env::var("UPSTREAM_BRANCH_URL") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Certain), origin: Some(Origin::Other("environment".to_string())), }); } Ok(results) } fn find_datum<'a>( metadata: &'a [UpstreamDatumWithMetadata], field: &str, ) -> Option<&'a UpstreamDatumWithMetadata> { metadata.iter().find(|d| d.datum.field() == field) } fn set_datum(metadata: &mut Vec, datum: UpstreamDatumWithMetadata) { if let Some(idx) = metadata .iter() .position(|d| d.datum.field() == datum.datum.field()) { metadata[idx] = datum; } else { metadata.push(datum); } } pub fn update_from_guesses( metadata: &mut Vec, new_items: impl Iterator, ) -> Vec { let mut changed = vec![]; for datum in new_items { let current_datum = find_datum(metadata, datum.datum.field()); if current_datum.is_none() || datum.certainty > current_datum.unwrap().certainty { changed.push(datum.clone()); set_datum(metadata, datum); } } changed } fn possible_fields_missing( upstream_metadata: &[UpstreamDatumWithMetadata], fields: &[&str], _field_certainty: 
Certainty, ) -> bool { for field in fields { match find_datum(upstream_metadata, field) { Some(datum) if datum.certainty != Some(Certainty::Certain) => return true, None => return true, _ => (), } } false } async fn extend_from_external_guesser< F: Fn() -> Fut, Fut: std::future::Future>, >( metadata: &mut Vec, max_certainty: Option, supported_fields: &[&str], new_items: F, ) { if max_certainty.is_some() && !possible_fields_missing(metadata, supported_fields, max_certainty.unwrap()) { return; } let new_items = new_items() .await .into_iter() .map(|item| UpstreamDatumWithMetadata { datum: item, certainty: max_certainty, origin: None, }); update_from_guesses(metadata, new_items); } pub struct SourceForge; impl Default for SourceForge { fn default() -> Self { Self::new() } } impl SourceForge { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl Forge for SourceForge { fn name(&self) -> &'static str { "SourceForge" } fn repository_browse_can_be_homepage(&self) -> bool { false } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { let mut segments = url.path_segments()?; if segments.next() != Some("p") { return None; } let project = segments.next()?; if segments.next() != Some("bugs") { return None; } with_path_segments(url, &["p", project, "bugs"]).ok() } async fn extend_metadata( &self, metadata: &mut Vec, project: &str, max_certainty: Option, ) { let subproject = find_datum(metadata, "Name").and_then(|f| match f.datum { UpstreamDatum::Name(ref name) => Some(name.to_string()), _ => None, }); extend_from_external_guesser( metadata, max_certainty, &["Homepage", "Name", "Repository", "Bug-Database"], || async { crate::forges::sourceforge::guess_from_sf(project, subproject.as_deref()).await }, ) .await } } pub struct Launchpad; impl Default for Launchpad { fn default() -> Self { Self::new() } } impl Launchpad { pub fn new() -> Self { Self } } impl Forge for Launchpad { fn name(&self) -> &'static str { "launchpad" } fn 
repository_browse_can_be_homepage(&self) -> bool { false } fn bug_database_url_from_bug_submit_url(&self, url: &Url) -> Option { if url.host_str()? != "bugs.launchpad.net" { return None; } let mut segments = url.path_segments()?; let project = segments.next()?; with_path_segments(url, &[project]).ok() } fn bug_submit_url_from_bug_database_url(&self, url: &Url) -> Option { if url.host_str()? != "bugs.launchpad.net" { return None; } let mut segments = url.path_segments()?; let project = segments.next()?; with_path_segments(url, &[project, "+filebug"]).ok() } } pub async fn find_forge(url: &Url, net_access: Option) -> Option> { if url.host_str()? == "sourceforge.net" { return Some(Box::new(SourceForge::new())); } if url.host_str()?.ends_with(".launchpad.net") { return Some(Box::new(Launchpad::new())); } if url.host_str()? == "github.com" { return Some(Box::new(GitHub::new())); } if vcs::is_gitlab_site(url.host_str()?, net_access).await { return Some(Box::new(GitLab::new())); } None } pub async fn check_bug_database_canonical( url: &Url, net_access: Option, ) -> Result { if let Some(forge) = find_forge(url, net_access).await { forge .bug_database_url_from_bug_submit_url(url) .ok_or(CanonicalizeError::Unverifiable( url.clone(), "no bug database URL found".to_string(), )) } else { Err(CanonicalizeError::Unverifiable( url.clone(), "unknown forge".to_string(), )) } } pub async fn bug_submit_url_from_bug_database_url( url: &Url, net_access: Option, ) -> Option { if let Some(forge) = find_forge(url, net_access).await { forge.bug_submit_url_from_bug_database_url(url) } else { None } } pub async fn bug_database_url_from_bug_submit_url( url: &Url, net_access: Option, ) -> Option { if let Some(forge) = find_forge(url, net_access).await { forge.bug_database_url_from_bug_submit_url(url) } else { None } } pub async fn guess_bug_database_url_from_repo_url( url: &Url, net_access: Option, ) -> Option { if let Some(forge) = find_forge(url, net_access).await { 
forge.bug_database_url_from_repo_url(url) } else { None } } pub async fn repo_url_from_merge_request_url(url: &Url, net_access: Option) -> Option { if let Some(forge) = find_forge(url, net_access).await { forge.repo_url_from_merge_request_url(url) } else { None } } pub async fn bug_database_from_issue_url(url: &Url, net_access: Option) -> Option { if let Some(forge) = find_forge(url, net_access).await { forge.bug_database_from_issue_url(url) } else { None } } pub async fn check_bug_submit_url_canonical( url: &Url, net_access: Option, ) -> Result { if let Some(forge) = find_forge(url, net_access).await { forge .bug_submit_url_from_bug_database_url(url) .ok_or(CanonicalizeError::Unverifiable( url.clone(), "no bug submit URL found".to_string(), )) } else { Err(CanonicalizeError::Unverifiable( url.clone(), "unknown forge".to_string(), )) } } pub fn extract_pecl_package_name(url: &str) -> Option { let pecl_regex = regex!(r"https?://pecl\.php\.net/package/(.*)"); if let Some(captures) = pecl_regex.captures(url) { return captures.get(1).map(|m| m.as_str().to_string()); } None } pub fn extract_hackage_package(url: &str) -> Option { let hackage_regex = regex!(r"https?://hackage\.haskell\.org/package/([^/]+)/.*"); if let Some(captures) = hackage_regex.captures(url) { return captures.get(1).map(|m| m.as_str().to_string()); } None } /// Obtain metadata from a URL related to the project pub fn metadata_from_url(url: &str, origin: &Origin) -> Vec { let mut results = Vec::new(); if let Some(sf_project) = crate::forges::sourceforge::extract_sf_project_name(url) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::SourceForgeProject(sf_project), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("SourceForge".to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } if let Some(pecl_package) = extract_pecl_package_name(url) { 
results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::PeclPackage(pecl_package), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("Pecl".to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } if let Some(haskell_package) = extract_hackage_package(url) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::HaskellPackage(haskell_package), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("Hackage".to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } results } pub async fn get_repology_metadata(srcname: &str, repo: Option<&str>) -> Option { let repo = repo.unwrap_or("debian_unstable"); let url = format!( "https://repology.org/tools/project-by?repo={}&name_type=srcname' '&target_page=api_v1_project&name={}", repo, srcname ); match load_json_url(&Url::parse(url.as_str()).unwrap(), None).await { Ok(json) => Some(json), Err(HTTPJSONError::Error { status: 404, .. 
}) => None, Err(e) => { debug!("Failed to load repology metadata: {:?}", e); None } } } pub fn guess_from_path( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let basename = path.file_name().and_then(|s| s.to_str()); let mut ret = Vec::new(); if let Some(basename_str) = basename { let re = regex!(r"(.*)-([0-9.]+)"); if let Some(captures) = re.captures(basename_str) { if let Some(name) = captures.get(1) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(version) = captures.get(2) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } else { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(basename_str.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } Ok(ret) } #[cfg(feature = "pyo3")] impl FromPyObject<'_> for UpstreamDatum { fn extract_bound(obj: &Bound) -> PyResult { let (field, val): (String, Bound) = if let Ok((field, val)) = obj.extract::<(String, Bound)>() { (field, val) } else if let Ok(datum) = obj.getattr("datum") { let field = datum.getattr("field")?.extract::()?; let val = datum.getattr("value")?; (field, val) } else if obj.hasattr("field")? && obj.hasattr("value")? 
{ let field = obj.getattr("field")?.extract::()?; let val = obj.getattr("value")?; (field, val) } else { return Err(PyTypeError::new_err(( format!("Expected a tuple of (field, value) or an object with field and value attributesm, found {:?}", obj), ))); }; match field.as_str() { "Name" => Ok(UpstreamDatum::Name(val.extract::()?)), "Version" => Ok(UpstreamDatum::Version(val.extract::()?)), "Homepage" => Ok(UpstreamDatum::Homepage(val.extract::()?)), "Bug-Database" => Ok(UpstreamDatum::BugDatabase(val.extract::()?)), "Bug-Submit" => Ok(UpstreamDatum::BugSubmit(val.extract::()?)), "Contact" => Ok(UpstreamDatum::Contact(val.extract::()?)), "Repository" => Ok(UpstreamDatum::Repository(val.extract::()?)), "Repository-Browse" => Ok(UpstreamDatum::RepositoryBrowse(val.extract::()?)), "License" => Ok(UpstreamDatum::License(val.extract::()?)), "Description" => Ok(UpstreamDatum::Description(val.extract::()?)), "Summary" => Ok(UpstreamDatum::Summary(val.extract::()?)), "Cargo-Crate" => Ok(UpstreamDatum::CargoCrate(val.extract::()?)), "Security-MD" => Ok(UpstreamDatum::SecurityMD(val.extract::()?)), "Security-Contact" => Ok(UpstreamDatum::SecurityContact(val.extract::()?)), "Keywords" => Ok(UpstreamDatum::Keywords(val.extract::>()?)), "Copyright" => Ok(UpstreamDatum::Copyright(val.extract::()?)), "Documentation" => Ok(UpstreamDatum::Documentation(val.extract::()?)), "API-Documentation" => Ok(UpstreamDatum::APIDocumentation(val.extract::()?)), "Go-Import-Path" => Ok(UpstreamDatum::GoImportPath(val.extract::()?)), "Download" => Ok(UpstreamDatum::Download(val.extract::()?)), "Wiki" => Ok(UpstreamDatum::Wiki(val.extract::()?)), "MailingList" => Ok(UpstreamDatum::MailingList(val.extract::()?)), "Funding" => Ok(UpstreamDatum::Funding(val.extract::()?)), "SourceForge-Project" => { Ok(UpstreamDatum::SourceForgeProject(val.extract::()?)) } "Archive" => Ok(UpstreamDatum::Archive(val.extract::()?)), "Demo" => Ok(UpstreamDatum::Demo(val.extract::()?)), "Pecl-Package" => 
Ok(UpstreamDatum::PeclPackage(val.extract::()?)), "Haskell-Package" => Ok(UpstreamDatum::HaskellPackage(val.extract::()?)), "Author" => Ok(UpstreamDatum::Author(val.extract::>()?)), "Maintainer" => Ok(UpstreamDatum::Maintainer(val.extract::()?)), "Changelog" => Ok(UpstreamDatum::Changelog(val.extract::()?)), "Screenshots" => Ok(UpstreamDatum::Screenshots(val.extract::>()?)), "Cite-As" => Ok(UpstreamDatum::CiteAs(val.extract::()?)), "Registry" => { let v = val.extract::>>()?; let mut registry = Vec::new(); for item in v { let name = item.get_item("Name")?.extract::()?; let entry = item.get_item("Entry")?.extract::()?; registry.push((name, entry)); } Ok(UpstreamDatum::Registry(registry)) } "Donation" => Ok(UpstreamDatum::Donation(val.extract::()?)), "Webservice" => Ok(UpstreamDatum::Webservice(val.extract::()?)), "BuildSystem" => Ok(UpstreamDatum::BuildSystem(val.extract::()?)), "FAQ" => Ok(UpstreamDatum::FAQ(val.extract::()?)), _ => Err(PyRuntimeError::new_err(format!("Unknown field: {}", field))), } } } #[cfg(feature = "pyo3")] impl ToPyObject for UpstreamDatum { fn to_object(&self, py: Python) -> PyObject { ( self.field().to_string(), match self { UpstreamDatum::Name(n) => n.into_py(py), UpstreamDatum::Version(v) => v.into_py(py), UpstreamDatum::Contact(c) => c.into_py(py), UpstreamDatum::Summary(s) => s.into_py(py), UpstreamDatum::License(l) => l.into_py(py), UpstreamDatum::Homepage(h) => h.into_py(py), UpstreamDatum::Description(d) => d.into_py(py), UpstreamDatum::BugDatabase(b) => b.into_py(py), UpstreamDatum::BugSubmit(b) => b.into_py(py), UpstreamDatum::Repository(r) => r.into_py(py), UpstreamDatum::RepositoryBrowse(r) => r.into_py(py), UpstreamDatum::SecurityMD(s) => s.into_py(py), UpstreamDatum::SecurityContact(s) => s.into_py(py), UpstreamDatum::CargoCrate(c) => c.into_py(py), UpstreamDatum::Keywords(ks) => ks.to_object(py), UpstreamDatum::Copyright(c) => c.into_py(py), UpstreamDatum::Documentation(a) => a.into_py(py), UpstreamDatum::APIDocumentation(a) => 
a.into_py(py), UpstreamDatum::GoImportPath(ip) => ip.into_py(py), UpstreamDatum::Archive(a) => a.into_py(py), UpstreamDatum::Demo(d) => d.into_py(py), UpstreamDatum::Maintainer(m) => m.to_object(py), UpstreamDatum::Author(a) => a.to_object(py), UpstreamDatum::Wiki(w) => w.into_py(py), UpstreamDatum::Download(d) => d.into_py(py), UpstreamDatum::MailingList(m) => m.into_py(py), UpstreamDatum::SourceForgeProject(m) => m.into_py(py), UpstreamDatum::PeclPackage(p) => p.into_py(py), UpstreamDatum::Funding(p) => p.into_py(py), UpstreamDatum::Changelog(c) => c.into_py(py), UpstreamDatum::HaskellPackage(p) => p.into_py(py), UpstreamDatum::DebianITP(i) => i.into_py(py), UpstreamDatum::Screenshots(s) => s.to_object(py), UpstreamDatum::CiteAs(s) => s.to_object(py), UpstreamDatum::Registry(r) => r .iter() .map(|(name, entry)| { let dict = PyDict::new_bound(py); dict.set_item("Name", name).unwrap(); dict.set_item("Entry", entry).unwrap(); dict.into() }) .collect::>() .to_object(py), UpstreamDatum::Donation(d) => d.to_object(py), UpstreamDatum::Webservice(w) => w.to_object(py), UpstreamDatum::BuildSystem(b) => b.to_object(py), UpstreamDatum::FAQ(f) => f.to_object(py), }, ) .to_object(py) } } #[cfg(feature = "pyo3")] impl FromPyObject<'_> for UpstreamDatumWithMetadata { fn extract_bound(obj: &Bound) -> PyResult { let certainty = obj.getattr("certainty")?.extract::>()?; let origin = obj.getattr("origin")?.extract::>()?; let datum = if obj.hasattr("datum")? 
{ obj.getattr("datum")?.extract::() } else { obj.extract::() }?; Ok(UpstreamDatumWithMetadata { datum, certainty: certainty.map(|s| s.parse().unwrap()), origin, }) } } #[derive(Debug)] pub enum ProviderError { ParseError(String), IoError(std::io::Error), Other(String), HttpJsonError(HTTPJSONError), ExtrapolationLimitExceeded(usize), } impl std::fmt::Display for ProviderError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { ProviderError::ParseError(e) => write!(f, "Parse error: {}", e), ProviderError::IoError(e) => write!(f, "IO error: {}", e), ProviderError::Other(e) => write!(f, "Other error: {}", e), ProviderError::HttpJsonError(e) => write!(f, "HTTP JSON error: {}", e), ProviderError::ExtrapolationLimitExceeded(e) => { write!(f, "Extrapolation limit exceeded: {}", e) } } } } impl std::error::Error for ProviderError {} impl From for ProviderError { fn from(e: HTTPJSONError) -> Self { ProviderError::HttpJsonError(e) } } impl From for ProviderError { fn from(e: std::io::Error) -> Self { ProviderError::IoError(e) } } impl From for ProviderError { fn from(e: reqwest::Error) -> Self { ProviderError::Other(e.to_string()) } } #[cfg(feature = "pyo3")] pyo3::create_exception!( upstream_ontologist, ParseError, pyo3::exceptions::PyException ); #[cfg(feature = "pyo3")] impl From for PyErr { fn from(e: ProviderError) -> PyErr { match e { ProviderError::IoError(e) => e.into(), ProviderError::ParseError(e) => ParseError::new_err((e,)), ProviderError::Other(e) => PyRuntimeError::new_err((e,)), ProviderError::HttpJsonError(e) => PyRuntimeError::new_err((e.to_string(),)), ProviderError::ExtrapolationLimitExceeded(e) => { PyRuntimeError::new_err((e.to_string(),)) } } } } #[derive(Debug, Default, Clone)] pub struct GuesserSettings { pub trust_package: bool, } pub struct UpstreamMetadataGuesser { pub name: std::path::PathBuf, pub guess: Box Result, ProviderError>>, } impl std::fmt::Debug for UpstreamMetadataGuesser { fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("UpstreamMetadataGuesser") .field("name", &self.name) .finish() } } type OldAsyncGuesser = fn( PathBuf, GuesserSettings, ) -> Pin< Box< dyn std::future::Future, ProviderError>> + Send, >, >; const OLD_STATIC_GUESSERS: &[(&str, OldAsyncGuesser)] = &[ #[cfg(feature = "debian")] ("debian/watch", |path, settings| { Box::pin(async move { crate::providers::debian::guess_from_debian_watch(&path, &settings).await }) }), #[cfg(feature = "debian")] ("debian/control", |path, settings| { Box::pin( async move { crate::providers::debian::guess_from_debian_control(&path, &settings) }, ) }), #[cfg(feature = "debian")] ("debian/changelog", |path, settings| { Box::pin(async move { crate::providers::debian::guess_from_debian_changelog(&path, &settings).await }) }), #[cfg(feature = "debian")] ("debian/rules", |path, settings| { Box::pin(async move { crate::providers::debian::guess_from_debian_rules(&path, &settings) }) }), #[cfg(feature = "python-pkginfo")] ("PKG-INFO", |path, settings| { Box::pin( async move { crate::providers::python::guess_from_pkg_info(&path, &settings).await }, ) }), ("package.json", |path, settings| { Box::pin(async move { crate::providers::package_json::guess_from_package_json(&path, &settings) }) }), ("composer.json", |path, settings| { Box::pin(async move { crate::providers::composer_json::guess_from_composer_json(&path, &settings) }) }), ("package.xml", |path, settings| { Box::pin( async move { crate::providers::package_xml::guess_from_package_xml(&path, &settings) }, ) }), ("package.yaml", |path, settings| { Box::pin(async move { crate::providers::package_yaml::guess_from_package_yaml(&path, &settings) }) }), #[cfg(feature = "dist-ini")] ("dist.ini", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_dist_ini(&path, &settings) }) }), #[cfg(feature = "debian")] ("debian/copyright", |path, settings| { Box::pin(async move { 
crate::providers::debian::guess_from_debian_copyright(&path, &settings).await }) }), ("META.json", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_json(&path, &settings) }) }), ("MYMETA.json", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_json(&path, &settings) }) }), ("META.yml", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_yml(&path, &settings) }) }), ("MYMETA.yml", |path, settings| { Box::pin(async move { crate::providers::perl::guess_from_meta_yml(&path, &settings) }) }), ("configure", |path, settings| { Box::pin(async move { crate::providers::autoconf::guess_from_configure(&path, &settings) }) }), #[cfg(feature = "r-description")] ("DESCRIPTION", |path, settings| { Box::pin( async move { crate::providers::r::guess_from_r_description(&path, &settings).await }, ) }), #[cfg(feature = "cargo")] ("Cargo.toml", |path, settings| { Box::pin(async move { crate::providers::rust::guess_from_cargo(&path, &settings) }) }), ("pom.xml", |path, settings| { Box::pin(async move { crate::providers::maven::guess_from_pom_xml(&path, &settings) }) }), #[cfg(feature = "git-config")] (".git/config", |path, settings| { Box::pin(async move { crate::providers::git::guess_from_git_config(&path, &settings) }) }), ("debian/get-orig-source.sh", |path, settings| { Box::pin(async move { crate::vcs_command::guess_from_get_orig_source(&path, &settings) }) }), #[cfg(feature = "pyproject-toml")] ("pyproject.toml", |path, settings| { Box::pin( async move { crate::providers::python::guess_from_pyproject_toml(&path, &settings) }, ) }), #[cfg(feature = "setup-cfg")] ("setup.cfg", |path, settings| { Box::pin( async move { crate::providers::python::guess_from_setup_cfg(&path, &settings).await }, ) }), ("go.mod", |path, settings| { Box::pin(async move { crate::providers::go::guess_from_go_mod(&path, &settings) }) }), ("Makefile.PL", |path, settings| { Box::pin(async move { 
crate::providers::perl::guess_from_makefile_pl(&path, &settings) }) }), ("wscript", |path, settings| { Box::pin(async move { crate::providers::waf::guess_from_wscript(&path, &settings) }) }), ("AUTHORS", |path, settings| { Box::pin(async move { crate::providers::authors::guess_from_authors(&path, &settings) }) }), ("INSTALL", |path, settings| { Box::pin(async move { crate::providers::guess_from_install(&path, &settings).await }) }), ("pubspec.yaml", |path, settings| { Box::pin( async move { crate::providers::pubspec::guess_from_pubspec_yaml(&path, &settings) }, ) }), ("pubspec.yml", |path, settings| { Box::pin( async move { crate::providers::pubspec::guess_from_pubspec_yaml(&path, &settings) }, ) }), ("meson.build", |path, settings| { Box::pin(async move { crate::providers::meson::guess_from_meson(&path, &settings) }) }), ("metadata.json", |path, settings| { Box::pin(async move { crate::providers::metadata_json::guess_from_metadata_json(&path, &settings) }) }), (".travis.yml", |path, settings| { Box::pin(async move { crate::guess_from_travis_yml(&path, &settings) }) }), ]; fn find_guessers(path: &std::path::Path) -> Vec> { let mut candidates: Vec> = Vec::new(); let path = path.canonicalize().unwrap(); for (name, cb) in OLD_STATIC_GUESSERS { let subpath = path.join(name); if subpath.exists() { candidates.push(Box::new(PathGuesser { name: name.to_string(), subpath: subpath.clone(), cb: Box::new(move |p, s| Box::pin(cb(p.to_path_buf(), s.clone()))), })); } } for name in ["SECURITY.md", ".github/SECURITY.md", "docs/SECURITY.md"].iter() { if path.join(name).exists() { let subpath = path.join(name); candidates.push(Box::new(PathGuesser { name: name.to_string(), subpath: subpath.clone(), cb: Box::new(|p, s| { let name = name.to_string(); Box::pin(async move { crate::providers::security_md::guess_from_security_md(&name, &p, &s) }) }), })); } } let mut found_pkg_info = path.join("PKG-INFO").exists(); #[cfg(feature = "python-pkginfo")] for entry in 
std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if filename.ends_with(".egg-info") { candidates.push(Box::new(PathGuesser { name: format!("{}/PKG-INFO", filename), subpath: entry.path().join("PKG-INFO"), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::python::guess_from_pkg_info(&p, &s).await }, ) }), })); found_pkg_info = true; } else if filename.ends_with(".dist-info") { candidates.push(Box::new(PathGuesser { name: format!("{}/METADATA", filename), subpath: entry.path().join("METADATA"), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::python::guess_from_pkg_info(&p, &s).await }, ) }), })); found_pkg_info = true; } } #[cfg(feature = "pyo3")] if !found_pkg_info && path.join("setup.py").exists() { candidates.push(Box::new(PathGuesser { name: "setup.py".to_string(), subpath: path.join("setup.py"), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::python::guess_from_setup_py(&path, s.trust_package).await }) }), })); } for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".gemspec") { candidates.push(Box::new(PathGuesser { name: entry.file_name().to_string_lossy().to_string(), subpath: entry.path(), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::ruby::guess_from_gemspec(&p, &s).await }, ) }), })); } } // TODO(jelmer): Perhaps scan all directories if no other primary project information file has been found? 
#[cfg(feature = "r-description")] for entry in std::fs::read_dir(&path).unwrap() { let entry = entry.unwrap(); let path = entry.path(); if entry.file_type().unwrap().is_dir() { let description_name = format!("{}/DESCRIPTION", entry.file_name().to_string_lossy()); if path.join(&description_name).exists() { candidates.push(Box::new(PathGuesser { name: description_name, subpath: path.join("DESCRIPTION"), cb: Box::new(|p, s| { Box::pin(async move { crate::providers::r::guess_from_r_description(&p, &s).await }) }), })); } } } let mut doap_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if filename.ends_with(".doap") || (filename.ends_with(".xml") && filename.starts_with("doap_XML_")) { Some(entry.file_name()) } else { None } }) .collect::>(); if doap_filenames.len() == 1 { let doap_filename = doap_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: doap_filename.to_string_lossy().to_string(), subpath: path.join(&doap_filename), cb: Box::new(|p, s| { Box::pin( async move { crate::providers::doap::guess_from_doap(&p, s.trust_package) }, ) }), })); } else if doap_filenames.len() > 1 { log::warn!( "Multiple DOAP files found: {:?}, ignoring all.", doap_filenames ); } let mut metainfo_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry .file_name() .to_string_lossy() .ends_with(".metainfo.xml") { Some(entry.file_name()) } else { None } }) .collect::>(); if metainfo_filenames.len() == 1 { let metainfo_filename = metainfo_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: metainfo_filename.to_string_lossy().to_string(), subpath: path.join(&metainfo_filename), cb: Box::new(|p, s| { Box::pin(async move { crate::providers::metainfo::guess_from_metainfo(&p, s.trust_package) }) }), })); } else if metainfo_filenames.len() > 1 { log::warn!( "Multiple metainfo files found: {:?}, ignoring 
all.", metainfo_filenames ); } let mut cabal_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".cabal") { Some(entry.file_name()) } else { None } }) .collect::>(); if cabal_filenames.len() == 1 { let cabal_filename = cabal_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: cabal_filename.to_string_lossy().to_string(), subpath: path.join(&cabal_filename), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::haskell::guess_from_cabal(&path, s.trust_package) }) }), })); } else if cabal_filenames.len() > 1 { log::warn!( "Multiple cabal files found: {:?}, ignoring all.", cabal_filenames ); } let readme_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); let filename = entry.file_name().to_string_lossy().to_string(); if !(filename.to_lowercase().starts_with("readme") || filename.to_lowercase().starts_with("hacking") || filename.to_lowercase().starts_with("contributing")) { return None; } if filename.ends_with('~') { return None; } let extension = entry .path() .extension() .map(|s| s.to_string_lossy().to_string()); if extension.as_deref() == Some("html") || extension.as_deref() == Some("pdf") || extension.as_deref() == Some("xml") { return None; } Some(entry.file_name()) }) .collect::>(); for filename in readme_filenames { candidates.push(Box::new(PathGuesser { name: filename.to_string_lossy().to_string(), subpath: path.join(&filename), cb: Box::new(|path, s| { Box::pin( async move { crate::readme::guess_from_readme(&path, s.trust_package).await }, ) }), })); } let mut nuspec_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".nuspec") { Some(entry.file_name()) } else { None } }) .collect::>(); if nuspec_filenames.len() == 1 { let nuspec_filename = nuspec_filenames.remove(0); 
candidates.push(Box::new(PathGuesser { name: nuspec_filename.to_string_lossy().to_string(), subpath: path.join(&nuspec_filename), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::nuspec::guess_from_nuspec(&path, s.trust_package).await }) }), })); } else if nuspec_filenames.len() > 1 { log::warn!( "Multiple nuspec files found: {:?}, ignoring all.", nuspec_filenames ); } #[cfg(feature = "opam")] let mut opam_filenames = std::fs::read_dir(&path) .unwrap() .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".opam") { Some(entry.file_name()) } else { None } }) .collect::>(); #[cfg(feature = "opam")] match opam_filenames.len().cmp(&1) { Ordering::Equal => { let opam_filename = opam_filenames.remove(0); candidates.push(Box::new(PathGuesser { name: opam_filename.to_string_lossy().to_string(), subpath: path.join(&opam_filename), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::ocaml::guess_from_opam(&path, s.trust_package) }) }), })); } Ordering::Greater => { log::warn!( "Multiple opam files found: {:?}, ignoring all.", opam_filenames ); } Ordering::Less => {} } let debian_patches = match std::fs::read_dir(path.join("debian").join("patches")) { Ok(patches) => patches .filter_map(|entry| { let entry = entry.unwrap(); if entry.file_name().to_string_lossy().ends_with(".patch") { Some(format!( "debian/patches/{}", entry.file_name().to_string_lossy() )) } else { None } }) .collect::>(), Err(_) => Vec::new(), }; for filename in debian_patches { candidates.push(Box::new(PathGuesser { name: filename.clone(), subpath: path.join(&filename), cb: Box::new(|path, s| { Box::pin(async move { crate::providers::debian::guess_from_debian_patch(&path, &s).await }) }), })); } candidates.push(Box::new(EnvironmentGuesser::new())); candidates.push(Box::new(PathGuesser { name: ".".to_string(), subpath: path.clone(), cb: Box::new(|p, s| Box::pin(async move { crate::guess_from_path(&p, &s) })), })); candidates } 
pub(crate) fn stream( path: &Path, config: &GuesserSettings, mut guessers: Vec>, ) -> impl Stream> { // For each of the guessers, stream from the guessers in parallel (using Guesser::stream // rather than Guesser::guess) and then return the results. let abspath = std::env::current_dir().unwrap().join(path); // Create streams for each of the guessers. Call stream on each one of them, manipulate let streams = guessers.iter_mut().map(move |guesser| { let abspath = abspath.clone(); let config = config.clone(); let stream = guesser.stream(&config); let guesser_name = guesser.name().to_string(); stream.map(move |res| { res.map({ let abspath = abspath.clone(); let guesser_name = guesser_name.clone(); move |mut v| { rewrite_upstream_datum(&guesser_name, &mut v, &abspath); v } }) }) }); // Combine the streams into a single stream. futures::stream::select_all(streams) } fn rewrite_upstream_datum( guesser_name: &str, datum: &mut UpstreamDatumWithMetadata, abspath: &std::path::Path, ) { log::trace!("{}: {:?}", guesser_name, datum); datum.origin = datum .origin .clone() .or(Some(Origin::Other(guesser_name.to_string()))); if let Some(Origin::Path(p)) = datum.origin.as_ref() { if let Ok(suffix) = p.strip_prefix(abspath) { if suffix.to_str().unwrap().is_empty() { datum.origin = Some(Origin::Path(PathBuf::from_str(".").unwrap())); } else { datum.origin = Some(Origin::Path(PathBuf::from_str(".").unwrap().join(suffix))); } } } } pub fn upstream_metadata_stream( path: &std::path::Path, trust_package: Option, ) -> impl Stream> { let trust_package = trust_package.unwrap_or(false); let guessers = find_guessers(path); stream(path, &GuesserSettings { trust_package }, guessers) } pub async fn extend_upstream_metadata( upstream_metadata: &mut UpstreamMetadata, path: &std::path::Path, minimum_certainty: Option, net_access: Option, consult_external_directory: Option, ) -> Result<(), ProviderError> { let net_access = net_access.unwrap_or(false); let consult_external_directory = 
consult_external_directory.unwrap_or(false); let minimum_certainty = minimum_certainty.unwrap_or(Certainty::Confident); // TODO(jelmer): Use EXTRAPOLATE_FNS mechanism for this? for field in [ "Homepage", "Bug-Database", "Bug-Submit", "Repository", "Repository-Browse", "Download", ] { let value = match upstream_metadata.get(field) { Some(value) => value, None => continue, }; if let Some(project) = crate::forges::sourceforge::extract_sf_project_name(value.datum.as_str().unwrap()) { let certainty = Some( std::cmp::min(Some(Certainty::Likely), value.certainty) .unwrap_or(Certainty::Likely), ); upstream_metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("SourceForge".to_string()), certainty, origin: Some(Origin::Other(format!("derived from {}", field))), }); upstream_metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::SourceForgeProject(project), certainty, origin: Some(Origin::Other(format!("derived from {}", field))), }); break; } } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "SourceForge" && upstream_metadata.contains_key("SourceForge-Project") && net_access { let sf_project = upstream_metadata .get("SourceForge-Project") .unwrap() .datum .as_str() .unwrap() .to_string(); let sf_certainty = archive.unwrap().certainty; SourceForge::new() .extend_metadata( upstream_metadata.mut_items(), sf_project.as_str(), sf_certainty, ) .await; } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "Hackage" && upstream_metadata.contains_key("Hackage-Package") && net_access { let hackage_package = upstream_metadata .get("Hackage-Package") .unwrap() .datum .as_str() .unwrap() .to_string(); let hackage_certainty = archive.unwrap().certainty; crate::providers::haskell::Hackage::new() .extend_metadata( upstream_metadata.mut_items(), hackage_package.as_str(), hackage_certainty, ) .await .unwrap(); } let archive = 
upstream_metadata.get("Archive"); #[cfg(feature = "cargo")] if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "crates.io" && upstream_metadata.contains_key("Cargo-Crate") && net_access { let cargo_crate = upstream_metadata .get("Cargo-Crate") .unwrap() .datum .as_str() .unwrap() .to_string(); let crates_io_certainty = upstream_metadata.get("Archive").unwrap().certainty; crate::providers::rust::CratesIo::new() .extend_metadata( upstream_metadata.mut_items(), cargo_crate.as_str(), crates_io_certainty, ) .await .unwrap(); } let archive = upstream_metadata.get("Archive"); if archive.is_some() && archive.unwrap().datum.as_str().unwrap() == "Pecl" && upstream_metadata.contains_key("Pecl-Package") && net_access { let pecl_package = upstream_metadata .get("Pecl-Package") .unwrap() .datum .as_str() .unwrap() .to_string(); let pecl_certainty = upstream_metadata.get("Archive").unwrap().certainty; crate::providers::php::Pecl::new() .extend_metadata( upstream_metadata.mut_items(), pecl_package.as_str(), pecl_certainty, ) .await .unwrap(); } #[cfg(feature = "debian")] if net_access && consult_external_directory { // TODO(jelmer): Don't assume debian/control exists let package = match debian_control::Control::from_file_relaxed(path.join("debian/control")) { Ok((control, _)) => control.source().and_then(|s| s.name()), Err(_) => None, }; if let Some(package) = package { #[cfg(feature = "launchpad")] extend_from_lp( upstream_metadata.mut_items(), minimum_certainty, package.as_str(), None, None, ) .await; crate::providers::arch::Aur::new() .extend_metadata( upstream_metadata.mut_items(), package.as_str(), Some(minimum_certainty), ) .await .unwrap(); crate::providers::gobo::Gobo::new() .extend_metadata( upstream_metadata.mut_items(), package.as_str(), Some(minimum_certainty), ) .await .unwrap(); extend_from_repology( upstream_metadata.mut_items(), minimum_certainty, package.as_str(), ) .await; } } crate::extrapolate::extrapolate_fields(upstream_metadata, net_access, 
None).await?; Ok(()) } #[async_trait::async_trait] pub trait ThirdPartyRepository { fn name(&self) -> &'static str; fn supported_fields(&self) -> &'static [&'static str]; fn max_supported_certainty(&self) -> Certainty; async fn extend_metadata( &self, metadata: &mut Vec, name: &str, min_certainty: Option, ) -> Result<(), ProviderError> { if min_certainty.is_some() && min_certainty.unwrap() > self.max_supported_certainty() { // Don't bother if we can't meet minimum certainty return Ok(()); } extend_from_external_guesser( metadata, Some(self.max_supported_certainty()), self.supported_fields(), || async { self.guess_metadata(name).await.unwrap() }, ) .await; Ok(()) } async fn guess_metadata(&self, name: &str) -> Result, ProviderError>; } #[cfg(feature = "launchpad")] async fn extend_from_lp( upstream_metadata: &mut Vec, minimum_certainty: Certainty, package: &str, distribution: Option<&str>, suite: Option<&str>, ) { // The set of fields that Launchpad can possibly provide: let lp_fields = &["Homepage", "Repository", "Name", "Download"][..]; let lp_certainty = Certainty::Possible; if lp_certainty < minimum_certainty { // Don't bother talking to launchpad if we're not // speculating. return; } extend_from_external_guesser(upstream_metadata, Some(lp_certainty), lp_fields, || async { crate::providers::launchpad::guess_from_launchpad(package, distribution, suite) .await .unwrap() }) .await } async fn extend_from_repology( upstream_metadata: &mut Vec, minimum_certainty: Certainty, source_package: &str, ) { // The set of fields that repology can possibly provide: let repology_fields = &["Homepage", "License", "Summary", "Download"][..]; let certainty = Certainty::Confident; if certainty < minimum_certainty { // Don't bother talking to repology if we're not speculating. 
return; } extend_from_external_guesser( upstream_metadata, Some(certainty), repology_fields, || async { crate::providers::repology::guess_from_repology(source_package) .await .unwrap() }, ) .await } /// Fix existing upstream metadata. pub async fn fix_upstream_metadata(upstream_metadata: &mut UpstreamMetadata) { if let Some(repository) = upstream_metadata.get_mut("Repository") { let url = crate::vcs::sanitize_url(repository.datum.as_str().unwrap()).await; repository.datum = UpstreamDatum::Repository(url.to_string()); } if let Some(summary) = upstream_metadata.get_mut("Summary") { let s = summary.datum.as_str().unwrap(); let s = s.split_once(". ").map_or(s, |(a, _)| a); let s = s.trim_end().trim_end_matches('.'); summary.datum = UpstreamDatum::Summary(s.to_string()); } } /// Summarize the upstream metadata into a dictionary. /// /// # Arguments /// * `metadata_items`: Iterator over metadata items /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. run executables in it /// * `net_access`: Whether to allow net access /// * `consult_external_directory`: Whether to pull in data from external (user-maintained) directories. 
pub async fn summarize_upstream_metadata( metadata_items: impl Stream, path: &std::path::Path, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let check = check.unwrap_or(false); let mut upstream_metadata = UpstreamMetadata::new(); let metadata_items = metadata_items.filter_map(|item| async move { let bad: bool = item.datum.known_bad_guess(); if bad { log::debug!("Excluding known bad item {:?}", item); None } else { Some(item) } }); let metadata_items = metadata_items.collect::>().await; upstream_metadata.update(metadata_items.into_iter()); extend_upstream_metadata( &mut upstream_metadata, path, None, net_access, consult_external_directory, ) .await?; if check { check_upstream_metadata(&mut upstream_metadata, None).await; } fix_upstream_metadata(&mut upstream_metadata).await; // Sort by name upstream_metadata.sort(); Ok(upstream_metadata) } /// Guess upstream metadata items, in no particular order. /// /// # Arguments /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. 
run executables in it /// * `minimum_certainty`: Minimum certainty of guesses to return pub fn guess_upstream_metadata_items( path: &std::path::Path, trust_package: Option, minimum_certainty: Option, ) -> impl Stream> { let items = upstream_metadata_stream(path, trust_package); items.filter_map(move |e| async move { match e { Err(e) => Some(Err(e)), Ok(UpstreamDatumWithMetadata { datum, certainty, origin, }) => { if minimum_certainty.is_some() && certainty < minimum_certainty { None } else { Some(Ok(UpstreamDatumWithMetadata { datum, certainty, origin, })) } } } }) } pub async fn get_upstream_info( path: &std::path::Path, trust_package: Option, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let metadata_items = upstream_metadata_stream(path, trust_package); let metadata_items = metadata_items.filter_map(|x| async { match x { Ok(x) => Some(x), Err(e) => { log::error!("{}", e); None } } }); summarize_upstream_metadata( metadata_items, path, net_access, consult_external_directory, check, ) .await } /// Guess the upstream metadata dictionary. /// /// # Arguments /// * `path`: Path to the package /// * `trust_package`: Whether to trust the package contents and i.e. run executables in it /// * `net_access`: Whether to allow net access /// * `consult_external_directory`: Whether to pull in data from external (user-maintained) directories. 
pub async fn guess_upstream_metadata( path: &std::path::Path, trust_package: Option, net_access: Option, consult_external_directory: Option, check: Option, ) -> Result { let metadata_items = guess_upstream_metadata_items(path, trust_package, None); let metadata_items = metadata_items.filter_map(|x| async { match x { Ok(x) => Some(x), Err(e) => { log::error!("{}", e); None } } }); summarize_upstream_metadata( metadata_items, path, net_access, consult_external_directory, check, ) .await } pub async fn verify_screenshots(urls: &[&str]) -> Vec<(String, Option)> { let mut ret = Vec::new(); for url in urls { let mut request = reqwest::Request::new(reqwest::Method::GET, url.parse().unwrap()); request.headers_mut().insert( reqwest::header::USER_AGENT, reqwest::header::HeaderValue::from_static(USER_AGENT), ); match reqwest::Client::new().execute(request).await { Ok(response) => { let status = response.status(); if status.is_success() { ret.push((url.to_string(), Some(true))); } else if status.is_client_error() { ret.push((url.to_string(), Some(false))); } else { ret.push((url.to_string(), None)); } } Err(e) => { log::debug!("Error fetching {}: {}", url, e); ret.push((url.to_string(), None)); } } } ret } /// Check upstream metadata. /// /// This will make network connections, etc. 
pub async fn check_upstream_metadata( upstream_metadata: &mut UpstreamMetadata, version: Option<&str>, ) { let repository = upstream_metadata.get_mut("Repository"); if let Some(repository) = repository { match vcs::check_repository_url_canonical(repository.datum.to_url().unwrap(), version).await { Ok(canonical_url) => { repository.datum = UpstreamDatum::Repository(canonical_url.to_string()); if repository.certainty == Some(Certainty::Confident) { repository.certainty = Some(Certainty::Certain); } let derived_browse_url = vcs::browse_url_from_repo_url( &vcs::VcsLocation { url: repository.datum.to_url().unwrap(), branch: None, subpath: None, }, Some(true), ) .await; let certainty = repository.certainty; let browse_repo = upstream_metadata.get_mut("Repository-Browse"); if browse_repo.is_some() && derived_browse_url == browse_repo.as_ref().and_then(|u| u.datum.to_url()) { browse_repo.unwrap().certainty = certainty; } } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unverifiable URL: {}", u); } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Repository URL {}: {}", u, e); upstream_metadata.remove("Repository"); } } } let homepage = upstream_metadata.get_mut("Homepage"); if let Some(homepage) = homepage { match check_url_canonical(&homepage.datum.to_url().unwrap()).await { Ok(canonical_url) => { homepage.datum = UpstreamDatum::Homepage(canonical_url.to_string()); if homepage.certainty >= Some(Certainty::Likely) { homepage.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unverifiable URL: {}", u); } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Homepage URL {}: {}", u, e); upstream_metadata.remove("Homepage"); } } } if let Some(repository_browse) = upstream_metadata.get_mut("Repository-Browse") { match 
check_url_canonical(&repository_browse.datum.to_url().unwrap()).await { Ok(u) => { repository_browse.datum = UpstreamDatum::RepositoryBrowse(u.to_string()); if repository_browse.certainty >= Some(Certainty::Likely) { repository_browse.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Repository-Browse URL {}: {}", u, e); upstream_metadata.remove("Repository-Browse"); } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unable to verify Repository-Browse URL {}", u); } } } if let Some(bug_database) = upstream_metadata.get_mut("Bug-Database") { match check_bug_database_canonical(&bug_database.datum.to_url().unwrap(), Some(true)).await { Ok(u) => { bug_database.datum = UpstreamDatum::BugDatabase(u.to_string()); if bug_database.certainty >= Some(Certainty::Likely) { bug_database.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Bug-Database URL {}: {}", u, e); upstream_metadata.remove("Bug-Database"); } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unable to verify Bug-Database URL {}", u); } } } let bug_submit = upstream_metadata.get_mut("Bug-Submit"); if let Some(bug_submit) = bug_submit { match check_bug_submit_url_canonical(&bug_submit.datum.to_url().unwrap(), Some(true)).await { Ok(u) => { bug_submit.datum = UpstreamDatum::BugSubmit(u.to_string()); if bug_submit.certainty >= Some(Certainty::Likely) { bug_submit.certainty = Some(Certainty::Certain); } } Err(CanonicalizeError::InvalidUrl(u, e)) => { log::debug!("Deleting invalid Bug-Submit URL {}: {}", u, e); upstream_metadata.remove("Bug-Submit"); } Err(CanonicalizeError::Unverifiable(u, _)) | Err(CanonicalizeError::RateLimited(u)) => { log::debug!("Unable to verify Bug-Submit URL {}", u); } } } let mut screenshots = upstream_metadata.get_mut("Screenshots"); if 
screenshots.is_some() && screenshots.as_ref().unwrap().certainty == Some(Certainty::Likely) { let mut newvalue = vec![]; screenshots.as_mut().unwrap().certainty = Some(Certainty::Certain); let urls = match &screenshots.as_ref().unwrap().datum { UpstreamDatum::Screenshots(urls) => urls, _ => unreachable!(), }; for (url, status) in verify_screenshots( urls.iter() .map(|x| x.as_str()) .collect::>() .as_slice(), ) .await { match status { Some(true) => { newvalue.push(url); } Some(false) => {} None => { screenshots.as_mut().unwrap().certainty = Some(Certainty::Likely); } } } screenshots.as_mut().unwrap().datum = UpstreamDatum::Screenshots(newvalue); } } #[async_trait::async_trait] pub(crate) trait Guesser { fn name(&self) -> &str; /// Guess metadata from a given path. async fn guess( &mut self, settings: &GuesserSettings, ) -> Result, ProviderError>; fn stream( &mut self, settings: &GuesserSettings, ) -> Pin> + Send>> { let metadata = match futures::executor::block_on(self.guess(settings)) { Ok(metadata) => metadata, Err(e) => return futures::stream::once(async { Err(e) }).boxed(), }; Box::pin(futures::stream::iter(metadata.into_iter().map(Ok))) } } pub struct PathGuesser { name: String, subpath: std::path::PathBuf, cb: Box< dyn FnMut( PathBuf, GuesserSettings, ) -> Pin< Box< dyn std::future::Future< Output = Result, ProviderError>, > + Send, >, > + Send, >, } #[async_trait::async_trait] impl Guesser for PathGuesser { fn name(&self) -> &str { &self.name } async fn guess( &mut self, settings: &GuesserSettings, ) -> Result, ProviderError> { (self.cb)(self.subpath.clone(), settings.clone()).await } } pub struct EnvironmentGuesser; impl EnvironmentGuesser { pub fn new() -> Self { Self } } impl Default for EnvironmentGuesser { fn default() -> Self { Self::new() } } #[async_trait::async_trait] impl Guesser for EnvironmentGuesser { fn name(&self) -> &str { "environment" } async fn guess( &mut self, _settings: &GuesserSettings, ) -> Result, ProviderError> { 
crate::guess_from_environment() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_upstream_metadata() { let mut data = UpstreamMetadata::new(); assert_eq!(data.len(), 0); data.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://example.com".to_string()), certainty: Some(Certainty::Certain), origin: None, }); assert_eq!(data.len(), 1); assert_eq!( data.get("Homepage").unwrap().datum.as_str().unwrap(), "https://example.com" ); assert_eq!(data.homepage(), Some("https://example.com")); } #[tokio::test] async fn test_bug_database_url_from_bug_submit_url() { let url = Url::parse("https://bugs.launchpad.net/bugs/+filebug").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None) .await .unwrap(), Url::parse("https://bugs.launchpad.net/bugs").unwrap() ); let url = Url::parse("https://github.com/dulwich/dulwich/issues/new").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None) .await .unwrap(), Url::parse("https://github.com/dulwich/dulwich/issues").unwrap() ); let url = Url::parse("https://sourceforge.net/p/dulwich/bugs/new").unwrap(); assert_eq!( bug_database_url_from_bug_submit_url(&url, None) .await .unwrap(), Url::parse("https://sourceforge.net/p/dulwich/bugs").unwrap() ); } #[test] fn test_person_from_str() { assert_eq!( Person::from("Foo Bar "), Person { name: Some("Foo Bar".to_string()), email: Some("foo@example.com".to_string()), url: None } ); assert_eq!( Person::from("Foo Bar"), Person { name: Some("Foo Bar".to_string()), email: None, url: None } ); assert_eq!( Person::from("foo@example.com"), Person { name: None, email: Some("foo@example.com".to_string()), url: None } ); } } upstream-ontologist-0.2.4/src/providers/arch.rs000064400000000000000000000140471046102023000177630ustar 00000000000000use crate::{vcs, UpstreamDatum, USER_AGENT}; use log::{debug, error}; use std::collections::HashMap; use std::io::BufRead; pub fn parse_pkgbuild_variables(file: &str) -> HashMap> { let reader = 
std::io::Cursor::new(file); let mut variables = HashMap::new(); let mut keep: Option<(String, String)> = None; let mut existing: Option = None; for line in reader.lines() { let line = line.expect("Failed to read line"); if let Some(existing_line) = existing.take() { let line = [&existing_line[..existing_line.len() - 2], &line].concat(); existing = Some(line); continue; } if line.ends_with("\\\n") { existing = Some(line[..line.len() - 2].to_owned()); continue; } if line.starts_with('\t') || line.starts_with(' ') || line.starts_with('#') { continue; } if let Some((key, mut value)) = keep.take() { value.push_str(&line); if line.trim_end().ends_with(')') { let value_parts = match shlex::split(value.as_str()) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value.as_str()); continue; } }; variables.insert(key, value_parts); } else { keep = Some((key, value)); } continue; } if let Some((key, value)) = line.split_once('=') { if let Some(value) = value.strip_prefix('(') { if value.trim_end().ends_with(')') { let value = &value[0..value.len() - 1]; let value_parts = match shlex::split(value) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value); continue; } }; variables.insert(key.to_owned(), value_parts); } else { keep = Some((key.to_owned(), value.to_owned())); } } else { let value_parts = match shlex::split(value) { Some(value_parts) => value_parts, None => { error!("Failed to split value: {}", value); continue; } }; variables.insert(key.to_owned(), value_parts); } } } variables } pub async fn guess_from_aur(package: &str) -> Vec { let mut variables = HashMap::new(); for vcs in vcs::VCSES { let url = format!( "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h={}-{}", package, vcs ); let mut headers = reqwest::header::HeaderMap::new(); headers.insert(reqwest::header::USER_AGENT, USER_AGENT.parse().unwrap()); let client = reqwest::Client::builder() .default_headers(headers) .build() .unwrap(); 
debug!("Requesting {}", url); let response = client.get(&url).send().await; match response { Ok(response) => { if response.status().is_success() { let text = response.text().await.unwrap(); variables = parse_pkgbuild_variables(&text); break; } else if response.status().as_u16() != 404 { // If the response is not 404, raise an error // response.error_for_status(); error!("Error contacting AUR: {}", response.status()); return Vec::new(); } else { continue; } } Err(e) => { error!("Error contacting AUR: {}", e); return Vec::new(); } } } let mut results = Vec::new(); for (key, value) in variables.iter() { match key.as_str() { "url" => { results.push(UpstreamDatum::Homepage(value[0].to_owned())); } "source" => { if value.is_empty() { continue; } let mut value = value[0].to_owned(); if value.contains("${") { for (k, v) in variables.iter() { value = value.replace(format!("${{{}}}", k).as_str(), v.join(" ").as_str()); value = value.replace(format!("${}", k).as_str(), v.join(" ").as_str()); } } let url = match value.split_once("::") { Some((_unique_name, url)) => url, None => value.as_str(), }; let url = url.replace("#branch=", ",branch="); results.push(UpstreamDatum::Repository( vcs::strip_vcs_prefixes(url.as_str()).to_owned(), )); } "_gitroot" => { results.push(UpstreamDatum::Repository( vcs::strip_vcs_prefixes(value[0].as_str()).to_owned(), )); } _ => { debug!("Ignoring variable: {}", key); } } } results } pub struct Aur; impl Default for Aur { fn default() -> Self { Self::new() } } impl Aur { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Aur { fn name(&self) -> &'static str { "AUR" } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository"] } fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Possible } async fn guess_metadata(&self, name: &str) -> Result, crate::ProviderError> { Ok(guess_from_aur(name).await) } } 
upstream-ontologist-0.2.4/src/providers/authors.rs000064400000000000000000000032171046102023000205300ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use std::fs::File; use std::io::BufRead; use std::path::Path; pub fn guess_from_authors( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = std::io::BufReader::new(file); let mut authors: Vec = Vec::new(); for line in reader.lines().map_while(Result::ok) { let mut m = line.trim().to_string(); if m.is_empty() { continue; } if m.starts_with("arch-tag: ") { continue; } if m.ends_with(':') { continue; } if m.starts_with("$Id") { continue; } if m.starts_with('*') || m.starts_with('-') { m = m[1..].trim().to_string(); } if m.len() < 3 { continue; } if m.ends_with('.') { continue; } if m.contains(" for ") { let parts: Vec<&str> = m.split(" for ").collect(); m = parts[0].to_string(); } if !m.chars().next().unwrap().is_alphabetic() { continue; } if !m.contains('<') && line.as_bytes().starts_with(b"\t") { continue; } if m.contains('<') || m.matches(' ').count() < 5 { authors.push(Person::from(m.as_str())); } } Ok(vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Likely), origin: Some(path.into()), }]) } upstream-ontologist-0.2.4/src/providers/autoconf.rs000064400000000000000000000104471046102023000206640ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::debug; use std::fs::File; use std::io::{BufRead, BufReader}; use url::Url; fn is_email_address(email: &str) -> bool { if email.contains('@') { return true; } if email.contains(" (at) ") { return true; } false } pub fn guess_from_configure( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { if std::path::Path::new(path).is_dir() { return Ok(Vec::new()); } let 
file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); for line in reader.split(b'\n').map_while(Result::ok) { let split = line.splitn(2, |&c| c == b'=').collect::>(); let (key, value) = if let [key, value] = split.as_slice() { (key, value) } else { continue; }; let key = String::from_utf8(key.to_vec()).expect("Failed to parse UTF-8"); let key = key.trim(); let value = String::from_utf8(value.to_vec()).expect("Failed to parse UTF-8"); let mut value = value.trim(); if key.contains(' ') { continue; } if value.contains('$') { continue; } if value.starts_with('\'') && value.ends_with('\'') { value = &value[1..value.len() - 1]; if value.is_empty() { continue; } } match key { "PACKAGE_NAME" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_TARNAME" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_VERSION" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "PACKAGE_BUGREPORT" => { let certainty = if value == "BUG-REPORT-ADDRESS" { None } else if is_email_address(value) { // Downgrade the trustworthiness of this field for most // upstreams if it contains an e-mail address. Most // upstreams seem to just set this to some random address, // and then forget about it. Some(Certainty::Possible) } else if value.contains("mailing list") { // Downgrade the trustworthiness of this field if // it contains a mailing list Some(Certainty::Possible) } else { let parsed_url = Url::parse(value).expect("Failed to parse URL"); if parsed_url.path().trim_end_matches('/') != "" { Some(Certainty::Certain) } else { // It seems unlikely that the bug submit URL lives at // the root. 
Some(Certainty::Possible) } }; if certainty.is_some() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugSubmit(value.to_string()), certainty, origin: Some(path.into()), }); } } "PACKAGE_URL" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } _ => { debug!("unknown key: {}", key); } } } Ok(results) } upstream-ontologist-0.2.4/src/providers/composer_json.rs000064400000000000000000000067721046102023000217340ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::error; use std::path::Path; pub fn guess_from_composer_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // https://getcomposer.org/doc/04-schema.md let file = std::fs::File::open(path)?; let package: serde_json::Value = serde_json::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); let package = match package.as_object() { Some(package) => package, None => { return Err(ProviderError::Other( "Failed to parse composer.json".to_string(), )) } }; for (field, value) in package { match field.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "homepage" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "license" => { upstream_data.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::License(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "type" => { if value != "project" { error!("unexpected composer.json type: {:?}", value); } } "keywords" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords( value .as_array() .unwrap() .iter() .map(|v| v.as_str().unwrap().to_string()) .collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "require" | "require-dev" | "autoload" | "autoload-dev" | "scripts" | "extra" | "config" | "prefer-stable" | "minimum-stability" => { // Do nothing, skip these fields } _ => { error!("Unknown field {} ({:?}) in composer.json", field, value); } } } Ok(upstream_data) } upstream-ontologist-0.2.4/src/providers/debian.rs000064400000000000000000000706271046102023000202760ustar 00000000000000use crate::{ bug_database_from_issue_url, repo_url_from_merge_request_url, Certainty, GuesserSettings, Origin, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use lazy_regex::regex_captures; use log::debug; use std::fs::File; use std::io::BufRead; use std::io::Read; use std::path::Path; use url::Url; pub async fn guess_from_debian_patch( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = std::io::BufReader::new(file); let net_access = None; let mut upstream_data: Vec = Vec::new(); for line in reader.lines().map_while(Result::ok) { if line.starts_with("Forwarded: ") { let forwarded = match line.split_once(':') { Some((_, url)) => url.trim(), None => { debug!("Malformed Forwarded line in patch {}", path.display()); continue; } }; let forwarded = match Url::parse(forwarded) { Ok(url) => url, Err(e) => { debug!( "Malformed 
URL in Forwarded line in patch {}: {}", path.display(), e ); continue; } }; if let Some(bug_db) = bug_database_from_issue_url(&forwarded, net_access).await { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_db.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(repo_url) = repo_url_from_merge_request_url(&forwarded, net_access).await { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } Ok(upstream_data) } pub fn metadata_from_itp_bug_body( body: &str, origin: Option, ) -> std::result::Result, ProviderError> { let mut results: Vec = Vec::new(); // Skip first few lines with bug metadata (severity, owner, etc) let mut line_iter = body.split_terminator('\n'); let mut next_line = line_iter.next(); while let Some(line) = next_line { if next_line.is_none() { return Err(ProviderError::ParseError( "ITP bug body ended before package name".to_string(), )); } next_line = line_iter.next(); if line.trim().is_empty() { break; } } while let Some(line) = next_line { if next_line.is_none() { return Err(ProviderError::ParseError( "ITP bug body ended before package name".to_string(), )); } if !line.is_empty() { break; } next_line = line_iter.next(); } while let Some(mut line) = next_line { line = line.trim_start_matches('*').trim_start(); if line.is_empty() { break; } match line.split_once(':') { Some((key, value)) => { let key = key.trim(); let value = value.trim(); match key { "Package name" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "Version" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Possible), origin: origin.clone(), }); } "Upstream Author" if !value.is_empty() => { 
results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from(value)]), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "URL" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "License" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } "Description" => { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: Some(Certainty::Confident), origin: origin.clone(), }); } _ => { debug!("Unknown pseudo-header {} in ITP bug body", key); } } } _ => { debug!("Ignoring non-semi-field line {}", line); } } next_line = line_iter.next(); } let mut rest: Vec = Vec::new(); for line in line_iter { if line.trim() == "-- System Information:" { break; } rest.push(line.to_string()); } results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(rest.join("\n")), certainty: Some(Certainty::Likely), origin: origin.clone(), }); Ok(results) } #[test] fn test_metadata_from_itp_bug_body() { assert_eq!( vec![ UpstreamDatumWithMetadata { datum: UpstreamDatum::Name("setuptools-gettext".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Version("0.0.1".to_string()), certainty: Some(Certainty::Possible), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from("Breezy Team ")]), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://github.com/jelmer/setuptools-gettext".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("GPL".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata 
{ datum: UpstreamDatum::Summary("Compile .po files into .mo files".to_string()), certainty: Some(Certainty::Confident), origin: None, }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Description("This extension for setuptools compiles gettext .po files\nfound in the source directory into .mo files and installs them.\n".to_string()), certainty: Some(Certainty::Likely), origin: None, }, ], metadata_from_itp_bug_body( r#"Package: wnpp Severity: wishlist Owner: Jelmer Vernooij Debbugs-Cc: debian-devel@lists.debian.org * Package name : setuptools-gettext Version : 0.0.1 Upstream Author : Breezy Team * URL : https://github.com/jelmer/setuptools-gettext * License : GPL Programming Lang: Python Description : Compile .po files into .mo files This extension for setuptools compiles gettext .po files found in the source directory into .mo files and installs them. "#, None ) .unwrap() ); } #[cfg(feature = "debian")] fn read_changelog_first_entry( path: &Path, ) -> Result<(String, Option, Vec), ProviderError> { let cl = debian_changelog::ChangeLog::read_path(path).map_err(|e| { ProviderError::ParseError(format!( "Failed to parse changelog {}: {}", path.display(), e )) })?; let entry = cl .iter() .next() .ok_or_else(|| ProviderError::ParseError("Empty changelog".to_string()))?; let package = entry.package().ok_or_else(|| { ProviderError::ParseError(format!("Changelog {} has no package name", path.display())) })?; let version = entry.version(); let change_lines = entry.change_lines().collect::>(); Ok((package.to_string(), version, change_lines)) } #[cfg(feature = "debian")] pub async fn guess_from_debian_changelog( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let (package, version, change_lines) = read_changelog_first_entry(path)?; let mut ret = Vec::new(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(package.clone()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); if let Some(version) = version 
{ ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.upstream_version), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } #[cfg(feature = "debcargo")] if package.starts_with("rust-") { let debcargo_toml_path = path.parent().unwrap().join("debcargo.toml"); let debcargo_config = debcargo::config::Config::parse(debcargo_toml_path.as_path()) .map_err(|e| { ProviderError::ParseError(format!( "Failed to parse debcargo config {}: {}", path.display(), e )) })?; let semver_suffix = debcargo_config.semver_suffix; let (mut crate_name, _crate_semver_version) = parse_debcargo_source_name(&package, semver_suffix); if crate_name.contains('-') { crate_name = match crate::providers::rust::cargo_translate_dashes(crate_name.as_str()) .await .map_err(|e| { ProviderError::Other(format!( "Failed to translate dashes in crate name {}: {}", crate_name, e )) })? { Some(name) => name, None => { return Err(ProviderError::Other(format!( "Failed to translate dashes in crate name {}", crate_name ))) } }; } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("crates.io".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::CargoCrate(crate_name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(itp) = find_itp(&change_lines) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::DebianITP(itp), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.extend(guess_from_itp_bug(itp)?); } Ok(ret) } pub fn find_itp(changes: &[String]) -> Option { for line in changes { if let Some((_, itp)) = regex_captures!(r"\* Initial release. 
\(?Closes: #(\d+)\)?", line) { return Some(itp.parse().unwrap()); } } None } pub fn guess_from_itp_bug( bugno: i32, ) -> std::result::Result, ProviderError> { let debbugs = debbugs::blocking::Debbugs::default(); let log = debbugs.get_bug_log(bugno).map_err(|e| { ProviderError::ParseError(format!("Failed to get bug log for bug {}: {}", bugno, e)) })?; metadata_from_itp_bug_body( log[0].body.as_str(), Some(Origin::Other(format!("Debian bug #{}", bugno))), ) } /// Parse a debcargo source name and return crate. /// /// # Arguments /// * `source_name` - Source package name /// * `semver_suffix` - Whether semver_suffix is enabled /// /// # Returns /// tuple with crate name and optional semver pub fn parse_debcargo_source_name( source_name: &str, semver_suffix: bool, ) -> (String, Option) { let mut crate_name = source_name.strip_prefix("rust-").unwrap(); match crate_name.rsplitn(2, '-').collect::>().as_slice() { [semver, new_crate_name] if semver_suffix => { crate_name = new_crate_name; (crate_name.to_string(), Some(semver.to_string())) } _ => (crate_name.to_string(), None), } } #[cfg(feature = "debian")] pub fn guess_from_debian_rules( path: &Path, _settings: &GuesserSettings, ) -> Result, ProviderError> { let f = std::fs::File::open(path)?; let mf = makefile_lossless::Makefile::read_relaxed(f) .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/rules: {}", e)))?; let mut ret = vec![]; if let Some(variable) = mf .variable_definitions() .find(|v| v.name().as_deref() == Some("DEB_UPSTREAM_GIT")) { let certainty = Some(Certainty::Likely); let datum = UpstreamDatum::Repository(variable.raw_value().unwrap()); ret.push(UpstreamDatumWithMetadata { datum, certainty, origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(deb_upstream_url) = mf .variable_definitions() .find(|v| v.name().as_deref() == Some("DEB_UPSTREAM_URL")) { let certainty = Some(Certainty::Likely); let datum = UpstreamDatum::Download(deb_upstream_url.raw_value().unwrap()); 
ret.push(UpstreamDatumWithMetadata { datum, certainty, origin: Some(Origin::Path(path.to_path_buf())), }); } Ok(ret) } #[cfg(feature = "debian")] pub fn guess_from_debian_control( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut ret = vec![]; use std::str::FromStr; let control = debian_control::Control::from_str(&std::fs::read_to_string(path)?) .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/control: {}", e)))?; let source = control.source().unwrap(); let is_native = debian_is_native(path.parent().unwrap()).map_err(|e| { ProviderError::ParseError(format!("Failed to parse debian/source/format: {}", e)) })?; if let Some(homepage) = source.homepage() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(go_import_path) = source.as_deb822().get("XS-Go-Import-Path") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(go_import_path.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://{}", go_import_path)), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } if is_native == Some(true) { if let Some(vcs_git) = source.vcs_git() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(vcs_git), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(vcs_browser) = source.vcs_browser() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(vcs_browser), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } let binaries = control.binaries().collect::>(); let certainty = if binaries.len() == 1 && is_native == Some(true) { // Debian native package with only one binary package Certainty::Certain } else if binaries.len() > 1 && is_native == Some(true) { 
Certainty::Possible } else if binaries.len() == 1 && is_native == Some(false) { // Debian non-native package with only one binary package, so description is likely to be // good but might be Debian-specific Certainty::Confident } else { Certainty::Likely }; for binary in binaries { if let Some(description) = binary.description() { let lines = description.split('\n').collect::>(); let mut summary = lines[0].to_string(); let mut description_lines = &lines[1..]; if !description_lines.is_empty() && description_lines .last() .unwrap() .starts_with("This package contains") { summary = summary .split(" - ") .next() .unwrap_or(summary.as_str()) .to_string(); description_lines = description_lines.split_last().unwrap().1; } if !summary.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary), certainty: Some(certainty), origin: Some(path.into()), }); } if !description_lines.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description_lines.join("\n")), certainty: Some(certainty), origin: Some(path.into()), }); } } } Ok(ret) } #[cfg(feature = "debian")] pub async fn guess_from_debian_copyright( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use debian_copyright::lossless::{Copyright, Error}; let mut ret = vec![]; let text = &std::fs::read_to_string(path)?; let mut urls = vec![]; match Copyright::from_str_relaxed(text) { Ok((c, _)) => { let header = c.header().unwrap(); if let Some(upstream_name) = header.upstream_name() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(upstream_name.to_string()), certainty: Some(if upstream_name.contains(' ') { Certainty::Confident } else { Certainty::Certain }), origin: Some(path.into()), }); } if let Some(upstream_contact) = header.upstream_contact() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(upstream_contact), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let 
Some(source) = header.source() { if source.contains(' ') { urls.extend( source .split(|c| c == ' ' || c == '\n' || c == ',') .filter(|s| !s.is_empty()) .map(|s| s.to_string()), ); } else { urls.push(source.clone()); } for (m, _, _) in lazy_regex::regex_captures!(r"(http|https)://([^ ,]+)", source.as_str()) { urls.push(m.to_string()); } } if let Some(upstream_bugs) = header.as_deb822().get("X-Upstream-Bugs") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(upstream_bugs), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(source_downloaded_from) = header.as_deb822().get("X-Source-Downloaded-From") { if let Ok(url) = source_downloaded_from.parse::() { urls.push(url.to_string()); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(source_downloaded_from), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } let referenced_licenses = c .iter_licenses() .filter_map(|l| l.name()) .collect::>(); if referenced_licenses.len() == 1 { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(referenced_licenses.into_iter().next().unwrap()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Err(Error::IoError(e)) => { unreachable!("IO error: {}", e); } Err(Error::ParseError(e)) => { return Err(ProviderError::ParseError(e.to_string())); } Err(Error::NotMachineReadable) => { for line in text.lines() { if let Some(name) = line.strip_prefix("Upstream-Name: ") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Possible), origin: Some(Origin::Path(path.into())), }); } if let Some(url) = lazy_regex::regex_find!(r".* was downloaded from ([^\s]+)", line) { urls.push(url.to_string()); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } } for url in urls.into_iter() { if let Ok(url) = url.parse() { if 
let Some(repo_url) = crate::vcs::guess_repo_from_url(&url, None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } ret.extend(crate::metadata_from_url( url.as_str(), &Origin::Path(path.into()), )); } Ok(ret) } #[cfg(feature = "debian")] fn read_entries(path: &Path) -> Result, ProviderError> { use debian_changelog::ChangeLog; let get_package_name = || -> String { let text = std::fs::read_to_string(path.parent().unwrap().join("changelog")).unwrap(); let cl: ChangeLog = text.parse().unwrap(); let first_entry = cl.iter().next().unwrap(); first_entry.package().unwrap() }; let w: debian_watch::WatchFile = std::fs::read_to_string(path)? .parse() .map_err(|e| ProviderError::ParseError(format!("Failed to parse debian/watch: {}", e)))?; let entries = w .entries() .map(|e| (e.format_url(get_package_name), e.mode().unwrap_or_default())) .collect::>(); Ok(entries) } #[cfg(feature = "debian")] pub async fn guess_from_debian_watch( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut ret = vec![]; use debian_watch::Mode; let entries = read_entries(path)?; let origin = Origin::Path(path.into()); for (url, mode) in entries { match mode { Mode::Git => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } Mode::Svn => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } Mode::LWP => { if url.scheme() == "http" || url.scheme() == "https" { let url = url.clone(); if let Some(repo) = crate::vcs::guess_repo_from_url(&url, None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } } } }; 
ret.extend(crate::metadata_from_url(url.as_str(), &origin)); } Ok(ret) } #[cfg(feature = "debian")] pub fn debian_is_native(path: &Path) -> std::io::Result> { let format_file_path = path.join("source/format"); match File::open(format_file_path) { Ok(mut file) => { let mut content = String::new(); file.read_to_string(&mut content)?; return Ok(Some(content.trim() == "3.0 (native)")); } Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} Err(e) => return Err(e), } let changelog_file = path.join("changelog"); match File::open(changelog_file) { Ok(mut file) => { let cl = debian_changelog::ChangeLog::read(&mut file) .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; let first_entry = cl.iter().next().unwrap(); let version = first_entry.version().unwrap(); return Ok(Some(version.debian_revision.is_none())); } Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} Err(e) => return Err(e), } Ok(None) } #[cfg(test)] mod watch_tests { use super::*; #[cfg(feature = "debian")] #[tokio::test] async fn test_empty() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("watch"); std::fs::write( &path, r#" # Blah "#, ) .unwrap(); assert!(guess_from_debian_watch(&path, &GuesserSettings::default()) .await .unwrap() .is_empty()); } #[cfg(feature = "debian")] #[tokio::test] async fn test_simple() { let td = tempfile::tempdir().unwrap(); let path = td.path().join("watch"); std::fs::write( &path, r#"version=4 https://github.com/jelmer/dulwich/tags/dulwich-(.*).tar.gz "#, ) .unwrap(); assert_eq!( vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository("https://github.com/jelmer/dulwich".to_string()), certainty: Some(Certainty::Confident), origin: Some(path.clone().into()) }], guess_from_debian_watch(&path, &GuesserSettings::default()) .await .unwrap() ); } } upstream-ontologist-0.2.4/src/providers/doap.rs000064400000000000000000000244071046102023000177720ustar 00000000000000//! 
See https://github.com/ewilderj/doap use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::error; use std::fs::File; use std::path::Path; pub fn guess_from_doap( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path).expect("Failed to open file"); let doc = Element::parse(file).expect("Failed to parse XML"); let mut root = &doc; let mut results: Vec = Vec::new(); const DOAP_NAMESPACE: &str = "http://usefulinc.com/ns/doap#"; const RDF_NAMESPACE: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; const SCHEMA_NAMESPACE: &str = "https://schema.org/"; if root.name == "RDF" && root.namespace.as_deref() == Some(RDF_NAMESPACE) { for child in root.children.iter() { if let Some(element) = child.as_element() { root = element; break; } } } if root.name != "Project" || root.namespace.as_deref() != Some(DOAP_NAMESPACE) { return Err(ProviderError::ParseError(format!( "Doap file does not have DOAP project as root, but {}", root.name ))); } fn extract_url(el: &Element) -> Option<&str> { el.attributes.get("resource").map(|url| url.as_str()) } fn extract_lang(el: &Element) -> Option<&str> { el.attributes.get("lang").map(|lang| lang.as_str()) } let mut screenshots: Vec = Vec::new(); let mut maintainers: Vec = Vec::new(); for child in &root.children { let child = if let Some(element) = child.as_element() { element } else { continue; }; match (child.namespace.as_deref(), child.name.as_str()) { (Some(DOAP_NAMESPACE), "name") => { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "shortname") | (Some(DOAP_NAMESPACE), "short-name") => { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(text.to_string()), certainty: Some(Certainty::Certain), 
origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "bug-database") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "homepage") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "download-page") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "shortdesc") => { if let Some(lang) = extract_lang(child) { if lang == "en" { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } (Some(DOAP_NAMESPACE), "description") => { if let Some(lang) = extract_lang(child) { if lang == "en" { if let Some(text) = &child.get_text() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(text.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } (Some(DOAP_NAMESPACE), "license") => { // TODO: Handle license } (Some(DOAP_NAMESPACE), "repository") => { for repo in &child.children { let repo = if let Some(element) = repo.as_element() { element } else { continue; }; match repo.name.as_str() { "SVNRepository" | "GitRepository" => { if let Some(repo_location) = repo.get_child("location") { if let Some(repo_url) = extract_url(repo_location) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(web_location) = 
repo.get_child("browse") { if let Some(web_url) = extract_url(web_location) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(web_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } _ => (), } } } (Some(DOAP_NAMESPACE), "category") | (Some(DOAP_NAMESPACE), "programming-language") | (Some(DOAP_NAMESPACE), "os") | (Some(DOAP_NAMESPACE), "implements") | (Some(SCHEMA_NAMESPACE), "logo") | (Some(DOAP_NAMESPACE), "platform") => { // TODO: Handle other tags } (Some(SCHEMA_NAMESPACE), "screenshot") | (Some(DOAP_NAMESPACE), "screenshots") => { if let Some(url) = extract_url(child) { screenshots.push(url.to_string()); } } (Some(DOAP_NAMESPACE), "wiki") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Wiki(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "maintainer") => { for person in &child.children { let person = if let Some(element) = person.as_element() { element } else { continue; }; if person.name != "Person" { continue; } let name = if let Some(name_tag) = person.get_child("name") { name_tag.get_text().clone() } else { None }; let email = if let Some(email_tag) = person.get_child("mbox") { email_tag.get_text().as_ref().cloned() } else { None }; let url = if let Some(email_tag) = person.get_child("mbox") { extract_url(email_tag).map(|url| url.to_string()) } else { None }; maintainers.push(Person { name: name.map(|n| n.to_string()), email: email.map(|n| n.to_string()), url, }); } } (Some(DOAP_NAMESPACE), "mailing-list") => { if let Some(url) = extract_url(child) { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::MailingList(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } (Some(DOAP_NAMESPACE), "release") => {} _ => { error!("Unknown tag {} in DOAP file", child.name); } } } if maintainers.len() == 1 { let maintainer = 
maintainers.remove(0); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { for maintainer in maintainers { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.2.4/src/providers/git.rs000064400000000000000000000030261046102023000176240ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::path::Path; #[cfg(feature = "git-config")] pub fn guess_from_git_config( path: &Path, settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let config_file = gix_config::File::from_path_no_includes(path.to_path_buf(), gix_config::Source::Local) .map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut results = Vec::new(); // Check if there's a remote named "upstream" if let Some(remote_upstream) = config_file.string_by("remote", Some("upstream".into()), "url") { let url = remote_upstream.to_string(); if !url.starts_with("../") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } // Check if there's a remote named "origin" if !settings.trust_package { if let Some(remote_origin) = config_file.string_by("remote", Some("origin".into()), "url") { let url = remote_origin.to_string(); if !url.starts_with("../") { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } Ok(results) } upstream-ontologist-0.2.4/src/providers/go.rs000064400000000000000000000035301046102023000174460ustar 00000000000000//! 
See https://golang.org/doc/modules/gomod-ref use crate::{ Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use log::debug; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub fn guess_from_go_mod( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path).expect("Failed to open file"); let reader = BufReader::new(file); let mut results = Vec::new(); for line in reader.lines().map_while(Result::ok) { if line.starts_with("module ") { let modname = match line.trim().split_once(' ') { Some((_, modname)) => modname, None => { debug!("Failed to parse module name from line: {}", line); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(modname.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(results) } pub fn remote_go_metadata(package: &str) -> Result { let mut ret = UpstreamMetadata::default(); if package.starts_with("github.com/") { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::GoImportPath(package.to_string()), certainty: Some(Certainty::Certain), origin: None, }); let parts: Vec<&str> = package.split('/').collect(); ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://{}", parts[..3].join("/"))), certainty: Some(Certainty::Certain), origin: None, }); } Ok(ret) } upstream-ontologist-0.2.4/src/providers/gobo.rs000064400000000000000000000114001046102023000177620ustar 00000000000000use crate::UpstreamDatum; #[allow(dead_code)] #[derive(serde::Deserialize)] struct Contents { name: String, path: String, sha: String, size: u64, url: url::Url, html_url: url::Url, git_url: url::Url, download_url: Option, r#type: String, content: Option, encoding: Option, _links: Links, } #[allow(dead_code)] #[derive(serde::Deserialize)] struct Links { #[serde(rename = "self")] self_: String, git: url::Url, html: url::Url, } pub async fn 
guess_from_gobo(package: &str) -> Result, crate::ProviderError> { let packages_url = "https://api.github.com/repos/gobolinux/Recipes/contents" .parse() .unwrap(); let contents: Vec = serde_json::from_value(crate::load_json_url(&packages_url, None).await?).unwrap(); let package = match contents .iter() .find(|p| p.name.to_ascii_lowercase() == package.to_ascii_lowercase()) { Some(p) => p, None => { log::debug!("No gobo package named {}", package); return Ok(Vec::new()); } }; let versions: Vec = serde_json::from_value(crate::load_json_url(&package.url, None).await?).unwrap(); let last_version = if let Some(last_version) = versions.last() { &last_version.name } else { log::debug!("No versions for gobo package {}", package.name); return Ok(Vec::new()); }; let base_url: url::Url = format!( "https://raw.githubusercontent.com/gobolinux/Recipes/master/{}/{}/", package.name, last_version ) .parse() .unwrap(); let client = reqwest::Client::builder() .user_agent(crate::USER_AGENT) .build() .unwrap(); let mut result = Vec::new(); let recipe_url = base_url.join("Recipe").unwrap(); match client.get(recipe_url.as_ref()).send().await { Ok(response) => { let text = response.text().await.unwrap(); for line in text.lines() { if let Some(url) = line.strip_prefix("url=") { result.push(UpstreamDatum::Homepage(url.to_string())); } } } Err(e) => { if e.status() == Some(reqwest::StatusCode::NOT_FOUND) { log::error!("No recipe for existing gobo package {}", package.name); } else if e.status() == Some(reqwest::StatusCode::FORBIDDEN) { log::debug!("error loading {}: {}. 
rate limiting?", recipe_url, e); } else { return Err(crate::ProviderError::Other(e.to_string())); } } } let description_url = base_url.join("Resources/Description").unwrap(); match client.get(description_url.as_ref()).send().await { Ok(response) => { for line in response.text().await.unwrap().lines() { if let Some((_, key, value)) = lazy_regex::regex_captures!("\\[(.*)\\] (.*)", line) { match key { "Name" => result.push(UpstreamDatum::Name(value.to_string())), "Summary" => result.push(UpstreamDatum::Summary(value.to_string())), "License" => result.push(UpstreamDatum::License(value.to_string())), "Description" => result.push(UpstreamDatum::Description(value.to_string())), "Homepage" => result.push(UpstreamDatum::Homepage(value.to_string())), _ => log::warn!("Unknown field {} in gobo Description", key), } } } } Err(e) => { if e.status() == Some(reqwest::StatusCode::NOT_FOUND) { log::error!("No description for existing gobo package {}", package.name); } else if e.status() == Some(reqwest::StatusCode::FORBIDDEN) { log::debug!("error loading {}: {}. rate limiting?", description_url, e); return Ok(Vec::new()); } else { return Err(crate::ProviderError::Other(e.to_string())); } } } Ok(result) } pub struct Gobo; impl Default for Gobo { fn default() -> Self { Self::new() } } impl Gobo { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Gobo { fn name(&self) -> &'static str { "gobo" } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository"][..] 
} fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Possible } async fn guess_metadata(&self, name: &str) -> Result, crate::ProviderError> { guess_from_gobo(name).await } } upstream-ontologist-0.2.4/src/providers/haskell.rs000064400000000000000000000215161046102023000204700ustar 00000000000000use crate::{ Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub fn parse_cabal_lines( lines: impl Iterator, ) -> Vec<(Option, String, String)> { let mut ret = Vec::new(); let mut section = None; for line in lines { if line.trim_start().starts_with("--") { // Comment continue; } // Empty line if line.trim().is_empty() { section = None; continue; } let (field, value) = match line.split_once(':') { Some((field, value)) => (field.to_lowercase(), value.trim()), None => { if !line.starts_with(' ') { section = Some(line.trim().to_lowercase()); } else { log::debug!("Failed to parse line: {}", line); } continue; } }; if section.is_none() && !field.starts_with(' ') { ret.push((None, field.trim().to_string(), value.to_owned())); } else if field.starts_with(' ') { ret.push(( section.clone(), field.trim().to_lowercase(), value.to_owned(), )); } else { log::debug!("Invalid field {}", field); } } ret } pub fn guess_from_cabal_lines( lines: impl Iterator, ) -> std::result::Result, ProviderError> { let mut repo_url = None; let mut repo_branch = None; let mut repo_subpath = None; let mut results = Vec::new(); for (section, key, value) in parse_cabal_lines(lines) { match (section.as_deref(), key.as_str()) { (None, "homepage") => results.push(( UpstreamDatum::Homepage(value.to_owned()), Certainty::Certain, )), (None, "bug-reports") => results.push(( UpstreamDatum::BugDatabase(value.to_owned()), Certainty::Certain, )), (None, "name") => { results.push((UpstreamDatum::Name(value.to_owned()), Certainty::Certain)) } (None, "maintainer") => results.push(( 
UpstreamDatum::Maintainer(Person::from(value.as_str())), Certainty::Certain, )), (None, "copyright") => results.push(( UpstreamDatum::Copyright(value.to_owned()), Certainty::Certain, )), (None, "license") => { results.push((UpstreamDatum::License(value.to_owned()), Certainty::Certain)) } (None, "author") => results.push(( UpstreamDatum::Author(vec![Person::from(value.as_str())]), Certainty::Certain, )), (None, "synopsis") => { results.push((UpstreamDatum::Summary(value.to_owned()), Certainty::Certain)) } (None, "cabal-version") => {} (None, "build-depends") => {} (None, "build-type") => {} (Some("source-repository head"), "location") => repo_url = Some(value.to_owned()), (Some("source-repository head"), "branch") => repo_branch = Some(value.to_owned()), (Some("source-repository head"), "subdir") => repo_subpath = Some(value.to_owned()), (s, _) if s.is_some() && s.unwrap().starts_with("executable ") => {} _ => { log::debug!("Unknown field {:?} in section {:?}", key, section); } } } if let Some(repo_url) = repo_url { results.push(( UpstreamDatum::Repository(crate::vcs::unsplit_vcs_url(&crate::vcs::VcsLocation { url: repo_url.parse().unwrap(), branch: repo_branch, subpath: repo_subpath, })), Certainty::Certain, )); } Ok(results .into_iter() .map(|(datum, certainty)| UpstreamDatumWithMetadata { datum, certainty: Some(certainty), origin: None, }) .collect()) } pub fn guess_from_cabal( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); guess_from_cabal_lines( reader .lines() .map(|line| line.expect("Failed to read line")), ) } pub async fn remote_hackage_data(package: &str) -> Result { let mut ret = UpstreamMetadata::new(); for datum in guess_from_hackage(package).await? 
{ ret.insert(datum); } Ok(ret) } pub async fn guess_from_hackage( package: &str, ) -> std::result::Result, ProviderError> { let client = reqwest::Client::builder() .user_agent(crate::USER_AGENT) .build() .unwrap(); let url: url::Url = format!( "https://hackage.haskell.org/package/{}/{}.cabal", package, package ) .parse() .unwrap(); match client.get(url).send().await { Ok(response) => { let bytes = response.bytes().await?; let reader = BufReader::new(&bytes[..]); guess_from_cabal_lines( reader .lines() .map(|line| line.expect("Failed to read line")), ) } Err(e) => match e.status() { Some(reqwest::StatusCode::NOT_FOUND) => { log::warn!("Package {} not found on Hackage", package); Ok(Vec::new()) } _ => { log::warn!("Failed to fetch package {} from Hackage: {}", package, e); Err(ProviderError::Other(format!( "Failed to fetch package {} from Hackage: {}", package, e ))) } }, } } pub struct Hackage; impl Default for Hackage { fn default() -> Self { Self::new() } } impl Hackage { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Hackage { fn name(&self) -> &'static str { "Hackage" } fn max_supported_certainty(&self) -> Certainty { Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &[ "Homepage", "Name", "Repository", "Maintainer", "Copyright", "License", "Bug-Database", ][..] } async fn guess_metadata(&self, name: &str) -> Result, ProviderError> { Ok(guess_from_hackage(name) .await? 
.into_iter() .map(|v| v.datum) .collect()) } } #[cfg(test)] mod parse_tests { use super::*; #[test] fn test_parse_cabal_lines() { let lines = r#"Name: foo Version: 0.0 License: BSD3 Author: John Doe Maintainer: John Doe Cabal-Version: >= 1.10 Homepage: https://example.com Executable program1 Build-Depends: HUnit Main-Is: Main.hs source-repository head type: git location: https://github.com/example/blah "#; let parsed = parse_cabal_lines(lines.lines().map(|s| s.to_owned())); assert_eq!( parsed, vec![ (None, "name".to_owned(), "foo".to_owned()), (None, "version".to_owned(), "0.0".to_owned()), (None, "license".to_owned(), "BSD3".to_owned()), (None, "author".to_owned(), "John Doe".to_owned()), ( None, "maintainer".to_owned(), "John Doe ".to_owned() ), (None, "cabal-version".to_owned(), ">= 1.10".to_owned()), ( None, "homepage".to_owned(), "https://example.com".to_owned() ), ( Some("executable program1".to_owned()), "build-depends".to_owned(), "HUnit".to_owned() ), ( Some("executable program1".to_owned()), "main-is".to_owned(), "Main.hs".to_owned() ), ( Some("source-repository head".to_owned()), "type".to_owned(), "git".to_owned() ), ( Some("source-repository head".to_owned()), "location".to_owned(), "https://github.com/example/blah".to_owned() ) ] ); } } upstream-ontologist-0.2.4/src/providers/launchpad.rs000064400000000000000000000162611046102023000210050ustar 00000000000000use crate::{load_json_url, UpstreamDatum}; use log::error; #[cfg(feature = "launchpad")] pub async fn guess_from_launchpad( package: &str, distribution: Option<&str>, suite: Option<&str>, ) -> Option> { use distro_info::DistroInfo; use distro_info::UbuntuDistroInfo; let distribution = distribution.unwrap_or("ubuntu"); let suite = suite.map_or_else( || { if distribution == "ubuntu" { let ubuntu = UbuntuDistroInfo::new().unwrap(); Some( ubuntu .ubuntu_devel(chrono::Utc::now().date_naive()) .last()? 
.codename() .clone(), ) } else if distribution == "debian" { Some("sid".to_string()) } else { None } }, |x| Some(x.to_string()), ); let suite = suite?; let sourcepackage_url = format!( "https://api.launchpad.net/devel/{}/{}/+source/{}", distribution, suite, package ); let sourcepackage_data = load_json_url(&url::Url::parse(sourcepackage_url.as_str()).unwrap(), None) .await .unwrap(); if let Some(productseries_url) = sourcepackage_data.get("productseries_link") { let productseries_data = load_json_url( &url::Url::parse(productseries_url.as_str().unwrap()).unwrap(), None, ) .await .unwrap(); let project_link = productseries_data.get("project_link").cloned(); if let Some(project_link) = project_link { let project_data = load_json_url( &url::Url::parse(project_link.as_str().unwrap()).unwrap(), None, ) .await .unwrap(); let mut results = Vec::new(); if let Some(homepage_url) = project_data.get("homepage_url") { results.push(UpstreamDatum::Homepage( homepage_url.as_str().unwrap().to_string(), )); } if let Some(display_name) = project_data.get("display_name") { results.push(UpstreamDatum::Name( display_name.as_str().unwrap().to_string(), )); } if let Some(sourceforge_project) = project_data.get("sourceforge_project") { results.push(UpstreamDatum::SourceForgeProject( sourceforge_project.as_str().unwrap().to_string(), )); } if let Some(wiki_url) = project_data.get("wiki_url") { results.push(UpstreamDatum::Wiki(wiki_url.as_str().unwrap().to_string())); } if let Some(summary) = project_data.get("summary") { results.push(UpstreamDatum::Summary( summary.as_str().unwrap().to_string(), )); } if let Some(download_url) = project_data.get("download_url") { results.push(UpstreamDatum::Download( download_url.as_str().unwrap().to_string(), )); } if let Some(vcs) = project_data.get("vcs") { if vcs == "Bazaar" { if let Some(branch_link) = productseries_data.get("branch_link") { let code_import_data = load_json_url( &url::Url::parse( format!("{}/+code-import", 
branch_link.as_str().unwrap()).as_str(), ) .unwrap(), None, ) .await .unwrap(); if let Some(url) = code_import_data.get("url") { results .push(UpstreamDatum::Repository(url.as_str().unwrap().to_string())); } } else if let Some(official_codehosting) = project_data.get("official_codehosting") { if official_codehosting == "true" { let branch_data = load_json_url( &url::Url::parse( productseries_data.as_object().unwrap()["branch_link"] .as_str() .unwrap(), ) .unwrap(), None, ) .await .unwrap(); results.push(UpstreamDatum::Repository( branch_data.as_object().unwrap()["bzr_identity"] .as_str() .unwrap() .to_owned(), )); results.push(UpstreamDatum::RepositoryBrowse( branch_data.as_object().unwrap()["web_link"] .as_str() .unwrap() .to_owned(), )); } } } else if vcs == "Git" { let repo_link = format!( "https://api.launchpad.net/devel/+git?ws.op=getByPath&path={}", project_data["name"] ); let repo_data = load_json_url(&url::Url::parse(repo_link.as_str()).unwrap(), None) .await .unwrap(); if let Some(code_import_link) = repo_data.get("code_import_link") { let code_import_data = load_json_url( &url::Url::parse(code_import_link.as_str().unwrap()).unwrap(), None, ) .await .unwrap(); if let Some(url) = code_import_data.get("url") { results .push(UpstreamDatum::Repository(url.as_str().unwrap().to_owned())); } } else if let Some(official_codehosting) = project_data.get("official_codehosting") { if official_codehosting == "true" { results.push(UpstreamDatum::Repository( repo_data["git_https_url"].as_str().unwrap().to_owned(), )); results.push(UpstreamDatum::RepositoryBrowse( repo_data["web_link"].as_str().unwrap().to_owned(), )); } } } else { error!("unknown vcs: {:?}", vcs); } } return Some(results); } } None } upstream-ontologist-0.2.4/src/providers/maven.rs000064400000000000000000000153541046102023000201560ustar 00000000000000//! 
Documentation: https://maven.apache.org/pom.html use crate::{ vcs, Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::warn; use std::fs::File; use std::path::Path; pub fn guess_from_pom_xml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path).expect("Failed to open file"); let file = std::io::BufReader::new(file); let root = Element::parse(file) .map_err(|e| ProviderError::ParseError(format!("Unable to parse package.xml: {}", e)))?; let mut result = Vec::new(); if root.name == "project" { if let Some(name_tag) = root.get_child("name") { if let Some(name) = name_tag.get_text() { if !name.contains('$') { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } else if let Some(artifact_id_tag) = root.get_child("artifactId") { if let Some(artifact_id) = artifact_id_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(artifact_id.to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } if let Some(description_tag) = root.get_child("description") { if let Some(description) = description_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version_tag) = root.get_child("version") { if let Some(version) = version_tag.get_text() { if !version.contains('$') { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(licenses_tag) = root.get_child("licenses") { for license_tag in licenses_tag .children .iter() .filter(|c| c.as_element().map_or(false, |e| e.name == "license")) { if let Some(license_tag) = license_tag.as_element() 
{ if let Some(name_tag) = license_tag.get_child("name") { if let Some(license_name) = name_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license_name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } } for scm_tag in root .children .iter() .filter(|c| c.as_element().map_or(false, |e| e.name == "scm")) { if let Some(scm_tag) = scm_tag.as_element() { if let Some(url_tag) = scm_tag.get_child("url") { if let Some(url) = url_tag.get_text() { if url.starts_with("scm:") && url.matches(':').count() >= 3 { let url_parts: Vec<&str> = url.splitn(3, ':').collect(); let browse_url = url_parts[2]; if vcs::plausible_browse_url(browse_url) { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(browse_url.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } else { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(connection_tag) = scm_tag.get_child("connection") { if let Some(connection) = connection_tag.get_text() { let connection_parts: Vec<&str> = connection.splitn(3, ':').collect(); if connection_parts.len() == 3 && connection_parts[0] == "scm" { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(connection_parts[2].to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { warn!("Invalid format for SCM connection: {}", connection); } } } } } for issue_mgmt_tag in root.children.iter().filter(|c| { c.as_element() .map_or(false, |e| e.name == "issueManagement") }) { if let Some(issue_mgmt_tag) = issue_mgmt_tag.as_element() { if let Some(url_tag) = issue_mgmt_tag.get_child("url") { if let Some(url) = url_tag.get_text() { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: 
Some(path.into()), }); } } } } if let Some(url_tag) = root.get_child("url") { if let Some(url) = url_tag.get_text() { if !url.starts_with("scm:") { result.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.into_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } } Ok(result) } upstream-ontologist-0.2.4/src/providers/meson.rs000064400000000000000000000036151046102023000201660ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::process::Command; pub fn guess_from_meson( path: &std::path::Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // TODO(jelmer): consider looking for a meson build directory to call "meson // introspect" on // TODO(jelmer): mesonbuild is python; consider using its internal functions to parse // meson.build? let mut command = Command::new("meson"); command.arg("introspect").arg("--projectinfo").arg(path); let output = command.output().map_err(|_| { ProviderError::Other("meson not installed; skipping meson.build introspection".to_string()) })?; if !output.status.success() { return Err(ProviderError::Other(format!( "meson failed to run; exited with code {}", output.status.code().unwrap() ))); } let project_info: serde_json::Value = serde_json::from_slice(&output.stdout) .map_err(|e| ProviderError::Other(format!("Failed to parse meson project info: {}", e)))?; let mut results = Vec::new(); if let Some(descriptive_name) = project_info.get("descriptive_name") { if let Some(name) = descriptive_name.as_str() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_owned()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = project_info.get("version") { if let Some(version_str) = version.as_str() { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version_str.to_owned()), certainty: Some(Certainty::Certain), origin: 
Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.2.4/src/providers/metadata_json.rs000064400000000000000000000140061046102023000216520ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::warn; use std::fs::File; use std::io::Read; use std::path::Path; pub fn guess_from_metadata_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_json::Map = match serde_json::from_str(&contents) { Ok(data) => data, Err(e) => { return Err(ProviderError::ParseError(e.to_string())); } }; let mut upstream_data: Vec = Vec::new(); for (field, value) in data.iter() { match field.as_str() { "description" => { if let Some(description) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "name" => { if let Some(name) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "version" => { if let Some(version) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "url" => { if let Some(url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "license" => { if let Some(license) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "source" => { if let Some(repository) = 
value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "summary" => { if let Some(summary) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "issues_url" => { if let Some(issues_url) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(issues_url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "project_page" => { if let Some(project_page) = value.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(project_page.to_string()), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } "author" => { if let Some(author_value) = value.as_str() { let author = Person::from(author_value); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![author]), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } else if let Some(author_values) = value.as_array() { let authors: Vec = match author_values .iter() .map(|v| { Ok::(Person::from( v.as_str().ok_or("Author value is not a string")?, )) }) .collect::, _>>() { Ok(authors) => authors, Err(e) => { warn!("Error parsing author array: {}", e); continue; } }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors), certainty: Some(Certainty::Likely), origin: Some(path.into()), }); } } "operatingsystem_support" | "requirements" | "dependencies" => { // Skip these fields } _ => { warn!("Unknown field {} ({:?}) in metadata.json", field, value); } } } Ok(upstream_data) } upstream-ontologist-0.2.4/src/providers/metainfo.rs000064400000000000000000000060021046102023000206400ustar 00000000000000//! 
See https://www.freedesktop.org/software/appstream/docs/chap-Metadata.html use crate::{Certainty, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::fs::File; use std::path::Path; pub fn guess_from_metainfo( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { use xmltree::Element; let file = File::open(path)?; let root = Element::parse(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut results: Vec = Vec::new(); for child in root.children { let child = if let Some(element) = child.as_element() { element } else { continue; }; if child.name == "id" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "project_license" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "url" { if let Some(urltype) = child.attributes.get("type") { if urltype == "homepage" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if urltype == "bugtracker" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if child.name == "description" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "summary" { results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if child.name == "name" { 
results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(child.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.2.4/src/providers/mod.rs000064400000000000000000000100271046102023000176170ustar 00000000000000pub mod arch; pub mod authors; pub mod autoconf; pub mod composer_json; pub mod debian; pub mod doap; pub mod git; pub mod go; pub mod gobo; pub mod haskell; pub mod launchpad; pub mod maven; pub mod meson; pub mod metadata_json; pub mod metainfo; pub mod node; pub mod nuspec; #[cfg(feature = "opam")] pub mod ocaml; pub mod package_json; pub mod package_xml; pub mod package_yaml; pub mod perl; pub mod php; pub mod pubspec; pub mod python; pub mod r; pub mod repology; pub mod ruby; pub mod rust; pub mod security_md; pub mod waf; use crate::{Certainty, GuesserSettings, UpstreamDatum, UpstreamDatumWithMetadata}; use std::io::BufRead; pub async fn guess_from_install( path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, crate::ProviderError> { let mut ret = Vec::new(); let f = std::fs::File::open(path)?; let f = std::io::BufReader::new(f); let mut urls: Vec = Vec::new(); let mut lines = f.lines(); while let Some(oline) = lines.next() { let oline = oline?; let line = oline.trim(); let mut cmdline = line.trim().trim_start_matches('$').trim().to_string(); if cmdline.starts_with("git clone ") || cmdline.starts_with("fossil clone ") { while cmdline.ends_with('\\') { cmdline.push_str(lines.next().unwrap()?.trim()); cmdline = cmdline.trim().to_string(); } if let Some(url) = if cmdline.starts_with("git clone ") { crate::vcs_command::url_from_git_clone_command(cmdline.as_bytes()) } else if cmdline.starts_with("fossil clone ") { crate::vcs_command::url_from_fossil_clone_command(cmdline.as_bytes()) } else { None } { urls.push(url); } } for m in lazy_regex::regex!("[\"'`](git clone.*)[\"`']").find_iter(line) { if let Some(url) = 
crate::vcs_command::url_from_git_clone_command(m.as_str().as_bytes()) { urls.push(url); } } let project_re = "([^/]+)/([^/?.()\"#>\\s]*[^-/?.()\"#>\\s])"; for m in regex::Regex::new(format!("https://github.com/{}/(.git)?", project_re).as_str()) .unwrap() .find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.as_str().trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(m) = regex::Regex::new(format!("https://github.com/{}", project_re).as_str()) .unwrap() .captures(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( m.get(0).unwrap().as_str().trim_end_matches('.').to_string(), ), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some((url, _)) = lazy_regex::regex_captures!("git://([^ ]+)", line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex!("https://([^]/]+)/([^]\\s()\"#]+)").find_iter(line) { let url: url::Url = m.as_str().trim_end_matches('.').trim().parse().unwrap(); if crate::vcs::is_gitlab_site(url.host_str().unwrap(), None).await { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url, None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } } } } Ok(ret) } upstream-ontologist-0.2.4/src/providers/node.rs000064400000000000000000000166001046102023000177700ustar 00000000000000use crate::{ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata}; use serde::Deserialize; use std::collections::HashMap; #[derive(Deserialize)] pub struct NpmVersion { #[serde(rename = "dist")] pub dist: NpmDist, #[serde(rename = "dependencies")] pub dependencies: Option>, #[serde(rename = "devDependencies")] pub dev_dependencies: Option>, 
#[serde(rename = "peerDependencies")]
    pub peer_dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "optionalDependencies")]
    pub optional_dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "bundledDependencies")]
    pub bundled_dependencies: Option<Vec<String>>,
    #[serde(rename = "engines")]
    pub engines: Option<HashMap<String, String>>,
    #[serde(rename = "scripts")]
    pub scripts: Option<HashMap<String, String>>,
    pub name: String,
    pub version: String,
    #[serde(rename = "readmeFilename")]
    pub readme_filename: Option<String>,
    #[serde(rename = "maintainers")]
    pub maintainers: Vec<NpmPerson>,
    #[serde(rename = "author")]
    pub author: Option<NpmPerson>,
    #[serde(rename = "repository")]
    pub repository: Option<NpmRepository>,
    #[serde(rename = "bugs")]
    pub bugs: Option<NpmBugs>,
    #[serde(rename = "homepage")]
    pub homepage: Option<String>,
    #[serde(rename = "keywords")]
    pub keywords: Option<Vec<String>>,
    #[serde(rename = "license")]
    pub license: Option<String>,
}

/// `name`/`email` pair as it appears in npm registry documents.
#[derive(Deserialize)]
pub struct NpmPerson {
    pub name: String,
    pub email: String,
}

impl From<NpmPerson> for crate::Person {
    fn from(person: NpmPerson) -> Self {
        crate::Person {
            name: Some(person.name),
            email: Some(person.email),
            url: None,
        }
    }
}

/// Tarball/distribution information for one npm version.
#[derive(Deserialize)]
pub struct NpmDist {
    pub shasum: String,
    pub tarball: String,
    pub integrity: String,
    pub signatures: Vec<NpmSignature>,
}

#[derive(Deserialize)]
pub struct NpmSignature {
    pub keyid: String,
    pub sig: String,
}

#[derive(Deserialize)]
pub struct NpmRepository {
    #[serde(rename = "type")]
    pub type_: String,
    pub url: String,
}

#[derive(Deserialize)]
pub struct NpmBugs {
    pub url: String,
}

/// Top-level registry document for an npm package.
#[derive(Deserialize)]
pub struct NpmPackage {
    #[serde(rename = "_id")]
    pub id: String,
    #[serde(rename = "_rev")]
    pub rev: String,
    pub name: String,
    pub description: String,
    #[serde(rename = "dist-tags")]
    pub dist_tags: HashMap<String, String>,
    pub versions: HashMap<String, NpmVersion>,
    pub readme: String,
    pub maintainers: Vec<NpmPerson>,
    pub time: HashMap<String, String>,
    pub author: Option<NpmPerson>,
    pub repository: Option<NpmRepository>,
    pub bugs: Option<NpmBugs>,
    pub homepage: Option<String>,
    pub keywords: Option<Vec<String>>,
    pub license: Option<String>,
    pub dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "devDependencies")]
    pub dev_dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "peerDependencies")]
    pub peer_dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "optionalDependencies")]
    pub optional_dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "bundledDependencies")]
    pub bundled_dependencies: Option<Vec<String>>,
    pub engines: Option<HashMap<String, String>>,
    pub scripts: Option<HashMap<String, String>>,
    #[serde(rename = "readmeFilename")]
    pub readme_filename: Option<String>,
}

impl TryInto<UpstreamMetadata> for NpmPackage {
    type Error = ProviderError;

    /// Convert a registry document into generic upstream metadata.
    fn try_into(self) -> Result<UpstreamMetadata, Self::Error> {
        let mut metadata = UpstreamMetadata::default();
        metadata.insert(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Name(self.name.clone()),
            certainty: None,
            origin: None,
        });
        metadata.insert(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Description(self.description),
            certainty: None,
            origin: None,
        });
        if let Some(homepage) = self.homepage {
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Homepage(homepage),
                certainty: None,
                origin: None,
            });
        }
        if let Some(author) = self.author {
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Author(vec![author.into()]),
                certainty: None,
                origin: None,
            });
        }
        if let Some(repository) = self.repository {
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Repository(repository.url),
                certainty: None,
                origin: None,
            });
        }
        if let Some(bugs) = self.bugs {
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::BugDatabase(bugs.url),
                certainty: None,
                origin: None,
            });
        }
        if let Some(license) = self.license {
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::License(license),
                certainty: None,
                origin: None,
            });
        }
        if let Some(keywords) = self.keywords {
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Keywords(keywords),
                certainty: None,
                origin: None,
            });
        }
        // Resolve the "latest" dist-tag to a concrete version. A dangling
        // dist-tag (version missing from `versions`) is still an error, but
        // the map is now queried only once instead of twice.
        if let Some(latest_version) = self.dist_tags.get("latest") {
            let version_data = self.versions.get(latest_version).ok_or_else(|| {
                ProviderError::Other(format!(
                    "Could not find version {} in package {}",
                    latest_version, &self.name
                ))
            })?;
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Version(version_data.version.clone()),
                certainty: None,
                origin: None,
            });
            metadata.insert(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Download(version_data.dist.tarball.clone()),
                certainty: None,
                origin: None,
            });
        }
        Ok(metadata)
    }
}

/// Fetch a package document from the npm registry.
///
/// # Errors
/// Propagates HTTP errors from `load_json_url`; malformed registry responses
/// are reported as `ProviderError::ParseError` instead of panicking
/// (was `serde_json::from_value(data).unwrap()`).
pub async fn load_npm_package(package: &str) -> Result<Option<NpmPackage>, crate::ProviderError> {
    let http_url = format!("https://registry.npmjs.org/{}", package)
        .parse()
        .unwrap();
    let data = crate::load_json_url(&http_url, None).await?;
    serde_json::from_value(data).map_err(|e| crate::ProviderError::ParseError(e.to_string()))
}

/// Fetch npm registry metadata and convert it to `UpstreamMetadata`.
pub async fn remote_npm_metadata(package: &str) -> Result<UpstreamMetadata, ProviderError> {
    let data = load_npm_package(package).await?;
    match data {
        Some(data) => data.try_into(),
        None => Ok(UpstreamMetadata::default()),
    }
}

#[cfg(test)]
mod npm_tests {
    use super::*;

    #[test]
    fn test_load_npm_package() {
        let data = include_str!(".././testdata/npm.json");
        let npm_data: NpmPackage = serde_json::from_str(data).unwrap();
        assert_eq!(npm_data.name, "leftpad");
    }
}
upstream-ontologist-0.2.4/src/providers/nuspec.rs000064400000000000000000000123171046102023000203410ustar 00000000000000
use crate::xmlparse_simplify_namespaces;
use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata};
use std::path::Path;

// Documentation: https://docs.microsoft.com/en-us/nuget/reference/nuspec
pub async fn guess_from_nuspec(
    path: &Path,
    _trust_package: bool,
) -> std::result::Result<Vec<UpstreamDatumWithMetadata>, ProviderError> {
    const NAMESPACES: &[&str] = &["http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd"];
    // XML parsing and other logic
    let root = match xmlparse_simplify_namespaces(path, NAMESPACES) {
        Some(root) => root,
        None => {
            return Err(crate::ProviderError::ParseError(
                "Unable to parse nuspec".to_string(),
            ));
        }
    };
    assert_eq!(root.name, "package", "root tag is {}", root.name);
    let metadata = root.get_child("metadata");
    if metadata.is_none() {
        return Err(ProviderError::ParseError(
            "Unable to find metadata
tag".to_string(),
        ));
    }
    // Safe: guarded by the is_none() check above.
    let metadata = metadata.unwrap();
    let mut result = Vec::new();
    if let Some(version_tag) = metadata.get_child("version") {
        if let Some(version) = version_tag.get_text() {
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Version(version.into_owned()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(description_tag) = metadata.get_child("description") {
        if let Some(description) = description_tag.get_text() {
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Description(description.into_owned()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(authors_tag) = metadata.get_child("authors") {
        if let Some(authors) = authors_tag.get_text() {
            // nuspec "authors" is a comma-separated list.
            let authors = authors.split(',').map(Person::from).collect();
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Author(authors),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(project_url_tag) = metadata.get_child("projectUrl") {
        if let Some(project_url) = project_url_tag.get_text() {
            // Guard against malformed URLs in the input file instead of
            // panicking (was `url::Url::parse(..).unwrap()`).
            if let Ok(parsed) = url::Url::parse(&project_url) {
                let repo_url = crate::vcs::guess_repo_from_url(&parsed, None).await;
                if let Some(repo_url) = repo_url {
                    result.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Repository(repo_url),
                        certainty: Some(Certainty::Confident),
                        origin: Some(path.into()),
                    });
                }
            }
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Homepage(project_url.into_owned()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(license_tag) = metadata.get_child("license") {
        if let Some(license) = license_tag.get_text() {
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::License(license.into_owned()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(copyright_tag) = metadata.get_child("copyright") {
        if let Some(copyright) = copyright_tag.get_text() {
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Copyright(copyright.into_owned()),
                certainty: Some(Certainty::Certain),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(title_tag) = metadata.get_child("title") {
        if let Some(title) = title_tag.get_text() {
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Name(title.into_owned()),
                certainty: Some(Certainty::Likely),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(summary_tag) = metadata.get_child("summary") {
        if let Some(summary) = summary_tag.get_text() {
            result.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Summary(summary.into_owned()),
                certainty: Some(Certainty::Likely),
                origin: Some(path.into()),
            });
        }
    }
    if let Some(repository_tag) = metadata.get_child("repository") {
        if let Some(repo_url) = repository_tag.attributes.get("url") {
            let branch = repository_tag.attributes.get("branch");
            // Skip unparseable repository URLs rather than panicking
            // (was `repo_url.parse().unwrap()`).
            if let Ok(parsed) = repo_url.parse() {
                result.push(UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::Repository(crate::vcs::unsplit_vcs_url(
                        &crate::vcs::VcsLocation {
                            url: parsed,
                            branch: branch.cloned(),
                            subpath: None,
                        },
                    )),
                    certainty: Some(Certainty::Certain),
                    origin: Some(path.into()),
                });
            }
        }
    }
    Ok(result)
}
upstream-ontologist-0.2.4/src/providers/ocaml.rs000064400000000000000000000213161046102023000201360ustar 00000000000000//!
Documentation: https://opam.ocaml.org/doc/Manual.html#Package-definitions use crate::{Certainty, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use log::warn; use opam_file_rs::value::{OpamFileItem, OpamFileSection, ValueKind}; use std::fs::File; use std::io::Read; use std::path::Path; #[cfg(feature = "opam")] pub fn guess_from_opam( path: &Path, _trust_package: bool, ) -> std::result::Result, ProviderError> { let mut f = File::open(path)?; let mut contents = String::new(); f.read_to_string(&mut contents)?; let opam = opam_file_rs::parse(contents.as_str()) .map_err(|e| ProviderError::ParseError(format!("Failed to parse OPAM file: {:?}", e)))?; let mut results: Vec = Vec::new(); fn find_item<'a>(section: &'a OpamFileSection, name: &str) -> Option<&'a OpamFileItem> { for child in section.section_item.iter() { match child { OpamFileItem::Variable(_, n, _) if n == name => return Some(child), _ => (), } } None } for entry in opam.file_contents { match entry { OpamFileItem::Variable(_, name, value) if name == "maintainer" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for maintainer in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person::from(value.as_str())), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "license" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for license in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "homepage" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for homepage in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Homepage(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Section(_, section) if section.section_name.as_deref() == Some("dev-repo") => { match find_item(§ion, "repository") { Some(OpamFileItem::Variable(_, _, ref value)) => { let value = match value.kind { ValueKind::String(ref s) => s, _ => { warn!("Unexpected type for dev-repo in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(value.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } Some(o) => { warn!("Unexpected type for dev-repo in OPAM file: {:?}", o); continue; } None => { warn!("Missing repository for dev-repo in OPAM file"); continue; } } } OpamFileItem::Variable(_, name, value) if name == "bug-reports" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for bug-reports in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "synopsis" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for synopsis in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "description" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for description in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "doc" => { let value = match value.kind { ValueKind::String(s) => s, _ => 
{ warn!("Unexpected type for doc in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "version" => { let value = match value.kind { ValueKind::String(s) => s, _ => { warn!("Unexpected type for version in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, value) if name == "authors" => { let value = match value.kind { ValueKind::String(s) => vec![Person::from(s.as_str())], ValueKind::List(ref l) => l .iter() .filter_map(|v| match v.kind { ValueKind::String(ref s) => Some(Person::from(s.as_str())), _ => { warn!("Unexpected type for authors in OPAM file: {:?}", &value); None } }) .collect(), _ => { warn!("Unexpected type for authors in OPAM file: {:?}", value); continue; } }; results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(value), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } OpamFileItem::Variable(_, name, _) => { warn!("Unexpected variable in OPAM file: {}", name); } OpamFileItem::Section(_, section) => { warn!("Unexpected section in OPAM file: {:?}", section); } } } Ok(results) } upstream-ontologist-0.2.4/src/providers/package_json.rs000064400000000000000000000236551046102023000214770ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::error; use std::path::Path; use url::Url; pub fn guess_from_package_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { // see https://docs.npmjs.com/cli/v7/configuring-npm/package-json let file = std::fs::File::open(path)?; let package: serde_json::Value = serde_json::from_reader(file).map_err(|e| 
ProviderError::ParseError(e.to_string()))?;
    let mut upstream_data: Vec<UpstreamDatumWithMetadata> = Vec::new();
    let package = match package {
        serde_json::Value::Object(package) => package,
        _ => {
            return Err(ProviderError::ParseError(
                "package.json is not an object".to_string(),
            ));
        }
    };
    for (field, value) in package {
        match field.as_str() {
            // Simple string-valued fields, all recorded with Certain
            // certainty. Non-string values are logged and skipped instead of
            // panicking (was `value.as_str().unwrap()`).
            "name" | "homepage" | "description" | "license" | "demo" | "version" => {
                if let Some(s) = value.as_str() {
                    let datum = match field.as_str() {
                        "name" => UpstreamDatum::Name(s.to_string()),
                        "homepage" => UpstreamDatum::Homepage(s.to_string()),
                        // package.json "description" is a one-line summary.
                        "description" => UpstreamDatum::Summary(s.to_string()),
                        "license" => UpstreamDatum::License(s.to_string()),
                        "demo" => UpstreamDatum::Demo(s.to_string()),
                        "version" => UpstreamDatum::Version(s.to_string()),
                        _ => unreachable!(),
                    };
                    upstream_data.push(UpstreamDatumWithMetadata {
                        datum,
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                } else {
                    error!("Unexpected type for {} in package.json: {:?}", field, value);
                }
            }
            "repository" => {
                // May be a plain string or an object with a "url" member.
                let repo_url = if let Some(repo_url) = value.as_str() {
                    Some(repo_url)
                } else if let Some(repo) = value.as_object() {
                    if let Some(repo_url) = repo.get("url") {
                        repo_url.as_str()
                    } else {
                        None
                    }
                } else {
                    None
                };
                if let Some(repo_url) = repo_url {
                    match Url::parse(repo_url) {
                        Ok(url) if url.scheme() == "github" => {
                            // Some people seem to default to github. :(
                            let repo_url = format!("https://github.com/{}", url.path());
                            upstream_data.push(UpstreamDatumWithMetadata {
                                datum: UpstreamDatum::Repository(repo_url.to_string()),
                                certainty: Some(Certainty::Likely),
                                origin: Some(path.into()),
                            });
                        }
                        Err(url::ParseError::RelativeUrlWithoutBase) => {
                            // Some people seem to default to github. :(
                            let repo_url = format!("https://github.com/{}", repo_url);
                            upstream_data.push(UpstreamDatumWithMetadata {
                                datum: UpstreamDatum::Repository(repo_url.to_string()),
                                certainty: Some(Certainty::Likely),
                                origin: Some(path.into()),
                            });
                        }
                        Ok(url) => {
                            upstream_data.push(UpstreamDatumWithMetadata {
                                datum: UpstreamDatum::Repository(url.to_string()),
                                certainty: Some(Certainty::Certain),
                                origin: Some(path.into()),
                            });
                        }
                        Err(e) => {
                            // Log and skip instead of aborting on malformed
                            // repository URLs (was `panic!`).
                            error!("Failed to parse repository URL: {}", e);
                        }
                    }
                }
            }
            "bugs" => {
                if let Some(url) = value.as_str() {
                    upstream_data.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::BugDatabase(url.to_string()),
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                } else if let Some(email) = value.get("email").and_then(serde_json::Value::as_str) {
                    let url = format!("mailto:{}", email);
                    upstream_data.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::BugDatabase(url.to_string()),
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                }
            }
            "keywords" => {
                if let Some(keywords) = value.as_array() {
                    let keywords = keywords
                        .iter()
                        .filter_map(|keyword| keyword.as_str())
                        .map(String::from)
                        .collect();
                    upstream_data.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Keywords(keywords),
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                }
            }
            "author" => {
                // May be an object with name/url/email, or a single string.
                if let Some(author) = value.as_object() {
                    let name = author
                        .get("name")
                        .and_then(serde_json::Value::as_str)
                        .map(String::from);
                    let url = author
                        .get("url")
                        .and_then(serde_json::Value::as_str)
                        .map(String::from);
                    let email = author
                        .get("email")
                        .and_then(serde_json::Value::as_str)
                        .map(String::from);
                    let person = Person { name, url, email };
                    upstream_data.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Author(vec![person]),
                        certainty: Some(Certainty::Confident),
                        origin: Some(path.into()),
                    });
                } else if let Some(author) = value.as_str() {
                    let person = Person::from(author);
                    upstream_data.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Author(vec![person]),
                        certainty: Some(Certainty::Confident),
                        origin: Some(path.into()),
                    });
                } else {
                    error!("Unsupported type for author in package.json: {:?}", value);
                }
            }
            "dependencies" | "private" | "devDependencies" | "scripts" | "files" | "main" => {
                // Do nothing, skip these fields
            }
            _ => {
                error!("Unknown package.json field {} ({:?})", field, value);
            }
        }
    }
    Ok(upstream_data)
}

#[cfg(test)]
mod package_json_tests {
    use super::*;

    #[test]
    fn test_dummy() {
        let td = tempfile::tempdir().unwrap();
        let path = td.path().join("package.json");
        std::fs::write(
            &path,
            r#"{
    "name": "mozillaeslintsetup",
    "description": "This package file is for setup of ESLint.",
    "repository": {},
    "license": "MPL-2.0",
    "dependencies": {
        "eslint": "4.18.1",
        "eslint-plugin-html": "4.0.2",
        "eslint-plugin-mozilla": "file:tools/lint/eslint/eslint-plugin-mozilla",
        "eslint-plugin-no-unsanitized": "2.0.2",
        "eslint-plugin-react": "7.1.0",
        "eslint-plugin-spidermonkey-js": "file:tools/lint/eslint/eslint-plugin-spidermonkey-js"
    },
    "devDependencies": {}
}
"#,
        )
        .unwrap();
        let ret = guess_from_package_json(&path, &GuesserSettings::default()).unwrap();
        assert_eq!(
            ret,
            vec![
                UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::Summary(
                        "This package file is for setup of ESLint.".to_string()
                    ),
                    certainty: Some(Certainty::Certain),
                    origin: Some(path.clone().into()),
                },
                UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::License("MPL-2.0".to_string()),
                    certainty: Some(Certainty::Certain),
                    origin: Some(path.clone().into())
                },
                UpstreamDatumWithMetadata {
                    datum: UpstreamDatum::Name("mozillaeslintsetup".to_string()),
                    certainty: Some(Certainty::Certain),
                    origin: Some(path.into())
                }
            ]
        );
    }
}
upstream-ontologist-0.2.4/src/providers/package_xml.rs000064400000000000000000000161231046102023000213160ustar 00000000000000use crate::xmlparse_simplify_namespaces; use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use log::error; use std::path::Path; pub fn guess_from_package_xml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { use xmltree::{Element, XMLNode}; const NAMESPACES: &[&str] = &[ "http://pear.php.net/dtd/package-2.0", "http://pear.php.net/dtd/package-2.1", ]; let root = xmlparse_simplify_namespaces(path, NAMESPACES) .ok_or_else(|| ProviderError::ParseError("Unable to parse package.xml".to_string()))?; assert_eq!(root.name, "package", "root tag is {:?}", root.name); let mut upstream_data: Vec = Vec::new(); let mut leads: Vec<&Element> = Vec::new(); let mut maintainers: Vec<&Element> = Vec::new(); let mut authors: Vec<&Element> = Vec::new(); for child_element in &root.children { if let XMLNode::Element(ref element) = child_element { match element.name.as_str() { "name" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "summary" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "description" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "version" => { if let Some(release_tag) = element.get_child("release") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version( release_tag.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "license" => { 
upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(element.get_text().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "url" => { if let Some(url_type) = element.attributes.get("type") { match url_type.as_str() { "repository" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( element.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } "bugtracker" => { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase( element.get_text().unwrap().to_string(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } _ => {} } } } "lead" => { leads.push(element); } "maintainer" => { maintainers.push(element); } "author" => { authors.push(element); } "stability" | "dependencies" | "providesextension" | "extsrcrelease" | "channel" | "notes" | "contents" | "date" | "time" | "depend" | "exec_depend" | "buildtool_depend" => { // Do nothing, skip these fields } _ => { error!("Unknown package.xml tag {}", element.name); } } } } for lead_element in leads.iter().take(1) { let name_el = lead_element.get_child("name").unwrap().get_text(); let email_el = lead_element .get_child("email") .map(|s| s.get_text().unwrap()); let active_el = lead_element .get_child("active") .map(|s| s.get_text().unwrap()); if let Some(active_el) = active_el { if active_el != "yes" { continue; } } let person = Person { name: name_el.map(|s| s.to_string()), email: email_el.map(|s| s.to_string()), ..Default::default() }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(person), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if maintainers.len() == 1 { let maintainer_element = maintainers[0]; let name_el = maintainer_element.get_text().map(|s| s.into_owned()); let email_el = maintainer_element.attributes.get("email"); let person = Person { name: name_el, email: 
email_el.map(|s| s.to_string()), ..Default::default() }; upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(person), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if !authors.is_empty() { let persons = authors .iter() .map(|author_element| { let name_el = author_element.get_text().unwrap().into_owned(); let email_el = author_element.attributes.get("email"); Person { name: Some(name_el), email: email_el.map(|s| s.to_string()), ..Default::default() } }) .collect(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(persons), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } Ok(upstream_data) } upstream-ontologist-0.2.4/src/providers/package_yaml.rs000064400000000000000000000106651046102023000214650ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, }; use std::path::Path; pub fn guess_from_package_yaml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let reader = std::fs::File::open(path)?; let data: serde_yaml::Value = serde_yaml::from_reader(reader).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut ret = Vec::new(); if let Some(name) = data.get("name") { if let Some(name) = name.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = data.get("version") { if let Some(version) = version.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(authors) = data.get("author") { if let Some(author) = authors.as_str() { let authors = author.split(',').collect::>(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors.into_iter().map(Person::from).collect()), certainty: 
Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(maintainers) = data.get("maintainer") { if let Some(maintainer) = maintainers.as_str() { let maintainers = maintainer.split(',').collect::>(); let mut maintainers = maintainers .into_iter() .map(Person::from) .collect::>(); if let Some(maintainer) = maintainers.pop() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainer), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } if let Some(homepage) = data.get("homepage") { if let Some(homepage) = homepage.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(description) = data.get("description") { if let Some(description) = description.as_str() { if !description.starts_with("Please see the README") { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } } if let Some(synopsis) = data.get("synopsis") { if let Some(synopsis) = synopsis.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(synopsis.to_string()), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } if let Some(license) = data.get("license") { if let Some(license) = license.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(github) = data.get("github") { if let Some(github) = github.as_str() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!("https://github.com/{}", github)), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(repository) = data.get("repository") { if let Some(repository) = repository.as_str() { ret.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } Ok(ret) } upstream-ontologist-0.2.4/src/providers/perl.rs000064400000000000000000000447421046102023000200150ustar 00000000000000use crate::{ Certainty, GuesserSettings, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use lazy_regex::regex; use serde::Deserialize; use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader, Read}; use std::path::{Path, PathBuf}; use std::process::Command; pub fn guess_from_pod( contents: &str, origin: &Origin, ) -> std::result::Result, ProviderError> { let mut by_header: HashMap = HashMap::new(); let mut inheader: Option = None; for line in contents.lines() { if line.starts_with("=head1 ") { inheader = Some(line.trim_start_matches("=head1 ").to_string()); by_header.insert(inheader.clone().unwrap().to_uppercase(), String::new()); } else if let Some(header) = &inheader { if let Some(value) = by_header.get_mut(&header.to_uppercase()) { value.push_str(line) } } } let mut upstream_data: Vec = Vec::new(); if let Some(description) = by_header.get("DESCRIPTION") { let mut description = description.trim_start_matches('\n').to_string(); description = regex!(r"[FXZSCBI]\\<([^>]+)>") .replace_all(&description, "$1") .into_owned(); description = regex!(r"L\\<([^\|]+)\|([^\\>]+)\\>") .replace_all(&description, "$2") .into_owned(); description = regex!(r"L\\<([^\\>]+)\\>") .replace_all(&description, "$1") .into_owned(); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } if let Some(name) = by_header.get("NAME") { let lines: Vec<&str> = name.trim().lines().collect(); if let Some(line) = lines.first() { if let Some((name, summary)) = line.split_once(" - ") { upstream_data.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Name(name.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } else if !line.contains(' ') { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(line.trim().to_string()), certainty: Some(Certainty::Confident), origin: Some(origin.clone()), }); } } } Ok(upstream_data) } pub fn guess_from_perl_module( path: &Path, ) -> std::result::Result, ProviderError> { match Command::new("perldoc").arg("-u").arg(path).output() { Ok(output) => guess_from_pod( &String::from_utf8_lossy(&output.stdout), &Origin::Path(path.into()), ), Err(e) => Err(ProviderError::Other(format!( "Error running perldoc: {}", e ))), } } pub fn guess_from_perl_dist_name( path: &Path, dist_name: &str, ) -> std::result::Result, ProviderError> { let mod_path = PathBuf::from(format!( "{}/lib/{}.pm", std::path::Path::new(path) .parent() .expect("parent") .display(), dist_name.replace('-', "/") )); if mod_path.exists() { guess_from_perl_module(mod_path.as_path()) } else { Ok(Vec::new()) } } #[cfg(feature = "dist-ini")] pub fn guess_from_dist_ini( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let parser = ini::Ini::load_from_file(path) .map_err(|e| ProviderError::ParseError(format!("Error parsing dist.ini: {}", e)))?; let dist_name = parser .get_from::<&str>(None, "name") .map(|name| UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let version = parser .get_from::<&str>(None, "version") .map(|version| UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let summary = parser .get_from::<&str>(None, "abstract") .map(|summary| 
UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let bug_database = parser .get_from(Some("MetaResources"), "bugtracker.web") .map(|bugtracker| UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bugtracker.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let repository = parser .get_from(Some("MetaResources"), "repository.url") .map(|repository| UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let license = parser .get_from::<&str>(None, "license") .map(|license| UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); let copyright = match ( parser.get_from::<&str>(None, "copyright_year"), parser.get_from::<&str>(None, "copyright_holder"), ) { (Some(year), Some(holder)) => Some(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(format!("{} {}", year, holder)), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), _ => None, }; let mut upstream_data: Vec = Vec::new(); if let Some(dist_name) = dist_name { upstream_data.push(dist_name); } if let Some(version) = version { upstream_data.push(version); } if let Some(summary) = summary { upstream_data.push(summary); } if let Some(bug_database) = bug_database { upstream_data.push(bug_database); } if let Some(repository) = repository { upstream_data.push(repository); } if let Some(license) = license { upstream_data.push(license); } if let Some(copyright) = copyright { upstream_data.push(copyright); } if let Some(dist_name) = parser.get_from::<&str>(None, "name") { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } Ok(upstream_data) } pub fn guess_from_meta_json( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let 
mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_json::Map = serde_json::from_str(&contents) .map_err(|e| ProviderError::ParseError(format!("Error parsing META.json: {}", e)))?; let mut upstream_data: Vec = Vec::new(); if let Some(name) = data.get("name").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = data.get("version").and_then(serde_json::Value::as_str) { let version = version.strip_prefix('v').unwrap_or(version); upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(summary) = data.get("abstract").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(resources) = data.get("resources").and_then(serde_json::Value::as_object) { if let Some(bugtracker) = resources .get("bugtracker") .and_then(serde_json::Value::as_object) { if let Some(web) = bugtracker.get("web").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(web.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); // TODO: Support resources["bugtracker"]["mailto"] } } if let Some(homepage) = resources .get("homepage") .and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repo) = resources .get("repository") .and_then(serde_json::Value::as_object) { if let Some(url) = repo.get("url").and_then(serde_json::Value::as_str) { 
upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(web) = repo.get("web").and_then(serde_json::Value::as_str) { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::RepositoryBrowse(web.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } // Wild guess: if let Some(dist_name) = data.get("name").and_then(serde_json::Value::as_str) { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } Ok(upstream_data) } /// Guess upstream metadata from a META.yml file. /// /// See http://module-build.sourceforge.net/META-spec-v1.4.html for the /// specification of the format. pub fn guess_from_meta_yml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut file = File::open(path)?; let mut contents = String::new(); file.read_to_string(&mut contents)?; let data: serde_yaml::Value = serde_yaml::from_str(&contents) .map_err(|e| ProviderError::ParseError(format!("Error parsing META.yml: {}", e)))?; let mut upstream_data = Vec::new(); if let Some(name) = data.get("name") { if let Some(name) = name.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(license) = data.get("license") { if let Some(license) = license.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(version) = data.get("version") { if let Some(version) = version.as_str() { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } if let Some(resources) = data.get("resources") { if let Some(bugtracker) = 
resources.get("bugtracker") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bugtracker.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = resources.get("homepage") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = resources.get("repository") { if let Some(url) = repository.get("url") { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } // Wild guess: if let Some(dist_name) = data.get("name") { if let Some(dist_name) = dist_name.as_str() { upstream_data.extend(guess_from_perl_dist_name(path, dist_name)?); } } Ok(upstream_data) } pub fn guess_from_makefile_pl( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let mut dist_name = None; let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); let name_regex = regex!("name '([^'\"]+)';$"); let repository_regex = regex!("repository '([^'\"]+)';$"); for line in reader.lines().map_while(Result::ok) { if let Some(captures) = name_regex.captures(&line) { dist_name = Some(captures.get(1).unwrap().as_str().to_owned()); let name = dist_name.as_ref().unwrap().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if let Some(captures) = repository_regex.captures(&line) { let repository = captures.get(1).unwrap().as_str().to_owned(); 
results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } if let Some(dist_name) = dist_name { results.extend(guess_from_perl_dist_name(path, &dist_name)?); } Ok(results) } #[derive(Deserialize)] pub struct Module { pub version_numified: f64, pub version: String, pub authorized: bool, pub name: String, pub indexed: bool, } #[derive(Deserialize)] pub struct Stat { pub uid: isize, pub mtime: isize, pub size: isize, pub mode: isize, pub gid: isize, } #[derive(Deserialize)] pub struct CpanModule { pub maturity: String, pub release: String, pub author: String, pub slop: isize, pub download_url: url::Url, pub module: Vec, pub pod_lines: Vec, pub version: String, pub deprecated: bool, pub level: isize, pub mime: String, pub date: String, pub path: String, pub distribution: String, pub pod: String, pub name: String, pub sloc: isize, pub stat: Stat, pub version_numified: f64, pub binary: bool, pub id: String, pub directory: bool, pub indexed: bool, pub authorized: bool, } impl TryFrom for UpstreamMetadata { type Error = crate::ProviderError; fn try_from(value: CpanModule) -> Result { let mut metadata = UpstreamMetadata::default(); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.name), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.version), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.download_url.to_string()), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(value.download_url.to_string()), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![crate::Person::from(value.author.as_str())]), certainty: 
Some(Certainty::Certain), origin: None, }); Ok(metadata) } } pub async fn load_cpan_data(module: &str) -> Result, crate::ProviderError> { let url = format!("https://fastapi.metacpan.org/v1/release/{}", module) .parse() .unwrap(); let data = crate::load_json_url(&url, None).await?; Ok(Some(serde_json::from_value(data).unwrap())) } pub async fn remote_cpan_data(module: &str) -> Result { let data = load_cpan_data(module).await?; match data { Some(data) => data.try_into(), None => Ok(UpstreamMetadata::default()), } } #[cfg(test)] mod tests { use super::*; #[test] fn test_load_from_json() { let text = include_str!("../testdata/cpan.json"); let cpan_module: CpanModule = serde_json::from_str(text).unwrap(); assert_eq!("Parse-Pidl-0.02", cpan_module.release); } } upstream-ontologist-0.2.4/src/providers/php.rs000064400000000000000000000075441046102023000176410ustar 00000000000000use crate::{ProviderError, UpstreamDatum}; use select::document::Document; use select::predicate::{And, Name, Predicate}; pub async fn guess_from_pecl_package(package: &str) -> Result, ProviderError> { let url = format!("https://pecl.php.net/packages/{}", package); let client = reqwest::Client::builder() .user_agent(crate::USER_AGENT) // PECL is slow .timeout(std::time::Duration::from_secs(15)) .build() .unwrap(); let response = client .get(url) .send() .await .map_err(|e| ProviderError::Other(e.to_string()))?; match response.status() { reqwest::StatusCode::NOT_FOUND => { return Ok(vec![]); } status if !status.is_success() => { return Err(ProviderError::Other(format!("HTTP error: {}", status))); } _ => {} } let body = response .text() .await .map_err(|e| ProviderError::Other(e.to_string()))?; guess_from_pecl_page(&body) } struct TextContains<'a>(&'a str); impl<'a> Predicate for TextContains<'a> { fn matches(&self, node: &select::node::Node) -> bool { node.text().contains(self.0) } } fn find_tags_by_text<'a>( document: &'a Document, tag_name: &'a str, text: &'a str, ) -> Vec> { document 
.find(And(Name(tag_name), TextContains(text))) .collect() } fn guess_from_pecl_page(body: &str) -> Result, ProviderError> { let document = Document::from(body); let mut ret = Vec::new(); let browse_source_selector = find_tags_by_text(&document, "a", "Browse Source") .into_iter() .next(); if let Some(node) = browse_source_selector { ret.push(UpstreamDatum::RepositoryBrowse( node.attr("href").unwrap().to_string(), )); } let package_bugs_selector = find_tags_by_text(&document, "a", "Package Bugs") .into_iter() .next(); if let Some(node) = package_bugs_selector { ret.push(UpstreamDatum::BugDatabase( node.attr("href").unwrap().to_string(), )); } let homepage_selector = find_tags_by_text(&document, "th", "Homepage") .into_iter() .next() .unwrap() .parent() .unwrap() .find(Name("td").descendant(Name("a"))) .next(); if let Some(node) = homepage_selector { ret.push(UpstreamDatum::Homepage( node.attr("href").unwrap().to_string(), )); } Ok(ret) } pub struct Pecl; impl Default for Pecl { fn default() -> Self { Self::new() } } impl Pecl { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for Pecl { fn name(&self) -> &'static str { "Pecl" } fn max_supported_certainty(&self) -> crate::Certainty { crate::Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Repository", "Bug-Database"] } async fn guess_metadata(&self, name: &str) -> Result, ProviderError> { guess_from_pecl_package(name).await } } #[cfg(test)] mod pecl_tests { use super::*; #[test] fn test_guess_from_pecl_page() { let text = include_str!("../testdata/pecl.html"); let ret = guess_from_pecl_page(text).unwrap(); assert_eq!( ret, vec![ UpstreamDatum::RepositoryBrowse( "https://github.com/eduardok/libsmbclient-php".to_string() ), UpstreamDatum::BugDatabase( "https://github.com/eduardok/libsmbclient-php/issues".to_string() ), UpstreamDatum::Homepage("https://github.com/eduardok/libsmbclient-php".to_string()) ] ); } } 
upstream-ontologist-0.2.4/src/providers/pubspec.rs000064400000000000000000000052051046102023000205030ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use std::fs::File; use std::path::Path; #[derive(serde::Deserialize)] struct Pubspec { name: Option, description: Option, version: Option, homepage: Option, repository: Option, documentation: Option, issue_tracker: Option, } pub fn guess_from_pubspec_yaml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let pubspec: Pubspec = serde_yaml::from_reader(file).map_err(|e| ProviderError::ParseError(e.to_string()))?; let mut upstream_data: Vec = Vec::new(); if let Some(name) = pubspec.name { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = pubspec.description { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(version) = pubspec.version { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = pubspec.homepage { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = pubspec.repository { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(documentation) = pubspec.documentation { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(documentation), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(issue_tracker) 
= pubspec.issue_tracker { upstream_data.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(issue_tracker), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Ok(upstream_data) } upstream-ontologist-0.2.4/src/providers/python.rs000064400000000000000000001370701046102023000203710ustar 00000000000000use crate::{ vcs, Certainty, GuesserSettings, Origin, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use log::{debug, warn}; use serde::Deserialize; #[cfg(feature = "pyo3")] use pyo3::prelude::*; use std::collections::HashMap; use std::path::Path; #[cfg(feature = "python-pkginfo")] pub async fn guess_from_pkg_info( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let contents = std::fs::read(path)?; let dist = python_pkginfo::Metadata::parse(contents.as_slice()).map_err(|e| { ProviderError::ParseError(format!("Failed to parse python package metadata: {}", e)) })?; let mut ret = vec![]; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(dist.name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(dist.version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(homepage) = dist.home_page { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(summary) = dist.summary { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(summary), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = dist.description { ret.extend(parse_python_long_description( description.as_str(), dist.description_content_type.as_deref(), &Origin::Path(path.to_path_buf()), )?); } ret.extend(parse_python_project_urls( dist.project_urls .iter() .map(|k| k.split_once(", ").unwrap()) .map(|(k, v)| (k.to_string(), 
v.to_string())), &Origin::Path(path.to_path_buf()), )); if dist.author.is_some() || dist.author_email.is_some() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: dist.author, email: dist.author_email, url: None, }]), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if dist.maintainer.is_some() || dist.maintainer_email.is_some() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: dist.maintainer, email: dist.maintainer_email, url: None, }), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = dist.license { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(keywords) = dist.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords.split(", ").map(|s| s.to_string()).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(download_url) = dist.download_url { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } Ok(ret) } #[cfg(feature = "pyproject-toml")] pub fn guess_from_pyproject_toml( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let content = std::fs::read_to_string(path)?; let mut ret = Vec::new(); use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct PyProjectToml { #[serde(flatten)] inner: pyproject_toml::PyProjectToml, tool: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] pub struct Tool { poetry: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] pub struct ToolPoetry { version: Option, description: Option, license: Option, repository: Option, name: String, urls: Option>, keywords: Option>, 
authors: Option>, homepage: Option, documentation: Option, } let pyproject: PyProjectToml = toml::from_str(content.as_str()).map_err(|e| ProviderError::ParseError(e.to_string()))?; if let Some(inner_project) = pyproject.inner.project { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(inner_project.name), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(version) = inner_project.version { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(pyproject_toml::License::Spdx(license)) = inner_project.license.as_ref() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license.clone()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } fn contact_to_person(contact: &pyproject_toml::Contact) -> Person { Person { name: contact.name().map(|s| s.to_string()), email: contact.email().map(|s| s.to_string()), url: None, } } if let Some(authors) = inner_project.authors { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(authors.iter().map(contact_to_person).collect()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(maintainers) = inner_project.maintainers { let maintainers: Vec<_> = maintainers.iter().map(contact_to_person).collect(); let certainty = if maintainers.len() == 1 { Certainty::Certain } else { Certainty::Possible }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(maintainers[0].clone()), certainty: Some(certainty), origin: Some(path.into()), }); } if let Some(keywords) = inner_project.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(urls) = inner_project.urls { ret.extend(parse_python_project_urls( urls.into_iter(), &Origin::Path(path.to_path_buf()), )); } if let Some(classifiers) 
= inner_project.classifiers { ret.extend(parse_python_classifiers( classifiers.iter().map(|s| s.as_str()), &Origin::Path(path.to_path_buf()), )); } } if let Some(tool) = pyproject.tool { if let Some(poetry) = tool.poetry { if let Some(version) = poetry.version { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(description) = poetry.description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(license) = poetry.license { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(repository) = poetry.repository { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(poetry.name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); if let Some(urls) = poetry.urls { ret.extend(parse_python_project_urls( urls.into_iter(), &Origin::Path(path.to_path_buf()), )); } if let Some(keywords) = poetry.keywords { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(authors) = poetry.authors { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author( authors.iter().map(|p| Person::from(p.as_str())).collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(homepage) = poetry.homepage { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } if let Some(documentation) = poetry.documentation { ret.push(UpstreamDatumWithMetadata { 
datum: UpstreamDatum::Documentation(documentation), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } } Ok(ret) } fn parse_python_project_urls( urls: impl Iterator, origin: &Origin, ) -> Vec { let mut ret = Vec::new(); for (url_type, url) in urls { match url_type.as_str() { "GitHub" | "Repository" | "Source Code" | "Source" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Bug Tracker" | "Bug Reports" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Documentation" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Funding" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Funding(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "Homepage" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } _u => { debug!("Unknown Python project URL type: {}", url_type); } } } ret } fn parse_python_long_description( long_description: &str, content_type: Option<&str>, origin: &Origin, ) -> std::result::Result, ProviderError> { if long_description.is_empty() { return Ok(vec![]); } let content_type = content_type.unwrap_or("text/plain"); let mut content_type = content_type.split(';').next().unwrap(); if long_description.contains("-*-restructuredtext-*-") { content_type = "text/restructured-text"; } let mut ret = vec![]; match content_type { "text/plain" => { let lines = long_description.split('\n').collect::>(); if lines.len() > 30 { debug!("Long description is too long ({} lines)", lines.len()); return Ok(vec![]); } ret.push(UpstreamDatumWithMetadata 
{ datum: UpstreamDatum::Description(long_description.to_string()), certainty: Some(Certainty::Possible), origin: Some(origin.clone()), }); } "text/restructured-text" | "text/x-rst" => { let (description, extra_md) = crate::readme::description_from_readme_rst(long_description) .map_err(|e| ProviderError::Other(e.to_string()))?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(Origin::Other( "python long description (restructuredText)".to_string(), )), }); } ret.extend(extra_md); } "text/markdown" => { let (description, extra_md) = crate::readme::description_from_readme_md(long_description) .map_err(|e| ProviderError::Other(e.to_string()))?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(Origin::Other( "python long description (markdown)".to_string(), )), }); } ret.extend(extra_md); } _ => { warn!("Unknown content type: {}", content_type); } } Ok(ret) } pub async fn parse_python_url(url: &str) -> Vec { let repo = vcs::guess_repo_from_url(&url::Url::parse(url).unwrap(), None).await; if let Some(repo) = repo { return vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo), certainty: Some(Certainty::Likely), origin: None, }]; } vec![UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url.to_string()), certainty: Some(Certainty::Likely), origin: None, }] } #[cfg(feature = "setup-cfg")] pub async fn guess_from_setup_cfg( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let setup_cfg = ini::Ini::load_from_file(path).map_err(|e| ProviderError::ParseError(e.to_string()))?; let metadata = match setup_cfg.section(Some("metadata")) { Some(metadata) => metadata, None => { debug!("No [metadata] section in setup.cfg"); return Ok(vec![]); } }; let origin = 
Origin::Path(path.to_path_buf()); let mut ret = vec![]; for (field, value) in metadata.iter() { match field { "name" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "version" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "url" => { ret.extend(parse_python_url(value).await); } "description" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "summary" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "long_description" => { if let Some(path) = value.strip_prefix(value) { if path.contains('/') { debug!("Ignoring long_description path: {}", path); continue; } let value = match std::fs::read_to_string(path) { Ok(value) => value, Err(e) => { debug!("Failed to read long_description file: {}", e); continue; } }; ret.extend(parse_python_long_description( &value, metadata.get("long_description_content_type"), &origin, )?); } else { ret.extend(parse_python_long_description( value, metadata.get("long_description_content_type"), &origin, )?); } } "maintainer" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some(value.to_string()), email: metadata .get("maintainer_email") .or_else(|| metadata.get("maintainer-email")) .map(|s| s.to_string()), url: None, }), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "author" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: Some(value.to_string()), email: metadata .get("author_email") .or_else(|| metadata.get("author-email")) .map(|s| s.to_string()), url: None, }]), certainty: 
Some(Certainty::Certain), origin: Some(origin.clone()), }); } "project_urls" => { let urls = value.split('\n').filter_map(|s| { if s.is_empty() { return None; } let (key, value) = match s.split_once('=') { Some((key, value)) => (key, value), None => { debug!("Invalid project_urls line: {}", s); return None; } }; Some((key.to_string(), value.to_string())) }); ret.extend(parse_python_project_urls(urls, &origin)); } "license" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "home-page" => { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(value.to_string()), certainty: Some(Certainty::Certain), origin: Some(origin.clone()), }); } "long_description_content_type" | "maintainer_email" | "author_email" | "maintainer-email" | "author-email" => { // Ignore these, they are handled elsewhere } _ => { warn!("Unknown setup.cfg field: {}", field); } } } Ok(ret) } #[cfg(feature = "pyo3")] async fn guess_from_setup_py_executed( path: &Path, ) -> std::result::Result, ProviderError> { // Ensure only one thread can run this function at a time static SETUP_PY_LOCK: tokio::sync::Mutex<()> = tokio::sync::Mutex::const_new(()); let _guard = SETUP_PY_LOCK.lock().await; let mut ret = Vec::new(); // Import setuptools, just in case it replaces distutils // use pyo3::types::PyDict; pyo3::prepare_freethreaded_python(); let mut long_description = None; let mut urls = vec![]; Python::with_gil(|py| { let _ = py.import_bound("setuptools"); let run_setup = py.import_bound("distutils.core")?.getattr("run_setup")?; let os = py.import_bound("os")?; let orig = match os.getattr("getcwd")?.call0() { Ok(orig) => Some(orig.extract::()?), Err(e) => { debug!("Failed to get current directory: {}", e); None } }; let parent = path.parent().unwrap(); os.getattr("chdir")?.call1((parent,))?; let result = || -> PyResult<_> { let kwargs = PyDict::new_bound(py); 
kwargs.set_item("stop_after", "config")?; run_setup.call((path,), Some(&kwargs)) }(); if let Some(orig) = orig { os.getattr("chdir")?.call1((orig,))?; } let result = result?; if let Some(name) = result.call_method0("get_name")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(version) = result.call_method0("get_version")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(url) = result .call_method0("get_url")? .extract::>()? { urls.push(url); } if let Some(download_url) = result.call_method0("get_download_url")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(license) = result.call_method0("get_license")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Likely), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(contact) = result.call_method0("get_contact")?.extract()? { let contact: String = match result .call_method0("get_contact_email")? .extract::>()? { Some(email) => format!("{} <{}>", contact, email), None => contact, }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Contact(contact), certainty: Some(Certainty::Certain), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(description) = result.call_method0("get_description")?.extract()? { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(Origin::Path(path.to_path_buf())), }); } if let Some(description) = result .call_method0("get_long_description")? .extract::>()? 
{ let content_type = match result.getattr("long_description_content_type") { Ok(content_type) => content_type.extract::>(), Err(e) if e.is_instance_of::(py) => Ok(None), Err(e) => return Err(e), }?; long_description = Some((description, content_type)); } if let Ok(metadata) = result.getattr("metadata") { if let Ok(project_urls) = metadata.getattr("project_urls") { ret.extend(parse_python_project_urls( project_urls .extract::>()? .into_iter(), &Origin::Path(path.to_path_buf()), )); } } Ok::<(), PyErr>(()) }) .map_err(|e| { warn!("Failed to run setup.py: {}", e); ProviderError::Other(e.to_string()) })?; if let Some((long_description, long_description_content_type)) = long_description { ret.extend(parse_python_long_description( long_description.as_str(), long_description_content_type.as_deref(), &Origin::Path(path.to_path_buf()), )?); } for url in urls { ret.extend(parse_python_url(&url).await); } Ok(ret) } #[cfg(feature = "pyo3")] pub async fn guess_from_setup_py( path: &Path, trust_package: bool, ) -> std::result::Result, ProviderError> { if trust_package { guess_from_setup_py_executed(path).await } else { guess_from_setup_py_parsed(path).await } } #[cfg(feature = "pyo3")] async fn guess_from_setup_py_parsed( path: &Path, ) -> std::result::Result, ProviderError> { pyo3::prepare_freethreaded_python(); let code = match std::fs::read_to_string(path) { Ok(setup_text) => setup_text, Err(e) => { warn!("Failed to read setup.py: {}", e); return Err(ProviderError::IoError(e)); } }; let mut long_description = None; let mut ret = Vec::new(); let mut urls = vec![]; Python::with_gil(|py| { let ast = py.import_bound("ast").unwrap(); // Based on pypi.py in https://github.com/nexB/scancode-toolkit/blob/develop/src/packagedcode/pypi.py // // Copyright (c) nexB Inc. and others. All rights reserved. // ScanCode is a trademark of nexB Inc. 
// SPDX-License-Identifier: Apache-2.0 let tree = ast.call_method1("parse", (code,))?; let mut setup_args: HashMap = HashMap::new(); let ast_expr = ast.getattr("Expr").unwrap(); let ast_call = ast.getattr("Call").unwrap(); let ast_assign = ast.getattr("Assign").unwrap(); let ast_name = ast.getattr("Name").unwrap(); for statement in tree.getattr("body")?.iter()? { let statement = statement?; // We only care about function calls or assignments to functions named // `setup` or `main` if (statement.is_instance(&ast_expr)? || statement.is_instance(&ast_call)? || statement.is_instance(&ast_assign)?) && statement.getattr("value")?.is_instance(&ast_call)? && statement .getattr("value")? .getattr("func")? .is_instance(&ast_name)? && (statement.getattr("value")?.getattr("func")?.getattr("id")?.extract::()? == "setup" || // we also look for main as sometimes this is used instead of // setup() statement.getattr("value")?.getattr("func")?.getattr("id")?.extract::()? == "main") { let value = statement.getattr("value")?; // Process the arguments to the setup function for kw in value.getattr("keywords")?.iter()? { let kw = kw?; let arg_name = kw.getattr("arg")?.extract::()?; setup_args.insert(arg_name, kw.getattr("value")?.to_object(py)); } } } // End code from https://github.com/nexB/scancode-toolkit/blob/develop/src/packagedcode/pypi.py let ast_str = ast.getattr("Str").unwrap(); let ast_constant = ast.getattr("Constant").unwrap(); let get_str_from_expr = |expr: &Bound| -> Option { if expr.is_instance(&ast_str).ok()? { Some(expr.getattr("s").ok()?.extract::().ok()?) } else if expr.is_instance(&ast_constant).ok()? { Some(expr.getattr("value").ok()?.extract::().ok()?) 
} else { None } }; let ast_list = ast.getattr("List").unwrap(); let ast_tuple = ast.getattr("Tuple").unwrap(); let ast_set = ast.getattr("Set").unwrap(); let get_str_list_from_expr = |expr: &Bound| -> Option> { // We collect the elements of a list if the element // and tag function calls if expr.is_instance(&ast_list).ok()? || expr.is_instance(&ast_tuple).ok()? || expr.is_instance(&ast_set).ok()? { let mut ret = Vec::new(); for elt in expr.getattr("elts").ok()?.iter().ok()? { let elt = elt.ok()?; if let Some(value) = get_str_from_expr(&elt) { ret.push(value); } else { return None; } } Some(ret) } else { None } }; let ast = py.import_bound("ast").unwrap(); let ast_dict = ast.getattr("Dict").unwrap(); let get_dict_from_expr = |expr: &Bound| -> Option> { if expr.is_instance(&ast_dict).ok()? { let mut ret = HashMap::new(); let keys = expr.getattr("keys").ok()?; let values = expr.getattr("values").ok()?; for (key, value) in keys.iter().ok()?.zip(values.iter().ok()?) { if let Some(key) = get_str_from_expr(&key.ok()?) { if let Some(value) = get_str_from_expr(&value.ok()?) 
{ ret.insert(key, value); } else { return None; } } else { return None; } } Some(ret) } else { None } }; // TODO: what if kw.value is an expression like a call to // version=get_version or version__version__ for (key, value) in setup_args.iter() { let value = value.bind(py); match key.as_str() { "name" => { if let Some(name) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "version" => { if let Some(version) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "description" => { if let Some(description) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: Some(path.into()) }); } } "long_description" => { if let Some(description) = get_str_from_expr(value) { let content_type = setup_args.get("long_description_content_type"); let content_type = if let Some(content_type) = content_type { get_str_from_expr(content_type.bind(py)) } else { None }; long_description = Some((description, content_type)); } } "license" => { if let Some(license) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "download_url" => { if let Some(download_url) = get_str_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(download_url), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "url" => { if let Some(url) = get_str_from_expr(value) { urls.push(url.clone()); } } "project_urls" => { if let Some(project_urls) = get_dict_from_expr(value) { ret.extend(parse_python_project_urls(project_urls.into_iter(), &Origin::Path(path.into()))); } } "maintainer" => { if let 
Some(maintainer) = get_str_from_expr(value) { let maintainer_email = setup_args.get("maintainer_email"); let maintainer_email = if let Some(maintainer_email) = maintainer_email { get_str_from_expr(maintainer_email.bind(py)) } else { None }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some(maintainer), email: maintainer_email, url: None }), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "author" => { if let Some(author) = get_str_from_expr(value) { let author_email = setup_args.get("author_email"); let author_email = if let Some(author_email) = author_email { get_str_from_expr(author_email.bind(py)) } else { None }; ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: Some(author), email: author_email, url: None }]), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } else if let Some(author) = get_str_list_from_expr(value) { let author_emails = setup_args.get("author_email"); let author_emails = if let Some(author_emails) = author_emails { get_str_list_from_expr(author_emails.bind(py)).map_or_else(|| vec![None; author.len()], |v| v.into_iter().map(Some).collect()) } else { vec![None; author.len()] }; let persons = author.into_iter().zip(author_emails.into_iter()).map(|(name, email)| Person { name: Some(name), email, url: None }).collect(); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(persons), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "keywords" => { if let Some(keywords) = get_str_list_from_expr(value) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords(keywords), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); } } "classifiers" => { if let Some(classifiers) = get_str_list_from_expr(value) { ret.extend(parse_python_classifiers(classifiers.iter().map(|s| s.as_str()), &Origin::Path(path.into()))); } } // Handled above "author_email" | "maintainer_email" => {}, // 
Irrelevant "rust_extensions" | "data_files" | "packages" | "package_dir" | "entry_points" => {}, // Irrelevant: dependencies t if t.ends_with("_requires") || t.ends_with("_require") => {}, _ => { warn!("Unknown key in setup.py: {}", key); } } } Ok::<(), PyErr>(()) }).map_err(|e: PyErr| { Python::with_gil(|py| { if e.is_instance_of::(py) { warn!("Syntax error while parsing setup.py: {}", e); ProviderError::Other(e.to_string()) } else { warn!("Failed to parse setup.py: {}", e); ProviderError::Other(e.to_string()) } }) })?; if let Some((description, content_type)) = long_description { ret.extend(parse_python_long_description( description.as_str(), content_type.as_deref(), &Origin::Path(path.into()), )?); } for url in urls { ret.extend(parse_python_url(url.as_str()).await); } Ok(ret) } fn parse_python_classifiers<'a>( classifiers: impl Iterator + 'a, origin: &'a Origin, ) -> impl Iterator + 'a { classifiers.filter_map(|classifier| { let mut parts = classifier.split(" :: "); let category = parts.next()?; let subcategory = parts.next()?; let value = parts.next()?; let certainty = Some(Certainty::Certain); let origin = Some(origin.clone()); match (category, subcategory) { ("Development Status", _) => None, ("Intended Audience", _) => None, ("License", "OSI Approved") => Some(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(value.into()), certainty, origin, }), ("Natural Language", _) => None, ("Operating System", _) => None, ("Programming Language", _) => None, ("Topic", _) => None, _ => { warn!("Unknown classifier: {}", classifier); None } } }) } #[derive(Deserialize)] pub struct PypiProjectInfo { pub author: Option, pub author_email: Option, pub bugtrack_url: Option, pub classifiers: Vec, pub description: String, pub description_content_type: Option, pub docs_url: Option, pub download_url: Option, pub downloads: HashMap, pub dynamic: Option, pub home_page: Option, pub keywords: Option, pub license: Option, pub maintainer: Option, pub maintainer_email: Option, 
pub name: String, pub package_url: String, pub platform: Option, pub project_url: String, pub project_urls: Option>, pub provides_extra: Option, pub release_url: String, pub requires_dist: Option>, pub requires_python: Option, pub summary: String, pub version: String, pub yanked: Option, pub yanked_reason: Option, } #[derive(Deserialize)] pub struct Digests { pub md5: String, pub sha256: String, pub blake2b_256: String, } #[derive(Deserialize)] pub struct PypiRelease { pub comment_text: String, pub digests: Digests, pub downloads: isize, pub filename: String, pub has_sig: bool, pub md5_digest: String, pub packagetype: String, pub python_version: String, pub requires_python: Option, pub size: isize, pub upload_time: String, pub upload_time_iso_8601: String, pub url: String, pub yanked: bool, pub yanked_reason: Option, } #[derive(Deserialize)] pub struct PypiUrl { pub comment_text: String, pub digests: Digests, pub filename: String, pub has_sig: bool, pub packagetype: String, pub python_version: String, pub requires_python: Option, pub size: isize, pub upload_time: String, pub upload_time_iso_8601: String, pub url: String, pub yanked: bool, pub yanked_reason: Option, } #[derive(Deserialize)] pub struct PypiProject { pub info: PypiProjectInfo, pub last_serial: isize, pub releases: HashMap>, pub urls: Vec, pub vulnerabilities: Vec, } impl TryInto for PypiProject { type Error = ProviderError; fn try_into(self) -> Result { let mut metadata = UpstreamMetadata::default(); if let Some(author) = self.info.author { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person { name: Some(author), email: self.info.author_email, url: None, }]), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(self.info.description), certainty: Some(Certainty::Certain), origin: None, }); if let Some(homepage) = self.info.home_page { metadata.insert(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(license) = self.info.license { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(self.info.name), certainty: Some(Certainty::Certain), origin: None, }); if let Some(maintainer) = self.info.maintainer { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some(maintainer), email: self.info.maintainer_email, url: None, }), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(self.info.version), certainty: Some(Certainty::Certain), origin: None, }); if let Some(keywords) = self.info.keywords { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Keywords( keywords.split(',').map(|s| s.trim().to_string()).collect(), ), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(urls) = self.info.project_urls { metadata.0.extend(parse_python_project_urls( urls.into_iter(), &Origin::Other("pypi".to_string()), )); } for url_data in self.urls { if url_data.packagetype == "sdist" { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(url_data.url), certainty: Some(Certainty::Certain), origin: None, }); } } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(self.info.summary), certainty: Some(Certainty::Certain), origin: None, }); Ok(metadata) } } pub async fn load_pypi_project(name: &str) -> Result, ProviderError> { let http_url = format!("https://pypi.org/pypi/{}/json", name) .parse() .unwrap(); let data = crate::load_json_url(&http_url, None).await?; let pypi_data: PypiProject = serde_json::from_value(data).map_err(|e| crate::ProviderError::Other(e.to_string()))?; Ok(Some(pypi_data)) } pub async fn remote_pypi_metadata(name: &str) 
/// Guess upstream metadata from an R `DESCRIPTION` file.
///
/// See <https://r-pkgs.org/description.html> for the format.
///
/// # Arguments
/// * `path` - path to the `DESCRIPTION` file
/// * `_settings` - guesser settings (currently unused)
///
/// # Errors
/// Returns `ProviderError::ParseError` when the file cannot be parsed,
/// or propagates I/O errors when it cannot be read.
#[cfg(feature = "r-description")]
pub async fn guess_from_r_description(
    path: &std::path::Path,
    _settings: &GuesserSettings,
) -> std::result::Result<Vec<UpstreamDatumWithMetadata>, ProviderError> {
    use std::str::FromStr;
    let contents = std::fs::read_to_string(path)?;
    // TODO: Use parse_relaxed
    let msg = r_description::lossy::RDescription::from_str(&contents)
        .map_err(|e| ProviderError::ParseError(e.to_string()))?;

    let mut results = Vec::new();
    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::Name(msg.name),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });
    // The `Repository` field in DESCRIPTION names the archive (e.g. CRAN),
    // not a VCS repository.
    if let Some(repository) = msg.repository {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Archive(repository),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(bug_reports) = msg.bug_reports {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::BugDatabase(bug_reports.to_string()),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::Version(msg.version.to_string()),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });
    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::License(msg.license),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });
    results.push(UpstreamDatumWithMetadata {
        datum: UpstreamDatum::Summary(msg.title),
        certainty: Some(Certainty::Certain),
        origin: Some(path.into()),
    });
    // Reflow the description: keep the first line as-is and dedent the
    // continuation lines (DESCRIPTION continuation lines are indented).
    let lines: Vec<&str> = msg.description.split_inclusive('\n').collect();
    if !lines.is_empty() {
        let reflowed = format!("{}{}", lines[0], textwrap::dedent(&lines[1..].concat()));
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Description(reflowed),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(maintainer) = msg.maintainer {
        let person = Person::from(maintainer.as_str());
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Maintainer(person),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(urls) = msg.url {
        // A single unlabelled URL is probably the homepage.
        if urls.len() == 1 {
            results.push(UpstreamDatumWithMetadata {
                datum: UpstreamDatum::Homepage(urls[0].url.to_string()),
                certainty: Some(Certainty::Possible),
                origin: Some(path.into()),
            });
        }
        for entry in urls {
            let url = &entry.url;
            let label = entry.label.as_deref();
            if let Some(hostname) = url.host_str() {
                if hostname == "bioconductor.org" {
                    results.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Archive("Bioconductor".to_string()),
                        certainty: Some(Certainty::Confident),
                        origin: Some(path.into()),
                    });
                }
                // Labels like "(devel)" or "(repository)" mark the VCS URL;
                // "(homepage)" marks the homepage; otherwise try to infer a
                // repository URL from the hostname/path.
                if label.map(str::to_lowercase).as_deref() == Some("devel")
                    || label.map(str::to_lowercase).as_deref() == Some("repository")
                {
                    results.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Repository(url.to_string()),
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                } else if label.map(str::to_lowercase).as_deref() == Some("homepage") {
                    results.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Homepage(url.to_string()),
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                } else if let Some(repo_url) = vcs::guess_repo_from_url(url, None).await {
                    results.push(UpstreamDatumWithMetadata {
                        datum: UpstreamDatum::Repository(repo_url),
                        certainty: Some(Certainty::Certain),
                        origin: Some(path.into()),
                    });
                }
            }
        }
    }
    Ok(results)
}
certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase( "https://github.com/ropensci/crul/issues".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Version("0.8.4".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::License("MIT + file LICENSE".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary("HTTP Client".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Description( r#"A simple HTTP client, with tools for making HTTP requests, and mocking HTTP requests. The package is built on R6, and takes inspiration from Ruby's 'faraday' gem () The package name is a play on curl, the widely used command line tool for HTTP, and this package is built on top of the R package 'curl', an interface to 'libcurl' ()."# .to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Maintainer(Person { name: Some("Scott Chamberlain".to_string()), email: Some("myrmecocystus@gmail.com".to_string()), url: None }), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( "https://github.com/ropensci/crul".to_string() ), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()), }, UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage("https://www.example.com/crul".to_string()), certainty: Some(Certainty::Certain), origin: Some(path.clone().into()) }, ] ); } } upstream-ontologist-0.2.4/src/providers/repology.rs000064400000000000000000000040511046102023000207000ustar 00000000000000use 
crate::UpstreamDatum; use std::collections::HashMap; #[allow(dead_code)] #[derive(serde::Deserialize)] struct Project { pub name: String, pub status: Option, pub www: Vec, pub licenses: Vec, pub summary: Option, pub downloads: Vec, } pub async fn guess_from_repology( repology_project: &str, ) -> Result, crate::ProviderError> { let metadata: Vec = serde_json::from_value( if let Some(value) = crate::get_repology_metadata(repology_project, None).await { value } else { return Ok(Vec::new()); }, ) .unwrap(); let mut fields = HashMap::new(); let mut add_field = |name, value, add| { *fields .entry(name) .or_insert(HashMap::new()) .entry(value) .or_insert(0) += add; }; for entry in metadata { let score = if entry.status.as_deref() == Some("outdated") { 1 } else { 10 }; for www in entry.www { add_field("Homepage", www, score); } for license in entry.licenses { add_field("License", license, score); } if let Some(summary) = entry.summary { add_field("Summary", summary, score); } for download in entry.downloads { add_field("Download", download, score); } } Ok(fields .into_iter() .map(|(name, scores)| { ( name.to_string(), scores .into_iter() .max_by_key(|(_, score)| *score) .unwrap() .0, ) }) .map(|(f, v)| match f.as_str() { "Homepage" => UpstreamDatum::Homepage(v), "License" => UpstreamDatum::License(v), "Summary" => UpstreamDatum::Summary(v), "Download" => UpstreamDatum::Download(v), _ => unreachable!(), }) .collect()) } upstream-ontologist-0.2.4/src/providers/ruby.rs000064400000000000000000000244131046102023000200250ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use log::debug; use serde::Deserialize; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub async fn guess_from_gemspec( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); let mut results = 
Vec::new(); #[derive(Debug)] enum GemValue { String(String), Array(Vec), } impl GemValue { fn as_str(&self) -> Option<&str> { match self { GemValue::String(s) => Some(s), GemValue::Array(_) => None, } } fn as_array(&self) -> Option<&Vec> { match self { GemValue::String(_) => None, GemValue::Array(a) => Some(a), } } } fn parse_value(value: &str) -> Result { let trimmed = value.trim(); if (trimmed.starts_with('"') && trimmed.ends_with('"')) || (trimmed.starts_with('\'') && trimmed.ends_with('\'')) { return Ok(GemValue::String(trimmed[1..trimmed.len() - 1].to_string())); } else if trimmed.starts_with('"') || trimmed.starts_with("'.freeze") { return Ok(GemValue::String(trimmed[1..].to_string())); } else if trimmed.starts_with('[') && trimmed.ends_with(']') { let elements = trimmed[1..trimmed.len() - 1] .split(',') .map(parse_value) .collect::, _>>()?; return Ok(GemValue::Array(elements)); } Err(format!("Could not parse value: {}", value)) } for line in reader.lines().map_while(Result::ok) { if line.starts_with('#') { continue; } if line.trim().is_empty() { continue; } if line == "Gem::Specification.new do |s|\n" || line == "end\n" { continue; } if let Some(line) = line.strip_prefix(" s.") { let (key, rawval) = match line.split_once('=') { Some((key, rawval)) => (key.trim(), rawval), _ => continue, }; let val = match parse_value(rawval.trim()) { Ok(val) => val, Err(_) => { debug!("Could not parse value: {}", rawval); continue; } }; match key { "name" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "version" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "homepage" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: 
Some(path.into()), }), "summary" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "description" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "license" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(val.as_str().unwrap().to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), "authors" => results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author( val.as_array() .unwrap() .iter() .map(|p| Person::from(p.as_str().unwrap())) .collect(), ), certainty: Some(Certainty::Certain), origin: Some(path.into()), }), _ => debug!("unknown field {} ({:?}) in gemspec", key, val), } } else { debug!("ignoring unparsable line in {}: {:?}", path.display(), line); } } Ok(results) } #[derive(Deserialize)] pub struct RubygemMetadata { pub changelog_uri: Option, pub source_code_uri: Option, } #[derive(Deserialize)] pub struct RubygemDependency { pub name: String, pub requirements: String, } #[derive(Deserialize)] pub struct RubygemDependencies { pub development: Vec, pub runtime: Vec, } #[derive(Deserialize)] pub struct Rubygem { pub name: String, pub downloads: usize, pub version: String, pub version_created_at: String, pub version_downloads: usize, pub platform: String, pub authors: String, pub info: String, pub licenses: Vec, pub metadata: RubygemMetadata, pub yanked: bool, pub sha: String, pub spec_sha: String, pub project_uri: url::Url, pub gem_uri: url::Url, pub homepage_uri: Option, pub wiki_uri: Option, pub documentation_uri: Option, pub mailing_list_uri: Option, pub source_code_uri: Option, pub bug_tracker_uri: Option, pub changelog_uri: Option, pub funding_uri: Option, pub dependencies: RubygemDependencies, } impl TryFrom for UpstreamMetadata { type Error 
= ProviderError; fn try_from(gem: Rubygem) -> Result { let mut metadata = UpstreamMetadata::default(); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(gem.name), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(gem.version), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Author(vec![Person::from(gem.authors.as_str())]), certainty: Some(Certainty::Certain), origin: None, }); metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(gem.homepage_uri.unwrap_or(gem.project_uri).to_string()), certainty: Some(Certainty::Certain), origin: None, }); if let Some(wiki_uri) = gem.wiki_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Wiki(wiki_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(mailing_list_uri) = gem.mailing_list_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::MailingList(mailing_list_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(bug_tracker_uri) = gem.bug_tracker_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(bug_tracker_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(funding_uri) = gem.funding_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Funding(funding_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(source_code_uri) = gem.source_code_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(source_code_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(gem.licenses.join(", ")), certainty: Some(Certainty::Certain), origin: None, }); if let Some(documentation_uri) = gem.documentation_uri 
{ metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(documentation_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(changelog_uri) = gem.changelog_uri { metadata.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Changelog(changelog_uri.to_string()), certainty: Some(Certainty::Certain), origin: None, }); } Ok(metadata) } } pub async fn load_rubygem(name: &str) -> Result, ProviderError> { let url = format!("https://rubygems.org/api/v1/gems/{}.json", name) .parse() .unwrap(); let data = crate::load_json_url(&url, None).await?; let gem: Rubygem = serde_json::from_value(data).unwrap(); Ok(Some(gem)) } pub async fn remote_rubygem_metadata(name: &str) -> Result { let gem = load_rubygem(name).await?; match gem { Some(gem) => gem.try_into(), None => Ok(UpstreamMetadata::default()), } } #[cfg(test)] mod tests { #[test] fn test_parse_gem() { let gemspec = include_str!("../testdata/rubygem.json"); let gem: super::Rubygem = serde_json::from_str(gemspec).unwrap(); assert_eq!(gem.name, "bullet"); } } upstream-ontologist-0.2.4/src/providers/rust.rs000064400000000000000000000304071046102023000200410ustar 00000000000000use crate::{ Certainty, GuesserSettings, Person, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata, UpstreamMetadata, }; use serde::Deserialize; use std::collections::HashMap; #[cfg(feature = "cargo")] #[derive(Deserialize)] struct CargoToml { package: Option, workspace: Option, } #[cfg(feature = "cargo")] #[derive(Deserialize)] struct CargoWorkspace { #[serde(default)] package: Option, } #[cfg(feature = "cargo")] /// Allow either specifying setting T directly or "workspace = true" pub enum DirectOrWorkspace { Direct(T), Workspace, } #[cfg(feature = "cargo")] impl<'de, T: serde::Deserialize<'de>> serde::Deserialize<'de> for DirectOrWorkspace { fn deserialize(deserializer: D) -> Result, D::Error> where D: serde::Deserializer<'de>, { // Assume deserializing T, but if that fails, check for a 
/// Guess upstream metadata from a `Cargo.toml` manifest.
///
/// Handles `package.*` keys that are either set directly or inherited
/// from the workspace via `{ workspace = true }`;
/// see https://doc.rust-lang.org/cargo/reference/manifest.html.
///
/// # Errors
/// Returns `ProviderError::ParseError` on invalid TOML and propagates
/// I/O errors when the file cannot be read.
#[cfg(feature = "cargo")]
pub fn guess_from_cargo(
    path: &std::path::Path,
    _settings: &GuesserSettings,
) -> std::result::Result<Vec<UpstreamDatumWithMetadata>, ProviderError> {
    let doc: CargoToml = toml::from_str(&std::fs::read_to_string(path)?)
        .map_err(|e| ProviderError::ParseError(e.to_string()))?;
    let package = match doc.package {
        Some(p) => p,
        None => {
            // Pure workspace manifests have no [package] section.
            log::debug!("No package section in Cargo.toml");
            return Ok(Vec::new());
        }
    };
    // Fall back to an empty workspace so resolve! always has something to
    // consult for `workspace = true` fields.
    let workspace = doc.workspace.unwrap_or(CargoWorkspace { package: None });
    let mut results = Vec::new();
    if let Some(name) = package.name {
        // The crate name doubles as both the project name and the crate id.
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Name(name.clone()),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::CargoCrate(name),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(description) = resolve!(workspace, package, description) {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Summary(description),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(homepage) = resolve!(workspace, package, homepage) {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Homepage(homepage),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(license) = resolve!(workspace, package, license) {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::License(license),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(repository) = resolve!(workspace, package, repository) {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Repository(repository),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(version) = resolve!(workspace, package, version) {
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Version(version),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    if let Some(authors) = package.authors {
        let authors = authors.iter().map(|a| Person::from(a.as_str())).collect();
        results.push(UpstreamDatumWithMetadata {
            datum: UpstreamDatum::Author(authors),
            certainty: Some(Certainty::Certain),
            origin: Some(path.into()),
        });
    }
    Ok(results)
}
crate::ProviderError; fn try_from(value: CrateInfo) -> Result { let mut ret = UpstreamMetadata::default(); ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(value.crate_.name.to_string()), certainty: Some(Certainty::Certain), origin: None, }); if let Some(homepage) = value.crate_.homepage { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(homepage), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(repository) = value.crate_.repository { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repository), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(description) = value.crate_.description { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Summary(description), certainty: Some(Certainty::Certain), origin: None, }); } if let Some(license) = value.crate_.license { ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(license), certainty: Some(Certainty::Certain), origin: None, }); } ret.insert(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(value.crate_.newest_version.to_string()), certainty: Some(Certainty::Certain), origin: None, }); Ok(ret) } } pub async fn load_crate_info(cratename: &str) -> Result, crate::ProviderError> { let http_url = format!("https://crates.io/api/v1/crates/{}", cratename); let data = crate::load_json_url(&http_url.parse().unwrap(), None).await?; Ok(Some(serde_json::from_value(data).unwrap())) } // TODO: dedupe with TryFrom implementation above fn parse_crates_io(data: &CrateInfo) -> Vec { let crate_data = &data.crate_; let mut results = Vec::new(); results.push(UpstreamDatum::Name(crate_data.name.to_string())); if let Some(homepage) = crate_data.homepage.as_ref() { results.push(UpstreamDatum::Homepage(homepage.to_string())); } if let Some(repository) = crate_data.repository.as_ref() { results.push(UpstreamDatum::Repository(repository.to_string())); } if let Some(description) = crate_data.description.as_ref() { 
results.push(UpstreamDatum::Summary(description.to_string())); } if let Some(license) = crate_data.license.as_ref() { results.push(UpstreamDatum::License(license.to_string())); } results.push(UpstreamDatum::Version( crate_data.newest_version.to_string(), )); results } pub struct CratesIo; impl Default for CratesIo { fn default() -> Self { Self::new() } } impl CratesIo { pub fn new() -> Self { Self } } #[async_trait::async_trait] impl crate::ThirdPartyRepository for CratesIo { fn name(&self) -> &'static str { "crates.io" } fn max_supported_certainty(&self) -> Certainty { Certainty::Certain } fn supported_fields(&self) -> &'static [&'static str] { &["Homepage", "Name", "Repository", "Version", "Summary"][..] } async fn guess_metadata(&self, name: &str) -> Result, ProviderError> { let data = load_crate_info(name).await?; if data.is_none() { return Ok(Vec::new()); } Ok(parse_crates_io(&data.unwrap())) } } pub async fn remote_crate_data(name: &str) -> Result { let data = load_crate_info(name).await?; if let Some(data) = data { Ok(data.try_into()?) } else { Ok(UpstreamMetadata::default()) } } #[cfg(test)] mod crates_io_tests { use super::*; #[test] fn test_load_crate_info() { let data = include_str!("../testdata/crates.io.json"); let crate_info: CrateInfo = serde_json::from_str(data).unwrap(); assert_eq!(crate_info.crate_.name, "breezy"); } } upstream-ontologist-0.2.4/src/providers/security_md.rs000064400000000000000000000014321046102023000213670ustar 00000000000000//! https://docs.github.com/en/free-pro-team@latest/github/\ //! 
managing-security-vulnerabilities/adding-a-security-policy-to-your-repository use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; pub fn guess_from_security_md( name: &str, path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, ProviderError> { let path = path.strip_prefix("./").unwrap_or(path); let mut results = Vec::new(); // TODO(jelmer): scan SECURITY.md for email addresses/URLs with instructions results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::SecurityMD(name.to_string()), certainty: Some(Certainty::Certain), origin: Some(path.into()), }); Ok(results) } upstream-ontologist-0.2.4/src/providers/waf.rs000064400000000000000000000025551046102023000176240ustar 00000000000000use crate::{Certainty, GuesserSettings, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use lazy_regex::regex; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; pub fn guess_from_wscript( path: &Path, _settings: &GuesserSettings, ) -> std::result::Result, ProviderError> { let file = File::open(path)?; let reader = BufReader::new(file); let mut results = Vec::new(); let appname_regex = regex!("APPNAME = [\'\"](.*)[\'\"]"); let version_regex = regex!("VERSION = [\'\"](.*)[\'\"]"); for line in reader.lines().map_while(Result::ok) { if let Some(captures) = appname_regex.captures(&line) { let name = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } if let Some(captures) = version_regex.captures(&line) { let version = captures.get(1).unwrap().as_str().to_owned(); results.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version), certainty: Some(Certainty::Confident), origin: Some(path.into()), }); } } Ok(results) } upstream-ontologist-0.2.4/src/readme.rs000064400000000000000000001266171046102023000162750ustar 00000000000000use 
crate::{Certainty, Origin, ProviderError, UpstreamDatum, UpstreamDatumWithMetadata}; use lazy_regex::regex; use regex::Regex; use select::document::Document; use select::node::Node; use select::predicate::{And, Class, Name, Text}; use std::io::BufRead; use std::iter::Iterator; use url::Url; pub fn skip_paragraph(para: &str) -> (bool, Vec) { let mut ret = Vec::::new(); let re = regex!(r"(?ms)^See .* for more (details|information)\."); if re.is_match(para) { return (true, ret); } let re = regex!(r"(?ms)^See .* for instructions"); if re.is_match(para) { return (true, ret); } let re = regex!(r"(?ms)^Please refer .*\."); if re.is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^It is licensed under (.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^License: (.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^(Home page|homepage_url|Main website|Website|Homepage): (.*)").captures(para) { let mut url = m.get(2).unwrap().as_str().to_string(); if url.starts_with('<') && url.ends_with('>') { url = url[1..url.len() - 1].to_string(); } ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(url), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^More documentation .* at http.*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^Documentation (can be found|is hosted|is available) (at|on) ([^ ]+)") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(m.get(3).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let 
Some(m) = regex!(r"(?ms)^Documentation for (.*)\s+(can\s+be\s+found|is\s+hosted)\s+(at|on)\s+([^ ]+)") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(m.get(4).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^Documentation[, ].*found.*(at|on).*\.").is_match(para) { return (true, ret); } if regex!(r"(?ms)^See (http.*|gopkg.in.*|github.com.*)").is_match(para) { return (true, ret); } if regex!(r"(?ms)^Available on (.*)").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^This software is freely distributable under the (.*) license.*") .captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if regex!(r"(?ms)^This .* is hosted at .*").is_match(para) { return (true, ret); } if regex!(r"(?ms)^This code has been developed by .*").is_match(para) { return (true, ret); } if para.starts_with("Download and install using:") { return (true, ret); } if regex!(r"(?ms)^Bugs should be reported by .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^The bug tracker can be found at (http[^ ]+[^.])").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Likely), origin: None, }); return (true, ret); } if let Some(m) = regex!(r"(?ms)^Copyright (\(c\) |)(.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Copyright(m.get(2).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if regex!(r"(?ms)^You install .*").is_match(para) { return (true, 
ret); } if regex!(r"(?ms)^This .* is free software; .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^Please report any bugs(.*) to <(.*)>").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.get(2).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } if regex!(r"(?ms)^Share and Enjoy").is_match(para) { return (true, ret); } let lines = para.lines().collect::>(); if !lines.is_empty() && ["perl Makefile.PL", "make", "./configure"].contains(&lines[0].trim()) { return (true, ret); } if regex!(r"(?ms)^For further information, .*").is_match(para) { return (true, ret); } if regex!(r"(?ms)^Further information .*").is_match(para) { return (true, ret); } if let Some(m) = regex!(r"(?ms)^A detailed ChangeLog can be found.*:\s+(http.*)").captures(para) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Changelog(m.get(1).unwrap().as_str().to_string()), certainty: Some(Certainty::Possible), origin: None, }); return (true, ret); } (false, ret) } pub fn description_from_readme_rst( long_description: &str, ) -> Result<(Option, Vec), ProviderError> { // Work around https://github.com/flying-sheep/rust-rst/issues/55 let mut fields: Vec<(&str, String)> = Vec::new(); let mut in_field = false; let long_description = long_description .lines() .filter(|line| { // Filter out field lists. Syntax is: // :field: value // with possible continuation lines that are indented. // field can contain any character except a colon followed by a space unless // it is escaped with a backslash. 
if line.starts_with([' ', '\t'].as_ref()) && in_field { if in_field { fields.last_mut().unwrap().1.push_str(line.trim()); return false; } return true; } else { in_field = false; } if let Some((_, field, value)) = lazy_regex::regex_captures!(r"^:([^:]+): (.*)", line) { fields.push((field, value.to_string())); in_field = true; false } else { line != &"----" } }) .collect::>() .join("\n") + "\n"; let html = rst_to_html(&long_description); let (description, mut md) = description_from_readme_html(&html)?; for (field, value) in fields { md.extend(parse_field(field, &NodeOrText::Text(&value))); } Ok((description, md)) } pub fn description_from_readme_md( long_description: &str, ) -> Result<(Option, Vec), ProviderError> { let parser = pulldown_cmark::Parser::new(long_description); let mut html_output = String::new(); pulldown_cmark::html::push_html(&mut html_output, parser); description_from_readme_html(&html_output) } pub async fn guess_from_readme( path: &std::path::Path, _trust_package: bool, ) -> Result, ProviderError> { let mut urls: Vec = vec![]; let mut ret = vec![]; let f = std::fs::File::open(path)?; let reader = std::io::BufReader::new(f); let mut line_iter = reader.lines(); loop { let line = if let Some(line) = line_iter.next() { line? 
} else { break; }; let line = line.trim(); let mut cmdline = line.strip_prefix('$').unwrap_or(line).trim().to_string(); if cmdline.starts_with("git clone ") || cmdline.starts_with("fossil clone ") || cmdline.starts_with("hg clone ") || cmdline.starts_with("bzr co ") || cmdline.starts_with("bzr branch ") { while cmdline.ends_with('\\') { let next_line = line_iter.next().unwrap()?; cmdline = format!("{} {}", cmdline, next_line.trim()); } if let Some(url) = crate::vcs_command::url_from_vcs_command(cmdline.as_bytes()) { urls.push(url.parse().unwrap()); } } for m in lazy_regex::regex!("[\"'`](git clone.*)[\"`']").captures_iter(line) { if let Some(url) = crate::vcs_command::url_from_git_clone_command( m.get(1).unwrap().as_str().as_bytes(), ) { urls.push(url.parse().unwrap()); } } if let Some(m) = lazy_regex::regex_find!(r"cvs.*-d\s*:pserver:.*", line) { if let Some(url) = crate::vcs_command::url_from_cvs_co_command(m.as_bytes()) { urls.push(url.parse().unwrap()); } } for m in lazy_regex::regex!("($ )?(svn co .*)").captures_iter(line) { if let Some(url) = crate::vcs_command::url_from_svn_co_command(m.get(2).unwrap().as_str().as_bytes()) { urls.push(url.parse().unwrap()); } } const PROJECT_RE: &str = "([^/]+)/([^/?.()\"#>\\s]*[^-,/?.()\"#>\\s])"; for m in regex::Regex::new(format!("https://travis-ci.org/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}/{}", m.get(1).unwrap().as_str(), m.get(2).unwrap().as_str() )), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://coveralls.io/r/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}/{}", m.get(1).unwrap().as_str(), m.get(2).unwrap().as_str() )), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in 
lazy_regex::regex!("https://github.com/([^/]+)/([^/]+)/issues").find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(m.as_str().to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://github.com/{}/(.git)?", PROJECT_RE).as_str()) .unwrap() .find_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.as_str().trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in regex::Regex::new(format!("https://github.com/{}", PROJECT_RE).as_str()) .unwrap() .captures_iter(line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository( m.get(0).unwrap().as_str().trim_end_matches('.').to_string(), ), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } if let Some(m) = lazy_regex::regex_find!(r"git://([^ ]+)", line) { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(m.trim_end_matches('.').to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } for m in lazy_regex::regex_find!("https://([^]/]+)/([^]\\s()\"#]+)", line) { let url = m.trim_end_matches('.'); if crate::vcs::is_gitlab_site(m, None).await { if let Some(repo_url) = crate::vcs::guess_repo_from_url(&url.parse().unwrap(), None).await { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(repo_url), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } else { log::warn!("Ignoring invalid URL {} in {}", url, path.display()); } } } } let (description, extra_metadata) = match path.extension().and_then(|s| s.to_str()) { Some("md") => { let contents = std::fs::read_to_string(path)?; description_from_readme_md(&contents) } Some("rst") => { let contents = std::fs::read_to_string(path)?; description_from_readme_rst(&contents) } None => { let contents = std::fs::read_to_string(path)?; 
Ok(description_from_readme_plain(&contents)?) } Some("pod") => { let contents = std::fs::read_to_string(path)?; let metadata = crate::providers::perl::guess_from_pod( &contents, &Origin::Path(path.to_path_buf()), )?; Ok((None, metadata)) } _ => Ok((None, vec![])), }?; if let Some(description) = description { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Description(description), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } ret.extend(extra_metadata); let prefer_public = |url: &url::Url| -> i32 { if url.scheme().contains("ssh") { 1 } else { 0 } }; urls.sort_by_key(prefer_public); if !urls.is_empty() { ret.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(urls.remove(0).to_string()), certainty: Some(Certainty::Possible), origin: Some(path.into()), }); } Ok(ret) } pub fn parse_first_header_text(text: &str) -> (Option<&str>, Option<&str>, Option<&str>) { if let Some((_, name, version)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) ([0-9.]+)$", text) { return (Some(name), None, Some(version)); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+): (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) - (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, summary)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) -- (.+)$", text) { return (Some(name), Some(summary), None); } if let Some((_, name, version)) = lazy_regex::regex_captures!(r"^([A-Za-z]+) version ([^ ]+)", text) { return (Some(name), None, Some(version)); } (None, None, None) } #[test] fn test_parse_first_header_text() { assert_eq!( parse_first_header_text("libwand 1.0"), (Some("libwand"), None, Some("1.0")) ); assert_eq!( parse_first_header_text("libwand -- A wand"), (Some("libwand"), Some("A wand"), None) ); assert_eq!( parse_first_header_text("libwand version 1.0"), (Some("libwand"), None, Some("1.0")) ); } pub fn 
description_from_readme_plain( text: &str, ) -> Result<(Option, Vec), ProviderError> { let mut lines: Vec<&str> = text.split_terminator('\n').collect(); let mut metadata: Vec = Vec::new(); if lines.is_empty() { return Ok((None, Vec::new())); } if !lines[0].trim().is_empty() && lines.len() > 1 && (lines[1].is_empty() || !lines[1].chars().next().unwrap().is_alphanumeric()) { let (name, summary, version) = parse_first_header_text(lines[0]); if let Some(name) = name { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Likely), }); } if let Some(version) = version { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Likely), }); } if let Some(summary) = summary { metadata.push(UpstreamDatumWithMetadata { origin: None, datum: UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Likely), }); } if name.is_some() || version.is_some() || summary.is_some() { lines.remove(0); } } while !lines.is_empty() && lines[0].trim().trim_matches('-').is_empty() { lines.remove(0); } let mut paras: Vec> = Vec::new(); let mut current_para: Vec<&str> = Vec::new(); for line in lines { if line.trim().is_empty() { if !current_para.is_empty() { paras.push(current_para.clone()); current_para.clear(); } } else { current_para.push(line); } } if !current_para.is_empty() { paras.push(current_para.clone()); } let mut output: Vec = Vec::new(); for para in paras { if para.is_empty() { continue; } let line = para.join("\n"); let (skip, extra_metadata) = skip_paragraph(&line); metadata.extend(extra_metadata); if skip { continue; } output.push(format!("{}\n", line)); } let description = if output.len() > 30 { None } else { while !output.is_empty() && output.last().unwrap().trim().is_empty() { output.pop(); } Some(output.join("\n")) }; Ok((description, metadata)) } fn ul_is_field_list(el: Node) -> bool { let names = ["Issues", 
"Home", "Documentation", "License"]; for li in el.find(Name("li")) { let text = li.text(); if let Some((_, name)) = lazy_regex::regex_captures!(r"([A-Za-z]+)\s*:.*", text.trim()) { if !names.contains(&name) { return false; } } else { return false; } } true } #[test] fn test_ul_is_field_list() { let el = Document::from( r#""#, ); let ul = el.find(Name("ul")).next().unwrap(); assert!(ul_is_field_list(ul)); let el = Document::from( r#"
  • Some other thing
"#, ); let ul = el.find(Name("ul")).next().unwrap(); assert!(!ul_is_field_list(ul)); } fn skip_paragraph_block(para: &Node) -> (bool, Vec) { let (skip, mut extra_metadata) = skip_paragraph(&render(para)); if skip { return (true, extra_metadata); } for child in para.children() { if let Some(text_node) = child.as_text() { if text_node.trim().is_empty() { continue; } } if child.name() == Some("a") { let mut name: Option = None; if let Some(first_child) = para.first_child() { if let Some(text) = first_child.as_text() { name = Some(text.to_string()); } else if first_child.name() == Some("img") { name = first_child.attr("alt").map(|s| s.to_string()); } } if let Some(name) = name { match name.as_str() { "CRAN" | "CRAN_Status_Badge" | "CRAN_Logs_Badge" => { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Archive("CRAN".to_string()), certainty: Some(Certainty::Confident), origin: None, }); } "Gitter" => { if let Some(href) = child.attr("href") { let parsed_url = Url::parse(href).unwrap(); extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}", parsed_url.path().trim_start_matches('/') )), certainty: Some(Certainty::Confident), origin: None, }); } } "Build Status" => { if let Some(href) = child.attr("href") { let parsed_url = Url::parse(href).unwrap(); if parsed_url.host_str() == Some("travis-ci.org") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Repository(format!( "https://github.com/{}", parsed_url.path().trim_start_matches('/') )), certainty: Some(Certainty::Confident), origin: None, }); } } } "Documentation" => { if let Some(href) = child.attr("href") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(href.to_string()), certainty: Some(Certainty::Confident), origin: None, }); } } "API Docs" => { if let Some(href) = child.attr("href") { extra_metadata.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::APIDocumentation(href.to_string()), certainty: Some(Certainty::Confident), origin: None, }); } } "Downloads" => { if let Some(href) = child.attr("href") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Download(href.to_string()), certainty: Some(Certainty::Confident), origin: None, }); } } "crates.io" => { if let Some(href) = child.attr("href") { if href.starts_with("https://crates.io/crates/") { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::CargoCrate( href.rsplit('/').next().unwrap().to_string(), ), certainty: Some(Certainty::Confident), origin: None, }); } } } name => { let re = Regex::new(r"(.*) License").unwrap(); if let Some(caps) = re.captures(name) { extra_metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(caps[1].to_string()), certainty: Some(Certainty::Likely), origin: None, }); } else { log::debug!("Unhandled field {:?} in README", name); } } } } } } if render(para).is_empty() { return (true, extra_metadata); } (false, vec![]) } fn render(el: &Node) -> String { el.find(Text).map(|t| t.text()).collect::>().join("") } fn parse_first_header(el: &Node) -> Vec { let mut metadata = Vec::new(); let binding = render(el); let (name, summary, version) = parse_first_header_text(&binding); if let Some(mut name) = name { if name.to_lowercase().contains("installation") { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Possible), origin: None, }); } else { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Likely), origin: None, }); } if let Some(suffix) = name.strip_prefix("About ") { name = suffix; } metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Name(name.to_string()), certainty: Some(Certainty::Likely), origin: None, }); } if let Some(summary) = summary { metadata.push(UpstreamDatumWithMetadata { datum: 
UpstreamDatum::Summary(summary.to_string()), certainty: Some(Certainty::Likely), origin: None, }); } if let Some(version) = version { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Version(version.to_string()), certainty: Some(Certainty::Likely), origin: None, }); } metadata } fn is_semi_header(el: &Node) -> bool { if el.name() != Some("p") { return false; } let text = render(el); if text == "INSTALLATION" { return true; } if text.contains('\n') { return false; } let re = Regex::new(r"([a-z-A-Z0-9]+) - ([^\.]+)").unwrap(); re.is_match(&text) } fn extract_paragraphs<'a>( children: impl Iterator>, paragraphs: &mut Vec, metadata: &mut Vec, ) { for child in children { match child.name() { Some("div") => { extract_paragraphs(child.children(), paragraphs, metadata); if !paragraphs.is_empty() && child.is(Class("section")) { break; } } Some("section") => { extract_paragraphs(child.children(), paragraphs, metadata); if !paragraphs.is_empty() { break; } } Some("p") => { if is_semi_header(&child) { if paragraphs.is_empty() { metadata.extend(parse_first_header(&child)); continue; } else { break; } } let (skip, extra_metadata) = skip_paragraph_block(&child); metadata.extend(extra_metadata); if skip { if paragraphs.is_empty() { continue; } else { break; } } let text = render(&child); if !text.trim().is_empty() { paragraphs.push(text + "\n"); } } Some("pre") => paragraphs.push(render(&child)), Some("ul") if !paragraphs.is_empty() => { if ul_is_field_list(child) { metadata.extend(parse_ul_field_list(&child)); } else { paragraphs.push( child .find(Name("li")) .map(|li| format!("* {}\n", render(&li))) .collect::>() .join(""), ); } } Some(h) if h.starts_with("h") => { if paragraphs.is_empty() { if !["About", "Introduction", "Overview", "Documentation"] .contains(&render(&child).trim()) { metadata.extend(parse_first_header(&child)); } } else { break; } } None => {} _ => { log::debug!("Unhandled element in README: {:?}", child.name()); } } } } fn parse_field(name: 
&str, body: &NodeOrText) -> Vec { let mut metadata = Vec::new(); let get_link = || -> Option { match body { NodeOrText::Node(body) => { if let Some(a) = body.find(Name("a")).next() { return Some(a.attr("href").unwrap().to_string()); } else if body.is(Name("a")) { return Some(body.attr("href").unwrap().to_string()); } else if let Some(text) = body.as_text().filter(|u| Url::parse(u).is_ok()) { return Some(text.to_string()); } else { return None; } } NodeOrText::Text(text) => { if let Ok(url) = Url::parse(text) { return Some(url.to_string()); } None } } }; match name { "Homepage" | "Home" => { if let Some(link) = get_link() { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Homepage(link), certainty: Some(Certainty::Confident), origin: None, }); } } "Issues" => { if let Some(link) = get_link() { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::BugDatabase(link), certainty: Some(Certainty::Confident), origin: None, }); } } "Documentation" => { if let Some(link) = get_link() { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::Documentation(link), certainty: Some(Certainty::Confident), origin: None, }); } } "License" => { metadata.push(UpstreamDatumWithMetadata { datum: UpstreamDatum::License(match body { NodeOrText::Node(body) => render(body), NodeOrText::Text(text) => text.to_string(), }), certainty: Some(Certainty::Confident), origin: None, }); } _ => { log::debug!("Unhandled field {:?} in README", name); } } metadata } enum NodeOrText<'a> { Node(Node<'a>), Text(&'a str), } impl<'a> From> for NodeOrText<'a> { fn from(node: Node<'a>) -> Self { if let Some(text) = node.as_text() { NodeOrText::Text(text) } else { NodeOrText::Node(node) } } } impl<'a> From<&'a str> for NodeOrText<'a> { fn from(text: &'a str) -> Self { NodeOrText::Text(text) } } /// Extracts a list of fields from a `ul` element. /// /// # Arguments /// * `el` - The `ul` element to extract fields from. 
/// /// # Returns /// A list of fields extracted from the `ul` element. fn iter_ul_field_list<'a>(el: &'a Node<'a>) -> Vec<(&'a str, NodeOrText<'a>)> { el.find(Name("li")) .filter_map(|li| { let children: Vec<_> = li.children().collect(); if children.len() == 2 && children[0].is(Text) { let name = children[0].as_text().unwrap().trim().trim_end_matches(':'); return Some((name, children[1].into())); } else if children.len() == 1 { let (name, value) = children[0].as_text().unwrap().split_once(':')?; return Some(( name.trim(), NodeOrText::Text(value.trim().trim_start_matches(':')), )); } None }) .collect() } /// Parses a list of fields from a `ul` element. /// /// # Arguments /// * `el` - The `ul` element to parse. /// /// # Returns /// A list of metadata extracted from the `ul` element. fn parse_ul_field_list(el: &Node) -> Vec { let mut metadata = Vec::new(); for (name, el_ref) in iter_ul_field_list(el) { metadata.extend(parse_field(name, &el_ref)); } metadata } fn description_from_basic_soup( soup: &Document, ) -> (Option, Vec) { let mut metadata = Vec::new(); let body = soup .find(Name("body")) .next() .expect("No body element found in HTML document"); let mut child_iter = body.children().peekable(); // Drop any headers while let Some(el) = child_iter.peek() { if el.name().map(|h| h.starts_with("h")).unwrap_or(false) { metadata.extend(parse_first_header(el)); child_iter.next(); } else if el.is(Text) { child_iter.next(); continue; } else { break; } } if let Some(table) = soup.find(And(Name("table"), Class("field-list"))).next() { metadata.extend(parse_ul_field_list(&table)); } let mut paragraphs: Vec = Vec::new(); extract_paragraphs(child_iter, &mut paragraphs, &mut metadata); if paragraphs.is_empty() { log::debug!("Empty description; no paragraphs."); return (None, metadata); } if paragraphs.len() < 6 { return (Some(paragraphs.join("\n")), metadata); } log::debug!( "Not returning description, number of paragraphs too high: {}", paragraphs.len() ); (None, metadata) } 
/// Extract a free-form description and any upstream metadata from README
/// contents that are already rendered as HTML.
///
/// # Arguments
/// * `html_text` - The HTML text of the README.
///
/// # Returns
/// A tuple of an optional description paragraph text and the metadata
/// gathered while scanning the document (delegates to
/// `description_from_basic_soup`).
pub fn description_from_readme_html( html_text: &str, ) -> Result<(Option, Vec), ProviderError> { let soup = Document::from(html_text); Ok(description_from_basic_soup(&soup)) }
/// Render reStructuredText to HTML using the bundled rst parser/renderer.
// NOTE(review): both `parse` and `render_html` results are `unwrap`ed, so
// malformed RST input will panic rather than surface an error — TODO confirm
// every caller feeds it best-effort README text where a panic is acceptable.
fn rst_to_html(rst_text: &str) -> String { use rst_renderer::render_html; use uo_rst_parser::parse; let document = parse(rst_text).unwrap(); let mut output = Vec::new(); render_html(&document, &mut std::io::Cursor::new(&mut output), true).unwrap(); String::from_utf8(output).unwrap() }
// Unit tests for the RST -> HTML conversion helper.
#[cfg(test)] mod tests { use super::*; #[test] fn test_rst_to_html() { let rst = r#".. _`rst`: RST === This is a test of RST to HTML conversion."#; let html = rst_to_html(rst); assert_eq!( html, "\n\n
\n

RST

\n

This is a test of RST to HTML conversion.

\n
\n\n" ); } #[test] fn test_parse_first_header_text() { assert_eq!( super::parse_first_header_text("libwand 1.0"), (Some("libwand"), None, Some("1.0")) ); assert_eq!( super::parse_first_header_text("libwand -- A wand"), (Some("libwand"), Some("A wand"), None) ); assert_eq!( super::parse_first_header_text("libwand version 1.0"), (Some("libwand"), None, Some("1.0")) ); } #[test] fn test_parse_field() { assert_eq!( super::parse_field( "Homepage", &root(&Document::from( r#"example"# )) .into() ), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Homepage("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); assert_eq!( super::parse_field( "Issues", &root(&Document::from( r#"example"# )) .into(), ), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::BugDatabase("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); assert_eq!( super::parse_field( "Documentation", &root(&Document::from( r#"example"# )) .into() ), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Documentation("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); assert_eq!( super::parse_field("License", &"MIT".into()), vec![super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::License("MIT".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }] ); } struct Root; impl select::predicate::Predicate for Root { fn matches(&self, node: &Node) -> bool { node.parent().is_none() } } fn root(doc: &Document) -> Node { let root = doc.find(Root).next().unwrap(); assert_eq!(root.name(), Some("html")); root.find(Name("body")) .next() .unwrap() .first_child() .unwrap() } #[test] fn test_is_semi_header() { let fragment = Document::from("

INSTALLATION

"); assert!(root(&fragment).name() == Some("p")); assert!(super::is_semi_header(&root(&fragment))); let fragment = Document::from("

Some other thing

"); assert!(!super::is_semi_header(&root(&fragment))); } #[test] fn test_iter_ul_field_list() { let fragment = Document::from( r#""#, ); assert_eq!(Some("ul"), root(&fragment).name()); assert_eq!( super::iter_ul_field_list(&root(&fragment)) .iter() .map(|(name, _)| name) .collect::>(), vec![&"Issues", &"Home"] ); } #[test] fn test_parse_ul_field_list() { let fragment = Document::from( r#""#, ); assert_eq!( super::parse_ul_field_list(&root(&fragment)), vec![ super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::BugDatabase( "https://example.com/issues".to_string() ), certainty: Some(super::Certainty::Confident), origin: None, }, super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Homepage("https://example.com".to_string()), certainty: Some(super::Certainty::Confident), origin: None, }, super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::Documentation( "https://example.com/docs".to_string() ), certainty: Some(super::Certainty::Confident), origin: None, }, super::UpstreamDatumWithMetadata { datum: super::UpstreamDatum::License("MIT".to_string()), certainty: Some(super::Certainty::Confident), origin: None, } ] ); } #[test] fn test_render() { let fragment = Document::from("

Some text

"); assert_eq!(super::render(&root(&fragment)), "Some text"); let fragment = Document::from("

Some bold text

"); assert_eq!(super::render(&root(&fragment)), "Some bold text"); } #[test] fn test_extract_paragraphs() { let fragment = Document::from( r#"

Some text

Some more text

"#, ); let mut paragraphs = Vec::new(); super::extract_paragraphs(root(&fragment).children(), &mut paragraphs, &mut vec![]); assert_eq!(paragraphs, vec!["Some text\n", "Some more text\n"]); } #[test] fn test_swh() { let document = Document::from(include_str!("testdata/swh.html")); let (description, metadata) = super::description_from_basic_soup(&document); assert_eq!( description, Some( r#"The Software Heritage Git Loader is a tool and a library to walk a local Git repository and inject into the SWH dataset all contained files that weren't known before. The main entry points are: * :class:swh.loader.git.loader.GitLoader for the main loader which can ingest either local or remote git repository's contents. This is the main implementation deployed in production. * :class:swh.loader.git.from_disk.GitLoaderFromDisk which ingests only local git clone repository. * :class:swh.loader.git.loader.GitLoaderFromArchive which ingests a git repository wrapped in an archive. "# .to_string() ) ); assert_eq!(metadata, vec![]); } } upstream-ontologist-0.2.4/src/repology.rs000064400000000000000000000062451046102023000166720ustar 00000000000000use crate::{ProviderError, UpstreamMetadata}; pub fn parse_repology_name(name: &str) -> Option<(&str, &str)> { let (family, name) = name.split_once(':')?; Some((family, name)) } fn perl_name_to_module(name: &str) -> String { name.split('-') .map(|x| { let mut x = x.chars(); x.next() .unwrap() .to_uppercase() .chain(x) .collect::() }) .collect::>() .join("::") } pub async fn find_upstream_from_repology(name: &str) -> Result { let (family, name) = parse_repology_name(name) .ok_or_else(|| ProviderError::Other("Invalid repology name".to_string()))?; match family { "python" => crate::providers::python::remote_pypi_metadata(name).await, "go" => crate::providers::go::remote_go_metadata(name), "ruby" => crate::providers::ruby::remote_rubygem_metadata(name).await, "node" => crate::providers::node::remote_npm_metadata(name).await, "perl" => 
crate::providers::perl::remote_cpan_data(&perl_name_to_module(name)).await, "rust" => crate::providers::rust::remote_crate_data(name).await, "haskell" => crate::providers::haskell::remote_hackage_data(name).await, "apmod" => Ok(UpstreamMetadata::new()), "coq" => Ok(UpstreamMetadata::new()), "cursors" => Ok(UpstreamMetadata::new()), "deadbeef" => Ok(UpstreamMetadata::new()), "emacs" => Ok(UpstreamMetadata::new()), "erlang" => Ok(UpstreamMetadata::new()), "fonts" => Ok(UpstreamMetadata::new()), "fortunes" => Ok(UpstreamMetadata::new()), "fusefs" => Ok(UpstreamMetadata::new()), "gimp" => Ok(UpstreamMetadata::new()), "gstreamer" => Ok(UpstreamMetadata::new()), "gtktheme" => Ok(UpstreamMetadata::new()), "raku" => Ok(UpstreamMetadata::new()), "ros" => Ok(UpstreamMetadata::new()), "haxe" => Ok(UpstreamMetadata::new()), "icons" => Ok(UpstreamMetadata::new()), "java" => Ok(UpstreamMetadata::new()), "js" => Ok(UpstreamMetadata::new()), "julia" => Ok(UpstreamMetadata::new()), "ladspa" => Ok(UpstreamMetadata::new()), "lisp" => Ok(UpstreamMetadata::new()), "lua" => Ok(UpstreamMetadata::new()), "lv2" => Ok(UpstreamMetadata::new()), "mingw" => Ok(UpstreamMetadata::new()), "nextcloud" => Ok(UpstreamMetadata::new()), "nginx" => Ok(UpstreamMetadata::new()), "nim" => Ok(UpstreamMetadata::new()), "ocaml" => Ok(UpstreamMetadata::new()), "opencpn" => Ok(UpstreamMetadata::new()), "rhythmbox" => Ok(UpstreamMetadata::new()), "texlive" => Ok(UpstreamMetadata::new()), "tryton" => Ok(UpstreamMetadata::new()), "vapoursynth" => Ok(UpstreamMetadata::new()), "vdr" => Ok(UpstreamMetadata::new()), "vim" => Ok(UpstreamMetadata::new()), "xdrv" => Ok(UpstreamMetadata::new()), "xemacs" => Ok(UpstreamMetadata::new()), name => { log::warn!("Unknown family: {}", name); Ok(UpstreamMetadata::new()) } } } upstream-ontologist-0.2.4/src/testdata/cpan.json000064400000000000000000000017361046102023000201110ustar 00000000000000{ "version" : "0.02", "directory" : false, "mime" : "text/x-script.perl-module", 
"download_url" : "https://cpan.metacpan.org/authors/id/C/CT/CTRLSOFT/Parse-Pidl-0.02.tar.gz", "sloc" : 5, "status" : "latest", "pod_lines" : [], "version_numified" : 0.02, "stat" : { "uid" : 1009, "gid" : 1009, "mode" : 33188, "mtime" : 1135865157, "size" : 316 }, "indexed" : true, "release" : "Parse-Pidl-0.02", "pod" : "", "author" : "CTRLSOFT", "deprecated" : false, "path" : "lib/Parse/Pidl.pm", "level" : 2, "distribution" : "Parse-Pidl", "module" : [ { "indexed" : true, "authorized" : true, "version" : "0.02", "name" : "Parse::Pidl", "version_numified" : 0.02 } ], "id" : "GIj7X35DE9AYnQ6_TrGxB_VTeOU", "binary" : false, "slop" : 0, "maturity" : "released", "name" : "Pidl.pm", "authorized" : true, "date" : "2005-12-29T14:10:44" } upstream-ontologist-0.2.4/src/testdata/crates.io.json000064400000000000000000000057171046102023000210620ustar 00000000000000{"categories":[],"crate":{"badges":[],"categories":[],"created_at":"2022-10-29T18:45:24.262450+00:00","description":"Friendly distributed version control system","documentation":"https://www.breezy-vcs.org/doc/","downloads":1111,"exact_match":false,"homepage":"https://www.breezy-vcs.org/","id":"breezy","keywords":[],"links":{"owner_team":"/api/v1/crates/breezy/owner_team","owner_user":"/api/v1/crates/breezy/owner_user","owners":"/api/v1/crates/breezy/owners","reverse_dependencies":"/api/v1/crates/breezy/reverse_dependencies","version_downloads":"/api/v1/crates/breezy/downloads","versions":null},"max_stable_version":"3.3.4","max_version":"3.3.4","name":"breezy","newest_version":"3.3.4","recent_downloads":400,"repository":"https://code.launchpad.net/brz","updated_at":"2023-06-04T22:19:43.454989+00:00","versions":[815616,653355]},"keywords":[],"versions":[{"audit_actions":[{"action":"publish","time":"2023-06-04T22:19:43.454989+00:00","user":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer 
Vernooij","url":"https://github.com/jelmer"}}],"bin_names":["brz"],"checksum":"fdc7f83f8febcd0dca02075844696a99d27d5d2014d9195ac0a1ae94c8393886","crate":"breezy","crate_size":25781,"created_at":"2023-06-04T22:19:43.454989+00:00","dl_path":"/api/v1/crates/breezy/3.3.4/download","downloads":630,"features":{"default":["i18n"],"i18n":["dep:gettext-rs"]},"has_lib":true,"id":815616,"lib_links":null,"license":"GPL-2.0+","links":{"authors":"/api/v1/crates/breezy/3.3.4/authors","dependencies":"/api/v1/crates/breezy/3.3.4/dependencies","version_downloads":"/api/v1/crates/breezy/3.3.4/downloads"},"num":"3.3.4","published_by":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer Vernooij","url":"https://github.com/jelmer"},"readme_path":"/api/v1/crates/breezy/3.3.4/readme","rust_version":null,"updated_at":"2023-06-04T22:19:43.454989+00:00","yanked":false},{"audit_actions":[{"action":"publish","time":"2022-10-29T18:45:24.262450+00:00","user":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer Vernooij","url":"https://github.com/jelmer"}}],"bin_names":["brz"],"checksum":"f41676959b4cb2f828b5f368e64df1048a2c1e6536aff5364e6183e980ecf0f9","crate":"breezy","crate_size":8620291,"created_at":"2022-10-29T18:45:24.262450+00:00","dl_path":"/api/v1/crates/breezy/3.3.0/download","downloads":480,"features":{},"has_lib":false,"id":653355,"lib_links":null,"license":"GPL-2.0+","links":{"authors":"/api/v1/crates/breezy/3.3.0/authors","dependencies":"/api/v1/crates/breezy/3.3.0/dependencies","version_downloads":"/api/v1/crates/breezy/3.3.0/downloads"},"num":"3.3.0","published_by":{"avatar":"https://avatars.githubusercontent.com/u/49032?v=4","id":38734,"login":"jelmer","name":"Jelmer 
Vernooij","url":"https://github.com/jelmer"},"readme_path":"/api/v1/crates/breezy/3.3.0/readme","rust_version":null,"updated_at":"2022-10-29T18:45:24.262450+00:00","yanked":false}]}upstream-ontologist-0.2.4/src/testdata/docdb-v.json000064400000000000000000000132041046102023000204770ustar 00000000000000{"shortname": "docdb-v", "name": "DocDB", "_id": "519a2fa6e88f3d77c1b32bdf", "url": "https://sourceforge.net/p/docdb-v/", "private": false, "short_description": "DocDB is a powerful and flexible collaborative web based document server which maintains a versioned list of documents. Information maintained in the database includes, author(s), title, topic(s), abstract, access restriction information, etc.", "creation_date": "2006-03-29", "summary": "", "external_homepage": "http://docdb-v.sourceforge.net", "video_url": "", "socialnetworks": [], "status": "active", "moved_to_url": "", "preferred_support_tool": "_url", "preferred_support_url": "http://sourceforge.net/tracker/?func=add&group_id=164024&atid=830064", "developers": [{"username": "mteck2", "name": "Marcia Teckenbrock", "url": "https://sourceforge.net/u/mteck2/"}, {"username": "vondo", "name": "Eric Vaandering", "url": "https://sourceforge.net/u/vondo/"}, {"username": "garren", "name": "L. 
Garren", "url": "https://sourceforge.net/u/garren/"}], "tools": [{"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/docdb-v/support/", "mount_label": "Support"}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/docdb-v/reviews/", "mount_label": "Reviews"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/docdb-v/files/", "mount_label": "Files"}, {"name": "summary", "mount_point": "summary", "url": "https://sourceforge.net/p/docdb-v/summary/", "mount_label": "Summary", "sourceforge_group_id": 164024}, {"name": "tickets", "mount_point": "feature-requests", "url": "https://sourceforge.net/p/docdb-v/feature-requests/", "mount_label": "Feature Requests", "api_url": "https://sourceforge.net/rest/p/docdb-v/feature-requests/"}, {"name": "tickets", "mount_point": "bugs", "url": "https://sourceforge.net/p/docdb-v/bugs/", "mount_label": "Bugs", "api_url": "https://sourceforge.net/rest/p/docdb-v/bugs/"}, {"name": "blog", "mount_point": "news", "url": "https://sourceforge.net/p/docdb-v/news/", "mount_label": "News", "api_url": "https://sourceforge.net/rest/p/docdb-v/news/"}, {"name": "cvs", "mount_point": "cvs", "url": "https://sourceforge.net/p/docdb-v/cvs/", "mount_label": "CVS"}, {"name": "git", "mount_point": "git", "url": "https://sourceforge.net/p/docdb-v/git/", "mount_label": "Git", "api_url": "https://sourceforge.net/rest/p/docdb-v/git/", "clone_url_https_anon": "https://git.code.sf.net/p/docdb-v/git", "clone_url_ro": "git://git.code.sf.net/p/docdb-v/git"}, {"name": "discussion", "mount_point": "discussion", "url": "https://sourceforge.net/p/docdb-v/discussion/", "mount_label": "Discussion", "api_url": "https://sourceforge.net/rest/p/docdb-v/discussion/"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/docdb-v/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/docdb-v/activity/"}, {"name": "mailman", "mount_point": 
"mailman", "url": "https://sourceforge.net/p/docdb-v/mailman/", "mount_label": "Mailing Lists"}], "labels": [], "categories": {"audience": [{"id": 2, "shortname": "end-users-desktop", "fullname": "End Users/Desktop", "fullpath": "Intended Audience :: by End-User Class :: End Users/Desktop"}], "developmentstatus": [{"id": 11, "shortname": "5-production-stable", "fullname": "5 - Production/Stable", "fullpath": "Development Status :: 5 - Production/Stable"}], "environment": [{"id": 237, "shortname": "web-based", "fullname": "Web-based", "fullpath": "User Interface :: Web-based"}], "language": [{"id": 176, "shortname": "perl", "fullname": "Perl", "fullpath": "Programming Language :: Perl"}], "license": [{"id": 15, "shortname": "gnu-general-public-license-version-2.0-gplv2", "fullname": "GNU General Public License version 2.0 (GPLv2)", "fullpath": "License :: OSI-Approved Open Source :: GNU General Public License version 2.0 (GPLv2)"}], "translation": [], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 235, "shortname": "os-independent", "fullname": "OS Independent", "fullpath": "Operating System :: OS Independent"}], "database": [{"id": 504, "shortname": "perl-dbi-dbd", "fullname": "Perl DBI/DBD", "fullpath": "Database Environment :: Database API :: Perl DBI/DBD"}, {"id": 524, "shortname": "mysql", "fullname": "MySQL", "fullpath": "Database Environment :: Network-based DBMS :: MySQL"}], "topic": [{"id": 607, "shortname": "project-management", "fullname": "Project Management", "fullpath": "Topic :: Business :: Project Management"}, {"id": 68, "shortname": "front-ends", "fullname": "Front-Ends", "fullpath": "Topic :: Database :: Front-Ends"}, {"id": 97, "shortname": "scientific-engineering", "fullname": "Scientific/Engineering", "fullpath": "Topic :: Scientific/Engineering"}]}, "icon_url": null, "screenshots": [{"url": "https://sourceforge.net/p/docdb-v/screenshot/68919.jpg", "thumbnail_url": 
"https://sourceforge.net/p/docdb-v/screenshot/68919.jpg/thumb", "caption": "List of Document Authors"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68189.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68189.jpg/thumb", "caption": "DocDB Homepage (reduced)"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68191.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68191.jpg/thumb", "caption": "Document View"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68195.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68195.jpg/thumb", "caption": "Meeting View"}, {"url": "https://sourceforge.net/p/docdb-v/screenshot/68193.jpg", "thumbnail_url": "https://sourceforge.net/p/docdb-v/screenshot/68193.jpg/thumb", "caption": "Calendar of Events"}]}upstream-ontologist-0.2.4/src/testdata/gtab.json000064400000000000000000000102531046102023000200770ustar 00000000000000{"shortname": "gtab", "name": "gtab", "_id": "51adf6eae88f3d037b5e35bb", "url": "https://sourceforge.net/p/gtab/", "private": false, "short_description": "'gtab' is a guitar, bass and drum tablature tool.", "creation_date": "2004-11-12", "summary": "", "external_homepage": "http://gtab.sourceforge.net", "video_url": "", "socialnetworks": [], "status": "active", "moved_to_url": "", "preferred_support_tool": "", "preferred_support_url": "", "developers": [{"username": "m0ta", "name": "Matthias Vogelgesang", "url": "https://sourceforge.net/u/m0ta/"}, {"username": "seavan", "name": "Seavan", "url": "https://sourceforge.net/u/seavan/"}, {"username": "jeanseb", "name": "Jean-S\u00e9bastien Valette", "url": "https://sourceforge.net/u/jeanseb/"}, {"username": "ctrlsoft", "name": "Jelmer Vernooij", "url": "https://sourceforge.net/u/ctrlsoft/"}, {"username": "ebbex", "name": "Ebbex", "url": "https://sourceforge.net/u/ebbex/"}, {"username": "ekolosov", "name": "BlackPanther", "url": "https://sourceforge.net/u/ekolosov/"}], "tools": [{"name": "summary", 
"mount_point": "summary", "url": "https://sourceforge.net/p/gtab/summary/", "mount_label": "Summary", "sourceforge_group_id": 124026}, {"name": "blog", "mount_point": "news", "url": "https://sourceforge.net/p/gtab/news/", "mount_label": "News", "api_url": "https://sourceforge.net/rest/p/gtab/news/"}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/gtab/reviews/", "mount_label": "Reviews"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/gtab/files/", "mount_label": "Files"}, {"name": "svn", "mount_point": "svn", "url": "https://sourceforge.net/p/gtab/svn/", "mount_label": "SVN", "api_url": "https://sourceforge.net/rest/p/gtab/svn/", "clone_url_https_anon": "https://svn.code.sf.net/p/gtab/svn/trunk", "clone_url_ro": "svn://svn.code.sf.net/p/gtab/svn/trunk"}, {"name": "tickets", "mount_point": "bugs", "url": "https://sourceforge.net/p/gtab/bugs/", "mount_label": "Bugs", "api_url": "https://sourceforge.net/rest/p/gtab/bugs/"}, {"name": "mailman", "mount_point": "mailman", "url": "https://sourceforge.net/p/gtab/mailman/", "mount_label": "Mailing Lists"}, {"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/gtab/support/", "mount_label": "Support"}, {"name": "cvs", "mount_point": "cvs", "url": "https://sourceforge.net/p/gtab/cvs/", "mount_label": "CVS"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/gtab/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/gtab/activity/"}], "labels": [], "categories": {"audience": [{"id": 2, "shortname": "end-users-desktop", "fullname": "End Users/Desktop", "fullpath": "Intended Audience :: by End-User Class :: End Users/Desktop"}], "developmentstatus": [{"id": 9, "shortname": "3-alpha", "fullname": "3 - Alpha", "fullpath": "Development Status :: 3 - Alpha"}], "environment": [{"id": 481, "shortname": "wxwidgets", "fullname": "wxWidgets", "fullpath": "User Interface :: 
Toolkits/Libraries :: wxWidgets"}, {"id": 479, "shortname": "qt", "fullname": "Qt", "fullpath": "User Interface :: Toolkits/Libraries :: Qt"}], "language": [{"id": 165, "shortname": "c-plus-plus", "fullname": "C++", "fullpath": "Programming Language :: C++"}], "license": [{"id": 15, "shortname": "gnu-general-public-license-version-2.0-gplv2", "fullname": "GNU General Public License version 2.0 (GPLv2)", "fullpath": "License :: OSI-Approved Open Source :: GNU General Public License version 2.0 (GPLv2)"}], "translation": [{"id": 275, "shortname": "english", "fullname": "English", "fullpath": "Translations :: English"}, {"id": 279, "shortname": "german", "fullname": "German", "fullpath": "Translations :: German"}], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 202, "shortname": "bsd", "fullname": "BSD", "fullpath": "Operating System :: BSD"}, {"id": 3616, "shortname": "windows", "fullname": "Windows", "fullpath": "Operating System :: Windows"}], "database": [], "topic": [{"id": 120, "shortname": "audio-editing", "fullname": "Audio Editing", "fullpath": "Topic :: Multimedia :: Sound/Audio :: Audio Editing"}]}, "icon_url": null, "screenshots": []}upstream-ontologist-0.2.4/src/testdata/hg-diff.json000064400000000000000000000063421046102023000204720ustar 00000000000000{"shortname": "hg-diff", "name": "hg-diff", "_id": "4f0389dc1be1ce4a97000127", "url": "https://sourceforge.net/p/hg-diff/", "private": false, "short_description": "hg-diff is a simple GUI program to browse mercurial revisions. 
It is used to display a summary of all changes between two revisions and to display a graphical comparison of the two versions of each changed file.", "creation_date": "2012-01-03", "summary": "A GUI program to compare mercurial revisions.", "external_homepage": "http://hg-diff.sourceforge.net/", "video_url": "", "socialnetworks": [], "status": "active", "moved_to_url": "", "preferred_support_tool": "tickets", "preferred_support_url": "", "developers": [{"username": "goetzpf", "name": "Goetz Pfeiffer", "url": "https://sourceforge.net/u/goetzpf/"}], "tools": [{"name": "hg", "mount_point": "code", "url": "https://sourceforge.net/p/hg-diff/code/", "mount_label": "Code", "api_url": "https://sourceforge.net/rest/p/hg-diff/code/", "clone_url_ro": "http://hg.code.sf.net/p/hg-diff/code"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/hg-diff/files/", "mount_label": "Files"}, {"name": "summary", "mount_point": "summary", "url": "https://sourceforge.net/p/hg-diff/summary/", "mount_label": "Summary", "sourceforge_group_id": 661105}, {"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/hg-diff/support/", "mount_label": "Support"}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/hg-diff/reviews/", "mount_label": "Reviews"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/hg-diff/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/hg-diff/activity/"}], "labels": [""], "categories": {"audience": [{"id": 3, "shortname": "developers", "fullname": "Developers", "fullpath": "Intended Audience :: by End-User Class :: Developers"}], "developmentstatus": [{"id": 11, "shortname": "5-production-stable", "fullname": "5 - Production/Stable", "fullpath": "Development Status :: 5 - Production/Stable"}], "environment": [{"id": 478, "shortname": "tk", "fullname": "Tk", "fullpath": "User Interface :: Toolkits/Libraries :: Tk"}], "language": 
[{"id": 178, "shortname": "python", "fullname": "Python", "fullpath": "Programming Language :: Python"}], "license": [{"id": 679, "shortname": "gnu-general-public-license-version-3.0-gplv3", "fullname": "GNU General Public License version 3.0 (GPLv3)", "fullpath": "License :: OSI-Approved Open Source :: GNU General Public License version 3.0 (GPLv3)"}], "translation": [{"id": 275, "shortname": "english", "fullname": "English", "fullpath": "Translations :: English"}], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 202, "shortname": "bsd", "fullname": "BSD", "fullpath": "Operating System :: BSD"}], "database": [], "topic": [{"id": 841, "shortname": "mercurial", "fullname": "Mercurial", "fullpath": "Topic :: Software Development :: Version Control :: Mercurial"}]}, "icon_url": null, "screenshots": [{"url": "https://sourceforge.net/p/hg-diff/screenshot/hg-diff-small.png", "thumbnail_url": "https://sourceforge.net/p/hg-diff/screenshot/hg-diff-small.png/thumb", "caption": "the hg-diff main window"}]}upstream-ontologist-0.2.4/src/testdata/npm.json000064400000000000000000000070351046102023000177600ustar 00000000000000{"_id":"leftpad","_rev":"9-5c1f9bbc73c2fe312cd34d79f82d64f3","name":"leftpad","description":"left pad numbers","dist-tags":{"latest":"0.0.1"},"versions":{"0.0.0":{"name":"leftpad","version":"0.0.0","description":"left pad numbers","main":"index.js","scripts":{"test":"echo \"Error: no test specified\" && exit 1"},"repository":{"type":"git","url":"https://github.com/tmcw/leftpad.git"},"keywords":["pad","numbers","formatting","format"],"author":{"name":"Tom 
MacWright","email":"tom@macwright.org"},"license":"BSD","bugs":{"url":"https://github.com/tmcw/leftpad/issues"},"_id":"leftpad@0.0.0","dist":{"shasum":"020c9ad0787216ba0f30d79d479b4b355d7d39c3","tarball":"https://registry.npmjs.org/leftpad/-/leftpad-0.0.0.tgz","integrity":"sha512-/t6U+lg9XlxfLONkDyRclQrn5Hzp6WQsC79hw/6DSySh3k2tyqgVTQCJTUnRP4PrqlUWqWX2zj13JeZSUGzLdw==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEUCIQCNH1vf5b0RkSCpAq8NRdjyCT7c5SSWcOHm/BoK2uraTgIgRkXvz+CIXcQR+vQzX5p4ruPrMNOnwJZ1OLOsb+DW5HM="}]},"_from":".","_npmVersion":"1.2.23","_npmUser":{"name":"tmcw","email":"tom@macwright.org"},"maintainers":[{"name":"tmcw","email":"tom@macwright.org"}],"directories":{},"deprecated":"Use the built-in String.padStart function instead"},"0.0.1":{"name":"leftpad","version":"0.0.1","description":"left pad numbers","main":"index.js","scripts":{"test":"node test.js"},"repository":{"type":"git","url":"git+https://github.com/tmcw/leftpad.git"},"keywords":["pad","numbers","formatting","format"],"files":["index.js"],"author":{"name":"Tom 
MacWright","email":"tom@macwright.org"},"license":"BSD-3-Clause","devDependencies":{"jsverify":"^0.8.2"},"gitHead":"db1442a0556c2b133627ffebf455a78a1ced64b9","bugs":{"url":"https://github.com/tmcw/leftpad/issues"},"homepage":"https://github.com/tmcw/leftpad#readme","_id":"leftpad@0.0.1","_shasum":"86b1a4de4face180ac545a83f1503523d8fed115","_from":".","_npmVersion":"4.2.0","_nodeVersion":"7.9.0","_npmUser":{"name":"tmcw","email":"tom+npm@macwright.org"},"dist":{"shasum":"86b1a4de4face180ac545a83f1503523d8fed115","tarball":"https://registry.npmjs.org/leftpad/-/leftpad-0.0.1.tgz","integrity":"sha512-kBAuxBQJlJ85LDc+SnGSX6gWJnJR9Qk4lbgXmz/qPfCOCieCk7BgoN3YvzoNr5BUjqxQDOQxawJJvXXd6c+6Mg==","signatures":[{"keyid":"SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA","sig":"MEYCIQCP9QFGBbsmVNj2JCpsX+YiML3z4OaES8TTiHZtHZNctwIhAKydP47vpc5XAMSz88TpgUT6S96t/TLu+RQFvkYIUACe"}]},"maintainers":[{"name":"tmcw","email":"tom@macwright.org"}],"_npmOperationalInternal":{"host":"packages-18-east.internal.npmjs.com","tmp":"tmp/leftpad-0.0.1.tgz_1493781560715_0.519889178685844"},"directories":{},"deprecated":"Use the built-in String.padStart function instead"}},"readme":"## leftpad\n\n[![CircleCI](https://circleci.com/gh/tmcw/leftpad/tree/master.svg?style=shield)](https://circleci.com/gh/tmcw/leftpad/tree/master)\n\nLike the [pad module](https://github.com/wdavidw/node-pad), except I'll remember\nthe argument order.\n\n```js\nvar leftpad = require('leftpad');\n\nleftpad(5, 10);\n'0000000005'\n```\n","maintainers":[{"email":"tom@macwright.org","name":"tmcw"}],"time":{"modified":"2022-06-19T11:27:17.024Z","created":"2013-06-19T01:28:05.244Z","0.0.0":"2013-06-19T01:28:06.350Z","0.0.1":"2017-05-03T03:19:21.248Z"},"author":{"name":"Tom 
MacWright","email":"tom@macwright.org"},"repository":{"type":"git","url":"git+https://github.com/tmcw/leftpad.git"},"homepage":"https://github.com/tmcw/leftpad#readme","keywords":["pad","numbers","formatting","format"],"bugs":{"url":"https://github.com/tmcw/leftpad/issues"},"license":"BSD-3-Clause","readmeFilename":"README.md"}upstream-ontologist-0.2.4/src/testdata/pecl.html000064400000000000000000000526021046102023000201040ustar 00000000000000 PECL :: Package :: smbclient
Login  |  Packages  |  Support  |  Bugs
Top Level :: File System :: smbclient

smbclient

Package Information
Summary A PHP wrapper for libsmbclient
Maintainers Eduardo Bacchi Kienetz (lead) [details]
Remi Collet (contributor) [details]
License BSD 2-clause
Description smbclient is a PHP extension that uses Samba's libsmbclient library to provide
Samba related functions and 'smb' streams to PHP programs.
Homepage https://github.com/eduardok/libsmbclient-php

[ Latest Tarball ] [ Changelog ] [ View Statistics ]
[ Browse Source ] [ Package Bugs ]

Available Releases
Version State Release Date Downloads  
1.1.1 stable 2023-04-17 smbclient-1.1.1.tgz (34.6kB) [ Changelog ]
1.1.0 stable 2023-04-01 smbclient-1.1.0.tgz (34.3kB) [ Changelog ]
1.0.7 stable 2023-04-01 smbclient-1.0.7.tgz (32.5kB) [ Changelog ]
1.0.6 stable 2021-02-28 smbclient-1.0.6.tgz (32.4kB) [ Changelog ]
1.0.5 stable 2021-02-11 smbclient-1.0.5.tgz (32.2kB) [ Changelog ]
1.0.4 stable 2021-01-22 smbclient-1.0.4.tgz (32.1kB) [ Changelog ]
1.0.3 stable 2021-01-21 smbclient-1.0.3.tgz (31.9kB) [ Changelog ]
1.0.2 stable 2021-01-20 smbclient-1.0.2.tgz (31.8kB) [ Changelog ]
1.0.1 stable 2020-12-29 smbclient-1.0.1.tgz (31.6kB) [ Changelog ]
1.0.0 stable 2018-12-24 smbclient-1.0.0.tgz (31.3kB) [ Changelog ]
0.9.0 stable 2017-02-10 smbclient-0.9.0.tgz (31.1kB) [ Changelog ]
0.8.0 stable 2016-03-01 smbclient-0.8.0.tgz (30.2kB) [ Changelog ]
0.8.0RC1 beta 2015-12-08 smbclient-0.8.0RC1.tgz (30.1kB) [ Changelog ]


Dependencies
Release 1.1.1: PEAR Package: PEAR 1.9.5 or newer
PHP Version: PHP 5.3.7 or newer
Release 1.1.0: PHP Version: PHP 5.3.7 or newer
PEAR Package: PEAR 1.9.5 or newer
Release 1.0.7: PHP Version: PHP 5.3.0 or newer
PEAR Package: PEAR 1.9.5 or newer
Dependencies for older releases can be found on the release overview page.
PRIVACY POLICY  |  CREDITS
Copyright © 2001-2023 The PHP Group
All rights reserved.
Last updated: Mon Jun 01 07:05:01 2020 UTC
Bandwidth and hardware provided by: pair Networks
upstream-ontologist-0.2.4/src/testdata/pypi.json000064400000000000000000000410221046102023000201410ustar 00000000000000{"info":{"author":null,"author_email":null,"bugtrack_url":null,"classifiers":["Development Status :: 4 - Beta","License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)","Operating System :: POSIX","Programming Language :: Python :: 3.10","Programming Language :: Python :: 3.11","Programming Language :: Python :: 3.12","Programming Language :: Python :: 3.8","Programming Language :: Python :: 3.9","Programming Language :: Python :: Implementation :: CPython","Programming Language :: Python :: Implementation :: PyPy"],"description":"A Python implementation of 3-way merge of texts.\n\nGiven BASE, OTHER, THIS, tries to produce a combined text\nincorporating the changes from both BASE->OTHER and BASE->THIS.\nAll three will typically be sequences of lines.\n\nUsage\n=====\n\nFrom the command-line::\n\n $ echo foo > mine\n $ echo bar > base\n $ echo blah > other\n $ python -m merge3 mine base other > merged\n $ cat merged\n\nOr from Python::\n\n >>> import merge3\n >>> m3 = merge3.Merge3(\n ... ['common\\n', 'base\\n'],\n ... ['common\\n', 'a\\n'],\n ... 
['common\\n', 'b\\n'])\n >>> list(m3.merge_annotated())\n ['u | common\\n', '<<<<\\n', 'A | a\\n', '----\\n', 'B | b\\n', '>>>>\\n']\n","description_content_type":"text/x-rst","docs_url":null,"download_url":null,"downloads":{"last_day":-1,"last_month":-1,"last_week":-1},"dynamic":null,"home_page":null,"keywords":null,"license":"GNU GPLv2 or later","maintainer":null,"maintainer_email":"Breezy Developers ","name":"merge3","package_url":"https://pypi.org/project/merge3/","platform":null,"project_url":"https://pypi.org/project/merge3/","project_urls":{"GitHub":"https://github.com/breezy-team/merge3","Homepage":"https://www.breezy-vcs.org/"},"provides_extra":null,"release_url":"https://pypi.org/project/merge3/0.0.15/","requires_dist":["ruff==0.4.3; extra == \"dev\""],"requires_python":">=3.8","summary":"Python implementation of 3-way merge","version":"0.0.15","yanked":false,"yanked_reason":null},"last_serial":23067049,"releases":{"0.0.1":[{"comment_text":"","digests":{"blake2b_256":"d4d1804a713c68d7c83e71c23318e52a0613ee3053e7fbe65f143e3de39bda13","md5":"c9103afe839ec1ab60e2cc3f1ac31ef3","sha256":"d3235b34beeca02cae6340c84efe308589bae472b6f72d4eaf879a3a21864b3c"},"downloads":-1,"filename":"merge3-0.0.1.tar.gz","has_sig":false,"md5_digest":"c9103afe839ec1ab60e2cc3f1ac31ef3","packagetype":"sdist","python_version":"source","requires_python":null,"size":8193,"upload_time":"2018-07-12T00:16:05","upload_time_iso_8601":"2018-07-12T00:16:05.950804Z","url":"https://files.pythonhosted.org/packages/d4/d1/804a713c68d7c83e71c23318e52a0613ee3053e7fbe65f143e3de39bda13/merge3-0.0.1.tar.gz","yanked":false,"yanked_reason":null}],"0.0.10":[{"comment_text":"","digests":{"blake2b_256":"650d529f846cbb2b257404c6d371c1ab5d8925c83b3ceb80efdfdbe5b1a05391","md5":"2e6f88710525d2db6c3f839a489eba74","sha256":"a6b25ae1d1b05b3325242360ced4d5a266de09e2cb2527f9a6dbeb17291bdfe7"},"downloads":-1,"filename":"merge3-0.0.10-py3-none-any.whl","has_sig":false,"md5_digest":"2e6f88710525d2db6c3f839a489eba74","pac
kagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":17873,"upload_time":"2022-10-20T21:52:20","upload_time_iso_8601":"2022-10-20T21:52:20.742331Z","url":"https://files.pythonhosted.org/packages/65/0d/529f846cbb2b257404c6d371c1ab5d8925c83b3ceb80efdfdbe5b1a05391/merge3-0.0.10-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"dda5d31be0033a9a7164282a5be0e00f66441a4e9f5fe222735b48d3fd431adf","md5":"3a1cae17cf54ac2a161e244c5da226e5","sha256":"841c688fc0b4054e30f6672a7af5acc9686d2b9875f9abd3954a14b590e4f626"},"downloads":-1,"filename":"merge3-0.0.10.tar.gz","has_sig":false,"md5_digest":"3a1cae17cf54ac2a161e244c5da226e5","packagetype":"sdist","python_version":"source","requires_python":null,"size":17387,"upload_time":"2022-10-20T21:52:23","upload_time_iso_8601":"2022-10-20T21:52:23.363088Z","url":"https://files.pythonhosted.org/packages/dd/a5/d31be0033a9a7164282a5be0e00f66441a4e9f5fe222735b48d3fd431adf/merge3-0.0.10.tar.gz","yanked":false,"yanked_reason":null}],"0.0.11":[{"comment_text":"","digests":{"blake2b_256":"402863ad6466eb4f0237476f822479567dfae0ccd512cb0d128d471fd6e48774","md5":"706b507d120b71f10c1839f45f4f40a7","sha256":"ef82fd95aeac124ea9a1616a6e2ef9368bc4ed0ba3cce5f21323ee9028d86206"},"downloads":-1,"filename":"merge3-0.0.11-py3-none-any.whl","has_sig":false,"md5_digest":"706b507d120b71f10c1839f45f4f40a7","packagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":17875,"upload_time":"2022-10-28T00:04:16","upload_time_iso_8601":"2022-10-28T00:04:16.219195Z","url":"https://files.pythonhosted.org/packages/40/28/63ad6466eb4f0237476f822479567dfae0ccd512cb0d128d471fd6e48774/merge3-0.0.11-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"1bef6543392d9dcca7694c9c9bff93562107c3a3c104165f98348de41a080cd3","md5":"ee9a2b9054b3123d6a783fc94f388ee6","sha256":"859ee1c31595c148f0961c55402779bc98c1c63dfdfca2f2cd7d443be6f0ab9c"},"downl
oads":-1,"filename":"merge3-0.0.11.tar.gz","has_sig":false,"md5_digest":"ee9a2b9054b3123d6a783fc94f388ee6","packagetype":"sdist","python_version":"source","requires_python":null,"size":16511,"upload_time":"2022-10-28T00:04:18","upload_time_iso_8601":"2022-10-28T00:04:18.241920Z","url":"https://files.pythonhosted.org/packages/1b/ef/6543392d9dcca7694c9c9bff93562107c3a3c104165f98348de41a080cd3/merge3-0.0.11.tar.gz","yanked":false,"yanked_reason":null}],"0.0.12":[{"comment_text":"","digests":{"blake2b_256":"125627d09d7bcc9222522f71aac3b0b1696132af4a11f9e0cdccd53bb32b6bfe","md5":"8ced7a7d8feff0235e3f58b51af24a0e","sha256":"ae65e506488778a046af040e424ceb041d0cee4b493c2ef2daac852cc92a8b84"},"downloads":-1,"filename":"merge3-0.0.12-py3-none-any.whl","has_sig":false,"md5_digest":"8ced7a7d8feff0235e3f58b51af24a0e","packagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":17874,"upload_time":"2022-11-20T12:25:16","upload_time_iso_8601":"2022-11-20T12:25:16.528915Z","url":"https://files.pythonhosted.org/packages/12/56/27d09d7bcc9222522f71aac3b0b1696132af4a11f9e0cdccd53bb32b6bfe/merge3-0.0.12-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"7d1d1a2a0ff25b18cc3b7af41180821099696c2c34e4459fff09a2d19729281e","md5":"1e62f7b01f2336f8771ec2edb6b9f991","sha256":"fd3fc873dcf60b9944606d125f72643055c739ff41793979ccbdea3ea6818d36"},"downloads":-1,"filename":"merge3-0.0.12.tar.gz","has_sig":false,"md5_digest":"1e62f7b01f2336f8771ec2edb6b9f991","packagetype":"sdist","python_version":"source","requires_python":null,"size":17454,"upload_time":"2022-11-20T12:25:18","upload_time_iso_8601":"2022-11-20T12:25:18.409844Z","url":"https://files.pythonhosted.org/packages/7d/1d/1a2a0ff25b18cc3b7af41180821099696c2c34e4459fff09a2d19729281e/merge3-0.0.12.tar.gz","yanked":false,"yanked_reason":null}],"0.0.13":[{"comment_text":"","digests":{"blake2b_256":"61fc0eb72422b8f64643ca7d9bc2b9076ec550388ff8e1022425fa11e7012e94","md5":"da44d972
6a6b82c0146b6235d8e465cd","sha256":"4f8ee5f8c61823ff7c3ae68e6d0d2966bbf249bb08dc5e6f2abffd8d97c62cf3"},"downloads":-1,"filename":"merge3-0.0.13-py3-none-any.whl","has_sig":false,"md5_digest":"da44d9726a6b82c0146b6235d8e465cd","packagetype":"bdist_wheel","python_version":"py3","requires_python":null,"size":14680,"upload_time":"2023-02-07T20:39:24","upload_time_iso_8601":"2023-02-07T20:39:24.442838Z","url":"https://files.pythonhosted.org/packages/61/fc/0eb72422b8f64643ca7d9bc2b9076ec550388ff8e1022425fa11e7012e94/merge3-0.0.13-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"dc91647a2942b6f308c7dce358bec770fe62ee0689cfd1dd218b66e244acde7d","md5":"478955e6f5563644e0e62aec0c572aa5","sha256":"8abda1d2d49776323d23d09bfdd80d943a57d43d28d6152ffd2c87956a9b6b54"},"downloads":-1,"filename":"merge3-0.0.13.tar.gz","has_sig":false,"md5_digest":"478955e6f5563644e0e62aec0c572aa5","packagetype":"sdist","python_version":"source","requires_python":null,"size":13603,"upload_time":"2023-02-07T20:39:26","upload_time_iso_8601":"2023-02-07T20:39:26.975182Z","url":"https://files.pythonhosted.org/packages/dc/91/647a2942b6f308c7dce358bec770fe62ee0689cfd1dd218b66e244acde7d/merge3-0.0.13.tar.gz","yanked":false,"yanked_reason":null}],"0.0.14":[{"comment_text":"","digests":{"blake2b_256":"17a85e085653871c70d1b139d7888d0f79ba82d130106157cfc16a9e8078d086","md5":"8546ae3fc037d69070d23ec5a0d8a831","sha256":"7ac0aadbd9ff5bea89ba9bd3796cb26e0df361dc86a381055ce0a45a7b8726ec"},"downloads":-1,"filename":"merge3-0.0.14-py3-none-any.whl","has_sig":false,"md5_digest":"8546ae3fc037d69070d23ec5a0d8a831","packagetype":"bdist_wheel","python_version":"py3","requires_python":">=3.6","size":7950,"upload_time":"2023-09-17T11:50:26","upload_time_iso_8601":"2023-09-17T11:50:26.049150Z","url":"https://files.pythonhosted.org/packages/17/a8/5e085653871c70d1b139d7888d0f79ba82d130106157cfc16a9e8078d086/merge3-0.0.14-py3-none-any.whl","yanked":false,"yanked_reason":null},{"c
omment_text":"","digests":{"blake2b_256":"af4098be4a5641b0244be5881ff5e00057f8227ff183d8675a697bdfeae43b1a","md5":"e20464fec7d124550c3e2c1616c1a0e9","sha256":"30406e99386f4a65280fb9c43e681890fa2a1d839cac2759d156c7cc16030159"},"downloads":-1,"filename":"merge3-0.0.14.tar.gz","has_sig":false,"md5_digest":"e20464fec7d124550c3e2c1616c1a0e9","packagetype":"sdist","python_version":"source","requires_python":">=3.6","size":17742,"upload_time":"2023-09-17T11:50:27","upload_time_iso_8601":"2023-09-17T11:50:27.713705Z","url":"https://files.pythonhosted.org/packages/af/40/98be4a5641b0244be5881ff5e00057f8227ff183d8675a697bdfeae43b1a/merge3-0.0.14.tar.gz","yanked":false,"yanked_reason":null}],"0.0.15":[{"comment_text":"","digests":{"blake2b_256":"d2cd67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8","md5":"0672769e1ac41c6490c68d9d64d142e2","sha256":"97b5f54a17181be261fde03a10cf55f34f910b19fe81e21cbd5fe87a3395c4a6"},"downloads":-1,"filename":"merge3-0.0.15-py3-none-any.whl","has_sig":false,"md5_digest":"0672769e1ac41c6490c68d9d64d142e2","packagetype":"bdist_wheel","python_version":"py3","requires_python":">=3.8","size":14987,"upload_time":"2024-05-05T16:18:24","upload_time_iso_8601":"2024-05-05T16:18:24.296532Z","url":"https://files.pythonhosted.org/packages/d2/cd/67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8/merge3-0.0.15-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"91e1fe09c161f80b5a8d8ede3270eadedac7e59a64ea1c313b97c386234480c1","md5":"26b5483e68cc6c13f0d86edbb6daa72a","sha256":"d3eac213d84d56dfc9e39552ac8246c7860a940964ebeed8a8be4422f6492baf"},"downloads":-1,"filename":"merge3-0.0.15.tar.gz","has_sig":false,"md5_digest":"26b5483e68cc6c13f0d86edbb6daa72a","packagetype":"sdist","python_version":"source","requires_python":">=3.8","size":14121,"upload_time":"2024-05-05T16:18:26","upload_time_iso_8601":"2024-05-05T16:18:26.027455Z","url":"https://files.pythonhosted.org/packages/91/e1/fe09c161f80b5a8d
8ede3270eadedac7e59a64ea1c313b97c386234480c1/merge3-0.0.15.tar.gz","yanked":false,"yanked_reason":null}],"0.0.2":[{"comment_text":"","digests":{"blake2b_256":"8b5cbaef7edc9e570a48a82be3bcf92db86fe3d3dcbebe40ef1113e614149f58","md5":"d5b14233bd3de5b93bb046272fa57c79","sha256":"35f2d6a5ea45d6dc16afb577205e5dd131146f2f8aa446e34f4228eb09afc52f"},"downloads":-1,"filename":"merge3-0.0.2.tar.gz","has_sig":false,"md5_digest":"d5b14233bd3de5b93bb046272fa57c79","packagetype":"sdist","python_version":"source","requires_python":null,"size":8937,"upload_time":"2018-08-02T18:36:08","upload_time_iso_8601":"2018-08-02T18:36:08.506432Z","url":"https://files.pythonhosted.org/packages/8b/5c/baef7edc9e570a48a82be3bcf92db86fe3d3dcbebe40ef1113e614149f58/merge3-0.0.2.tar.gz","yanked":false,"yanked_reason":null}],"0.0.3":[{"comment_text":"","digests":{"blake2b_256":"d50e500030c5835802ff4732a6fff92db94de3425e92bd4ed0800d4ec55b78c8","md5":"f7eff363c5f9e8b26eff2ff4ee1136da","sha256":"891d38e7e03498d5e482e19a701abf8b823b15b86966bd5e5c00da7280500da6"},"downloads":-1,"filename":"merge3-0.0.3.tar.gz","has_sig":false,"md5_digest":"f7eff363c5f9e8b26eff2ff4ee1136da","packagetype":"sdist","python_version":"source","requires_python":null,"size":16637,"upload_time":"2020-05-23T22:20:20","upload_time_iso_8601":"2020-05-23T22:20:20.037323Z","url":"https://files.pythonhosted.org/packages/d5/0e/500030c5835802ff4732a6fff92db94de3425e92bd4ed0800d4ec55b78c8/merge3-0.0.3.tar.gz","yanked":false,"yanked_reason":null}],"0.0.4":[{"comment_text":"","digests":{"blake2b_256":"5b51603938bac36447a2416d58087a21acb488308b6ad90554a5c29a71b47ad3","md5":"7369a27b187f4d87350a5784189c63ec","sha256":"33c896c436f319f5344647abd3b12d4738bc45bd7db490fe54f997f5484a14c0"},"downloads":-1,"filename":"merge3-0.0.4.tar.gz","has_sig":false,"md5_digest":"7369a27b187f4d87350a5784189c63ec","packagetype":"sdist","python_version":"source","requires_python":null,"size":16647,"upload_time":"2021-02-05T01:56:29","upload_time_iso_8601":"2021-02-05
T01:56:29.537070Z","url":"https://files.pythonhosted.org/packages/5b/51/603938bac36447a2416d58087a21acb488308b6ad90554a5c29a71b47ad3/merge3-0.0.4.tar.gz","yanked":false,"yanked_reason":null}],"0.0.7":[{"comment_text":"","digests":{"blake2b_256":"9a3acd00ce76e31e5a011ea1025c6063c1897b34650f7f6752d26715546bd860","md5":"8e7dc81be76ce440069858c0552d9699","sha256":"0cd4eecba802733866ace6ca4e68eacbea31e4ca88eb410a09622d9631520258"},"downloads":-1,"filename":"merge3-0.0.7.tar.gz","has_sig":false,"md5_digest":"8e7dc81be76ce440069858c0552d9699","packagetype":"sdist","python_version":"source","requires_python":null,"size":16909,"upload_time":"2021-03-13T00:34:24","upload_time_iso_8601":"2021-03-13T00:34:24.302010Z","url":"https://files.pythonhosted.org/packages/9a/3a/cd00ce76e31e5a011ea1025c6063c1897b34650f7f6752d26715546bd860/merge3-0.0.7.tar.gz","yanked":false,"yanked_reason":null}],"0.0.8":[{"comment_text":"","digests":{"blake2b_256":"9ea0ebac6ed4b7c174832616731c3466e86768a2b856e03043ac55e1e5d49a5a","md5":"225c89dbf56f7b62767ac8a8750d5af4","sha256":"4ef90eda29fb6f291e5d5ee1103ae97e295e15826ef17abee3098f5ce46fe18b"},"downloads":-1,"filename":"merge3-0.0.8.tar.gz","has_sig":false,"md5_digest":"225c89dbf56f7b62767ac8a8750d5af4","packagetype":"sdist","python_version":"source","requires_python":null,"size":17376,"upload_time":"2021-03-13T13:36:11","upload_time_iso_8601":"2021-03-13T13:36:11.269467Z","url":"https://files.pythonhosted.org/packages/9e/a0/ebac6ed4b7c174832616731c3466e86768a2b856e03043ac55e1e5d49a5a/merge3-0.0.8.tar.gz","yanked":false,"yanked_reason":null}],"0.0.9":[{"comment_text":"","digests":{"blake2b_256":"7b8693994c5a6581ab7792ab917e5abfa509f7a1719f865e2f92a22304469176","md5":"f5f9edc8654c312840475b897cb12177","sha256":"e945c08c2aadcfd2d88c1511b839b90d3ce601bb5b9a39809d74b231bfc5ebcc"},"downloads":-1,"filename":"merge3-0.0.9.tar.gz","has_sig":false,"md5_digest":"f5f9edc8654c312840475b897cb12177","packagetype":"sdist","python_version":"source","requires_python":
null,"size":17177,"upload_time":"2022-09-25T01:10:06","upload_time_iso_8601":"2022-09-25T01:10:06.352005Z","url":"https://files.pythonhosted.org/packages/7b/86/93994c5a6581ab7792ab917e5abfa509f7a1719f865e2f92a22304469176/merge3-0.0.9.tar.gz","yanked":false,"yanked_reason":null}]},"urls":[{"comment_text":"","digests":{"blake2b_256":"d2cd67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8","md5":"0672769e1ac41c6490c68d9d64d142e2","sha256":"97b5f54a17181be261fde03a10cf55f34f910b19fe81e21cbd5fe87a3395c4a6"},"downloads":-1,"filename":"merge3-0.0.15-py3-none-any.whl","has_sig":false,"md5_digest":"0672769e1ac41c6490c68d9d64d142e2","packagetype":"bdist_wheel","python_version":"py3","requires_python":">=3.8","size":14987,"upload_time":"2024-05-05T16:18:24","upload_time_iso_8601":"2024-05-05T16:18:24.296532Z","url":"https://files.pythonhosted.org/packages/d2/cd/67e3bd2bba4bdc22aee41b27aa865500254c87ab48631ff64008b6ecc1a8/merge3-0.0.15-py3-none-any.whl","yanked":false,"yanked_reason":null},{"comment_text":"","digests":{"blake2b_256":"91e1fe09c161f80b5a8d8ede3270eadedac7e59a64ea1c313b97c386234480c1","md5":"26b5483e68cc6c13f0d86edbb6daa72a","sha256":"d3eac213d84d56dfc9e39552ac8246c7860a940964ebeed8a8be4422f6492baf"},"downloads":-1,"filename":"merge3-0.0.15.tar.gz","has_sig":false,"md5_digest":"26b5483e68cc6c13f0d86edbb6daa72a","packagetype":"sdist","python_version":"source","requires_python":">=3.8","size":14121,"upload_time":"2024-05-05T16:18:26","upload_time_iso_8601":"2024-05-05T16:18:26.027455Z","url":"https://files.pythonhosted.org/packages/91/e1/fe09c161f80b5a8d8ede3270eadedac7e59a64ea1c313b97c386234480c1/merge3-0.0.15.tar.gz","yanked":false,"yanked_reason":null}],"vulnerabilities":[]} upstream-ontologist-0.2.4/src/testdata/rubygem.json000064400000000000000000000021371046102023000206360ustar 
00000000000000{"name":"bullet","downloads":122616138,"version":"7.2.0","version_created_at":"2024-07-12T13:34:07.552Z","version_downloads":498664,"platform":"ruby","authors":"Richard Huang","info":"help to kill N+1 queries and unused eager loading.","licenses":["MIT"],"metadata":{"changelog_uri":"https://github.com/flyerhzm/bullet/blob/main/CHANGELOG.md","source_code_uri":"https://github.com/flyerhzm/bullet"},"yanked":false,"sha":"3502c8a1b4f5db77fc8f6d3dd89a6a8c1a968219a45e12ae6cbaa9c09967ea89","spec_sha":"c8cfdc6562e3e85302c624d2464c2f77b3a50a272ba15d64fcd021107fdaa0b8","project_uri":"https://rubygems.org/gems/bullet","gem_uri":"https://rubygems.org/gems/bullet-7.2.0.gem","homepage_uri":"https://github.com/flyerhzm/bullet","wiki_uri":null,"documentation_uri":null,"mailing_list_uri":null,"source_code_uri":"https://github.com/flyerhzm/bullet","bug_tracker_uri":null,"changelog_uri":"https://github.com/flyerhzm/bullet/blob/main/CHANGELOG.md","funding_uri":null,"dependencies":{"development":[],"runtime":[{"name":"activesupport","requirements":"\u003e= 3.0.0"},{"name":"uniform_notifier","requirements":"~\u003e 1.11"}]}}upstream-ontologist-0.2.4/src/testdata/swh.html000064400000000000000000000041751046102023000177640ustar 00000000000000

swh-loader-git

The Software Heritage Git Loader is a tool and a library to walk a local Git repository and inject into the SWH dataset all contained files that weren't known before.

The main entry points are:

  • :class:swh.loader.git.loader.GitLoader for the main loader which can ingest either local or remote git repository's contents. This is the main implementation deployed in production.

  • :class:swh.loader.git.from_disk.GitLoaderFromDisk which ingests only local git clone repository.

  • :class:swh.loader.git.loader.GitLoaderFromArchive which ingests a git repository wrapped in an archive.

License

This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

See top-level LICENSE file for the full text of the GNU General Public License along with this program.

Dependencies

Runtime

  • python3
  • python3-dulwich
  • python3-retrying
  • python3-swh.core
  • python3-swh.model
  • python3-swh.storage
  • python3-swh.scheduler

Test

  • python3-nose

Requirements

  • implementation language, Python3
  • coding guidelines: conform to PEP8
  • Git access: via dulwich

CLI Run

You can run the loader from a remote origin (loader) or from an origin on disk (from_disk) directly by calling:

swh loader -C <config-file> run git <git-repository-url>

or "git_disk".

Configuration sample

/tmp/git.yml:

storage:
  cls: remote
  args:
    url: http://localhost:5002/
upstream-ontologist-0.2.4/src/testdata/zsh.json000064400000000000000000000173701046102023000177750ustar 00000000000000{"shortname": "zsh", "name": "zsh", "_id": "5102b27f5fcbc919e876e8a3", "url": "https://sourceforge.net/p/zsh/", "private": false, "short_description": "zsh is a shell probably most similar to ksh, but with countless enhancements and differences.", "creation_date": "2000-03-27", "summary": "Customizable command shell for UNIX-like environments", "external_homepage": "http://zsh.sourceforge.net/", "video_url": null, "socialnetworks": [{"accounturl": "", "socialnetwork": "Twitter"}, {"accounturl": null, "socialnetwork": "Facebook"}], "status": "active", "moved_to_url": "", "preferred_support_tool": "support-requests", "preferred_support_url": "", "developers": [{"username": "barts", "name": "Bart Schaefer", "url": "https://sourceforge.net/u/barts/"}, {"username": "adamspiers", "name": "Adam Spiers", "url": "https://sourceforge.net/u/adamspiers/"}, {"username": "romkatv314", "name": "Roman Perepelitsa", "url": "https://sourceforge.net/u/romkatv314/"}, {"username": "jtakimoto", "name": "Jun T.", "url": "https://sourceforge.net/u/jtakimoto/"}, {"username": "mikamika", "name": "Mikael Magnusson", "url": "https://sourceforge.net/u/mikamika/"}, {"username": "pws", "name": "Peter Stephenson", "url": "https://sourceforge.net/u/pws/"}, {"username": "danielshahaf", "name": "Daniel Shahaf", "url": "https://sourceforge.net/u/danielshahaf/"}, {"username": "packersv", "name": "Paul Ackersviller", "url": "https://sourceforge.net/u/packersv/"}, {"username": "okdana", "name": "dana", "url": "https://sourceforge.net/u/okdana/"}, {"username": "bor", "name": "Andrey Borzenkov", "url": "https://sourceforge.net/u/bor/"}, {"username": "pcppopper", "name": "Nikolai Weibull", "url": "https://sourceforge.net/u/pcppopper/"}, {"username": "rudi_s", "name": "Simon Ruderich", "url": "https://sourceforge.net/u/rudi_s/"}, {"username": "acs", "name": "Vin Shelton", "url": 
"https://sourceforge.net/u/acs/"}, {"username": "clint", "name": "Clint Adams", "url": "https://sourceforge.net/u/clint/"}, {"username": "pdpennock", "name": "Phil Pennock", "url": "https://sourceforge.net/u/pdpennock/"}, {"username": "wayned", "name": "Wayne Davison", "url": "https://sourceforge.net/u/wayned/"}, {"username": "wm3", "name": "Motoi WASHIDA", "url": "https://sourceforge.net/u/wm3/"}, {"username": "hanpt", "name": "hpt", "url": "https://sourceforge.net/u/hanpt/"}, {"username": "phy1729", "name": "phy1729", "url": "https://sourceforge.net/u/phy1729/"}, {"username": "opk", "name": "Oliver Kiddle", "url": "https://sourceforge.net/u/opk/"}, {"username": "f_rosencrantz", "name": "Felix Rosencrantz", "url": "https://sourceforge.net/u/f_rosencrantz/"}, {"username": "akr", "name": "Tanaka Akira", "url": "https://sourceforge.net/u/akr/"}, {"username": "blueyed", "name": "daniel hahler", "url": "https://sourceforge.net/u/blueyed/"}, {"username": "eitanadler", "name": "Eitan Adler", "url": "https://sourceforge.net/u/eitanadler/"}, {"username": "schazelas", "name": "Stephane Chazelas", "url": "https://sourceforge.net/u/schazelas/"}, {"username": "wischnow", "name": "Sven Wischnowsky", "url": "https://sourceforge.net/u/wischnow/"}, {"username": "chmou", "name": "Chmouel Boudjnah", "url": "https://sourceforge.net/u/chmou/"}, {"username": "illua", "name": "Eric Cook", "url": "https://sourceforge.net/u/illua/"}, {"username": "bewater", "name": "Frank Terbeck", "url": "https://sourceforge.net/u/bewater/"}, {"username": "xtaran", "name": "Axel Beckert", "url": "https://sourceforge.net/u/xtaran/"}, {"username": "dkearns", "name": "Doug Kearns", "url": "https://sourceforge.net/u/dkearns/"}, {"username": "richih", "name": "Richard Hartmann", "url": "https://sourceforge.net/u/richih/"}, {"username": "larryv", "name": "Lawrence Vel\u00e1zquez", "url": "https://sourceforge.net/u/larryv/"}, {"username": "gcw", "name": "Geoff Wing", "url": "https://sourceforge.net/u/gcw/"}], 
"tools": [{"name": "summary", "mount_point": "summary", "url": "https://sourceforge.net/p/zsh/summary/", "mount_label": "Summary", "sourceforge_group_id": 4068}, {"name": "reviews", "mount_point": "reviews", "url": "https://sourceforge.net/p/zsh/reviews/", "mount_label": "Reviews"}, {"name": "blog", "mount_point": "news", "url": "https://sourceforge.net/p/zsh/news/", "mount_label": "News", "api_url": "https://sourceforge.net/rest/p/zsh/news/"}, {"name": "git", "mount_point": "code", "url": "https://sourceforge.net/p/zsh/code/", "mount_label": "Code", "api_url": "https://sourceforge.net/rest/p/zsh/code/", "clone_url_https_anon": "https://git.code.sf.net/p/zsh/code", "clone_url_ro": "git://git.code.sf.net/p/zsh/code"}, {"name": "files-sf", "mount_point": "files", "url": "https://sourceforge.net/p/zsh/files/", "mount_label": "Files"}, {"name": "tickets", "mount_point": "patches", "url": "https://sourceforge.net/p/zsh/patches/", "mount_label": "Patches", "api_url": "https://sourceforge.net/rest/p/zsh/patches/"}, {"name": "tickets", "mount_point": "bugs", "url": "https://sourceforge.net/p/zsh/bugs/", "mount_label": "Bugs", "api_url": "https://sourceforge.net/rest/p/zsh/bugs/"}, {"name": "tickets", "mount_point": "support-requests", "url": "https://sourceforge.net/p/zsh/support-requests/", "mount_label": "Support Requests", "api_url": "https://sourceforge.net/rest/p/zsh/support-requests/"}, {"name": "support", "mount_point": "support", "url": "https://sourceforge.net/p/zsh/support/", "mount_label": "Support"}, {"name": "tickets", "mount_point": "feature-requests", "url": "https://sourceforge.net/p/zsh/feature-requests/", "mount_label": "Feature Requests", "api_url": "https://sourceforge.net/rest/p/zsh/feature-requests/"}, {"name": "git", "mount_point": "web", "url": "https://sourceforge.net/p/zsh/web/", "mount_label": "Website-content", "api_url": "https://sourceforge.net/rest/p/zsh/web/", "clone_url_https_anon": "https://git.code.sf.net/p/zsh/web", "clone_url_ro": 
"git://git.code.sf.net/p/zsh/web"}, {"name": "activity", "mount_point": "activity", "url": "https://sourceforge.net/p/zsh/activity/", "mount_label": "Activity", "api_url": "https://sourceforge.net/rest/p/zsh/activity/"}, {"name": "mailman", "mount_point": "mailman", "url": "https://sourceforge.net/p/zsh/mailman/", "mount_label": "Mailing Lists"}], "labels": [], "categories": {"audience": [{"id": 4, "shortname": "system-administrators", "fullname": "System Administrators", "fullpath": "Intended Audience :: by End-User Class :: System Administrators"}, {"id": 3, "shortname": "developers", "fullname": "Developers", "fullpath": "Intended Audience :: by End-User Class :: Developers"}, {"id": 2, "shortname": "end-users-desktop", "fullname": "End Users/Desktop", "fullpath": "Intended Audience :: by End-User Class :: End Users/Desktop"}], "developmentstatus": [{"id": 12, "shortname": "6-mature", "fullname": "6 - Mature", "fullpath": "Development Status :: 6 - Mature"}], "environment": [{"id": 460, "shortname": "console-terminal", "fullname": "Console/Terminal", "fullpath": "User Interface :: Textual :: Console/Terminal"}, {"id": 459, "shortname": "command-line", "fullname": "Command-line", "fullpath": "User Interface :: Textual :: Command-line"}], "language": [{"id": 185, "shortname": "unix-shell", "fullname": "Unix Shell", "fullpath": "Programming Language :: Unix Shell"}, {"id": 164, "shortname": "c", "fullname": "C", "fullpath": "Programming Language :: C"}], "license": [{"id": 14, "shortname": "osi-approved-open-source", "fullname": "OSI-Approved Open Source", "fullpath": "License :: OSI-Approved Open Source"}], "translation": [], "os": [{"id": 201, "shortname": "linux", "fullname": "Linux", "fullpath": "Operating System :: Linux"}, {"id": 202, "shortname": "bsd", "fullname": "BSD", "fullpath": "Operating System :: BSD"}], "database": [], "topic": [{"id": 294, "shortname": "system-shells", "fullname": "System Shells", "fullpath": "Topic :: System :: System Shells"}]}, 
"icon_url": "https://sourceforge.net/p/zsh/icon", "screenshots": []}upstream-ontologist-0.2.4/src/vcs.rs000064400000000000000000001523601046102023000156250ustar 00000000000000use crate::with_path_segments; use lazy_regex::regex; use log::{debug, warn}; use std::borrow::Cow; use std::collections::HashMap; use url::Url; pub const VCSES: &[&str] = &["git", "bzr", "hg"]; pub const KNOWN_GITLAB_SITES: &[&str] = &["salsa.debian.org", "invent.kde.org", "0xacab.org"]; pub const SECURE_SCHEMES: &[&str] = &["https", "git+ssh", "bzr+ssh", "hg+ssh", "ssh", "svn+ssh"]; const KNOWN_HOSTING_SITES: &[&str] = &[ "code.launchpad.net", "github.com", "launchpad.net", "git.openstack.org", ]; pub fn plausible_url(url: &str) -> bool { url.contains(':') } pub fn drop_vcs_in_scheme(url: &Url) -> Option { let scheme = url.scheme(); match scheme { "git+http" | "git+https" => { Some(derive_with_scheme(url, scheme.trim_start_matches("git+"))) } "hg+http" | "hg+https" => Some(derive_with_scheme(url, scheme.trim_start_matches("hg+"))), "bzr+lp" | "bzr+http" => Some(derive_with_scheme(url, scheme.trim_start_matches("bzr+"))), _ => None, } } pub fn split_vcs_url(location: &str) -> (String, Option, Option) { let mut url = location.to_string(); let mut branch = None; let mut subpath = None; if let Some(idx) = url.find('[') { if let Some(idx2) = url.find(']') { subpath = Some(url[idx + 1..idx2].to_string()); url = url[..idx].to_string(); } } if let Some(idx) = url.find(" -b ") { branch = Some(url[idx + 4..].to_string()); url = url[..idx].to_string(); } (url, branch, subpath) } pub fn unsplit_vcs_url(location: &VcsLocation) -> String { let mut url = location.url.to_string(); if let Some(branch_name) = location.branch.as_deref() { url = format!("{} -b {}", url, branch_name); } if let Some(subpath_str) = location.subpath.as_deref() { url = format!("{} [{}]", url, subpath_str); } url } pub fn plausible_browse_url(url: &str) -> bool { if let Ok(url) = url::Url::parse(url) { if url.scheme() == "https" || 
url.scheme() == "http" { return true; } } false } pub fn strip_vcs_prefixes(url: &str) -> &str { let prefixes = ["git", "hg"]; for prefix in prefixes.iter() { if url.starts_with(&format!("{}+", prefix)) { return &url[prefix.len() + 1..]; } } url } async fn probe_upstream_github_branch_url(url: &url::Url, version: Option<&str>) -> Option { let path = url.path(); let path = path.strip_suffix(".git").unwrap_or(path); let api_url = url::Url::parse( format!( "https://api.github.com/repos/{}/tags", path.trim_start_matches('/') ) .as_str(), ) .unwrap(); match crate::load_json_url(&api_url, None).await { Ok(json) => { if let Some(version) = version { let tags = json.as_array()?; let tag_names = tags .iter() .map(|x| x["name"].as_str().unwrap()) .collect::>(); if tag_names.is_empty() { // Uhm, hmm return Some(true); } return Some(version_in_tags(version, tag_names.as_slice())); } Some(true) } Err(crate::HTTPJSONError::Error { status: 404, .. }) => Some(false), Err(crate::HTTPJSONError::Error { status: 403, .. 
}) => { debug!("github api rate limit exceeded"); None } Err(e) => { warn!("failed to probe github api: {:?}", e); None } } } fn version_in_tags(version: &str, tag_names: &[&str]) -> bool { if tag_names.contains(&version) { return true; } if tag_names.contains(&format!("v{}", version).as_str()) { return true; } if tag_names.contains(&format!("release/{}", version).as_str()) { return true; } if tag_names.contains(&version.replace('.', "_").as_str()) { return true; } for tag_name in tag_names { if tag_name.ends_with(&format!("_{}", version)) { return true; } if tag_name.ends_with(&format!("-{}", version)) { return true; } if tag_name.ends_with(&format!("_{}", version.replace('.', "_"))) { return true; } } false } fn probe_upstream_breezy_branch_url(url: &url::Url, version: Option<&str>) -> Option { let tags: HashMap = breezyshim::ui::with_silent_ui_factory( || -> Result, breezyshim::error::Error> { let branch = breezyshim::branch::open(url)?; branch.tags()?.get_tag_dict() }, ) .map_err(|e| { warn!("failed to probe breezy branch: {:?}", e); e }) .ok()?; let tag_names = tags.keys().map(|x| x.as_str()).collect::>(); if let Some(version) = version { Some(version_in_tags(version, tag_names.as_slice())) } else { Some(true) } } pub async fn probe_upstream_branch_url(url: &url::Url, version: Option<&str>) -> Option { if url.scheme() == "git+ssh" || url.scheme() == "ssh" || url.scheme() == "bzr+ssh" { // Let's not probe anything possibly non-public. 
return None; } if url.host() == Some(url::Host::Domain("github.com")) { probe_upstream_github_branch_url(url, version).await } else { probe_upstream_breezy_branch_url(url, version) } } pub async fn check_repository_url_canonical( mut url: url::Url, version: Option<&str>, ) -> std::result::Result { if url.host_str() == Some("github.com") { let mut segments = url.path_segments().unwrap().collect::>(); if segments.len() < 2 { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL with less than 2 path elements".to_string(), )); } if segments[0] == "sponsors" { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub sponsors URL".to_string(), )); } segments[1] = segments[1].trim_end_matches(".git"); let api_url = format!( "https://api.github.com/repos/{}/{}", segments[0], segments[1] ); url = match crate::load_json_url(&url::Url::parse(api_url.as_str()).unwrap(), None).await { Ok(data) => { if data["archived"].as_bool().unwrap_or(false) { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL is archived".to_string(), )); } if let Some(description) = data["description"].as_str() { if description.contains("DEPRECATED") { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL is deprecated".to_string(), )); } if description.starts_with("Moved to") { let url = url::Url::parse( description .trim_start_matches("Moved to ") .trim_end_matches('.'), ) .unwrap(); return Box::pin(check_repository_url_canonical(url, version)).await; } if description.contains("has moved") { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL has moved".to_string(), )); } if description.starts_with("Mirror of ") { let url = url::Url::parse( description .trim_start_matches("Mirror of ") .trim_end_matches('.'), ) .unwrap(); return Box::pin(check_repository_url_canonical(url, version)).await; } } if let Some(homepage) = data["homepage"].as_str() { if is_gitlab_site(homepage, None).await { return Err(crate::CanonicalizeError::InvalidUrl( url, 
format!("homepage is on GitLab: {}", homepage), )); } } // TODO(jelmer): Look at the contents of the repository; if it contains just a // single README file with < 10 lines, assume the worst. // return data['clone_url'] Ok(url::Url::parse(data["clone_url"].as_str().unwrap()).unwrap()) } Err(crate::HTTPJSONError::Error { status: 404, .. }) => { return Err(crate::CanonicalizeError::InvalidUrl( url, "GitHub URL does not exist".to_string(), )) } Err(crate::HTTPJSONError::Error { status: 403, .. }) => { return Err(crate::CanonicalizeError::Unverifiable( url, "GitHub URL rate-limited".to_string(), )) } Err(e) => { return Err(crate::CanonicalizeError::Unverifiable( url, format!("GitHub URL failed to load: {:?}", e), )) } }?; } let is_valid = probe_upstream_branch_url(&url, version).await; if is_valid.is_none() { return Err(crate::CanonicalizeError::Unverifiable( url, "unable to probe".to_string(), )); } if is_valid.unwrap() { return Ok(url); } Err(crate::CanonicalizeError::InvalidUrl( url, "unable to successfully probe URL".to_string(), )) } pub async fn is_gitlab_site(hostname: &str, net_access: Option) -> bool { if KNOWN_GITLAB_SITES.contains(&hostname) { return true; } if hostname.starts_with("gitlab.") { return true; } if net_access.unwrap_or(false) { probe_gitlab_host(hostname).await } else { false } } pub async fn probe_gitlab_host(hostname: &str) -> bool { let url = format!("https://{}/api/v4/version", hostname); match crate::load_json_url(&url::Url::parse(url.as_str()).unwrap(), None).await { Ok(_data) => true, Err(crate::HTTPJSONError::Error { status: 401, response, .. 
}) => { if let Ok(data) = response.json::().await { if let Some(message) = data["message"].as_str() { if message == "401 Unauthorized" { true } else { debug!("failed to parse JSON response: {:?}", data); false } } else { debug!("failed to parse JSON response: {:?}", data); false } } else { debug!("failed to parse JSON response"); false } } Err(e) => { debug!("failed to probe GitLab host: {:?}", e); false } } } pub async fn guess_repo_from_url(url: &url::Url, net_access: Option) -> Option { let net_access = net_access.unwrap_or(false); let path_segments = url.path_segments().unwrap().collect::>(); match url.host_str()? { "github.com" => { if path_segments.len() < 2 { return None; } Some( with_path_segments(url, &path_segments[0..2]) .unwrap() .to_string(), ) } "travis-ci.org" => { if path_segments.len() < 2 { return None; } Some(format!( "https://github.com/{}/{}", path_segments[0], path_segments[1] )) } "coveralls.io" => { if path_segments.len() < 3 { return None; } if path_segments[0] != "r" { return None; } Some(format!( "https://github.com/{}/{}", path_segments[1], path_segments[2] )) } "launchpad.net" => Some( url::Url::parse(format!("https://code.launchpad.net/{}", path_segments[0]).as_str()) .unwrap() .to_string(), ), "git.savannah.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "git" { return None; } Some(url.to_string()) } "freedesktop.org" | "www.freedesktop.org" => { if path_segments.len() >= 2 && path_segments[0] == "software" { Some( url::Url::parse( format!("https://github.com/freedesktop/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } else if path_segments.len() >= 3 && path_segments[0..2] == ["wiki", "Software"] { Some( url::Url::parse( format!("https://github.com/freedesktop/{}", path_segments[2]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "download.gnome.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "sources" { return None; } Some( url::Url::parse( 
format!("https://gitlab.gnome.org/GNOME/{}.git", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } "download.kde.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "stable" && path_segments[0] != "unstable" { return None; } Some( url::Url::parse(format!("https://invent.kde.org/{}", path_segments[1]).as_str()) .unwrap() .to_string(), ) } "ftp.gnome.org" => { if path_segments.len() >= 4 && path_segments[0] == "pub" && path_segments[1] == "GNOME" && path_segments[2] == "sources" { Some( url::Url::parse( format!("https://gitlab.gnome.org/GNOME/{}.git", path_segments[3]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "sourceforge.net" => { if path_segments.len() >= 4 && path_segments[0] == "p" && path_segments[3] == "ci" { Some( url::Url::parse( format!( "https://sourceforge.net/p/{}/{}", path_segments[1], path_segments[2] ) .as_str(), ) .unwrap() .to_string(), ) } else { None } } "www.apache.org" => { if path_segments.len() >= 2 && path_segments[0] == "dist" { Some( url::Url::parse( format!("https://svn.apache.org/repos/asf/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } else { None } } "bitbucket.org" => { if path_segments.len() < 2 { return None; } Some( with_path_segments(url, &path_segments[0..2]) .unwrap() .to_string(), ) } "ftp.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "gnu" { return None; } Some( url::Url::parse( format!("https://git.savannah.gnu.org/git/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } "download.savannah.gnu.org" => { if path_segments.len() < 2 { return None; } if path_segments[0] != "releases" { return None; } Some( url::Url::parse( format!("https://git.savannah.gnu.org/git/{}", path_segments[1]).as_str(), ) .unwrap() .to_string(), ) } u if is_gitlab_site(u, Some(net_access)).await => { if path_segments.is_empty() { return None; } let proj_segments = if path_segments.contains(&"-") { path_segments[0..path_segments.iter().position(|s| 
s.contains('-')).unwrap()] .to_vec() } else if path_segments.contains(&"tags") { path_segments[0..path_segments.iter().position(|s| s == &"tags").unwrap()].to_vec() } else if path_segments.contains(&"blob") { path_segments[0..path_segments.iter().position(|s| s == &"blob").unwrap()].to_vec() } else { path_segments.to_vec() }; Some(with_path_segments(url, &proj_segments).unwrap().to_string()) } "git.php.net" => { if path_segments[0] == "repository" { Some(url.to_string()) } else if path_segments.is_empty() { let qs = url.query_pairs().collect::>(); qs.get("p") .map(|p| { url::Url::parse(format!("https://git.php.net/repository/?{}", p).as_str()) .unwrap() }) .map(|u| u.to_string()) } else { None } } u if KNOWN_HOSTING_SITES.contains(&u) => Some(url.to_string()), u if u.starts_with("svn.") => { // 'svn' subdomains are often used for hosting SVN repositories Some(url.to_string()) } _ => { if net_access { match check_repository_url_canonical(url.clone(), None).await { Ok(url) => Some(url.to_string()), Err(_) => { debug!("Failed to canonicalize URL: {}", url); None } } } else { None } } } } #[tokio::test] async fn test_guess_repo_url() { assert_eq!( Some("https://github.com/jelmer/blah".to_string()), guess_repo_from_url( &"https://github.com/jelmer/blah".parse().unwrap(), Some(false) ) .await ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), guess_repo_from_url( &"https://github.com/jelmer/blah/blob/README" .parse() .unwrap(), Some(false) ) .await ); assert_eq!( None, guess_repo_from_url(&"https://github.com/jelmer".parse().unwrap(), Some(false)).await ); assert_eq!( None, guess_repo_from_url(&"https://www.jelmer.uk/".parse().unwrap(), Some(false)).await ); assert_eq!( Some("http://code.launchpad.net/blah".to_string()), guess_repo_from_url( &"http://code.launchpad.net/blah".parse().unwrap(), Some(false) ) .await, ); assert_eq!( Some("https://code.launchpad.net/bzr".to_string()), guess_repo_from_url( &"http://launchpad.net/bzr/+download".parse().unwrap(), 
Some(false) ) .await, ); assert_eq!( Some("https://git.savannah.gnu.org/git/auctex.git".to_string()), guess_repo_from_url( &"https://git.savannah.gnu.org/git/auctex.git" .parse() .unwrap(), Some(false) ) .await, ); assert_eq!( None, guess_repo_from_url( &"https://git.savannah.gnu.org/blah/auctex.git" .parse() .unwrap(), Some(false) ) .await, ); assert_eq!( Some("https://bitbucket.org/fenics-project/dolfin".to_string()), guess_repo_from_url( &"https://bitbucket.org/fenics-project/dolfin/downloads/" .parse() .unwrap(), Some(false) ) .await, ); } pub async fn canonical_git_repo_url(repo_url: &Url, net_access: Option) -> Option { if let Some(hostname) = repo_url.host_str() { if (is_gitlab_site(hostname, net_access).await || hostname == "github.com") && !repo_url.path().ends_with(".git") { let mut url = repo_url.clone(); url.set_path(&(url.path().to_owned() + ".git")); return Some(url); } } None } pub async fn browse_url_from_repo_url( location: &VcsLocation, net_access: Option, ) -> Option { if location.url.host_str() == Some("github.com") { let mut path = location .url .path_segments() .unwrap() .take(3) .collect::>() .join("/"); if path.ends_with(".git") { path = path[..path.len() - 4].to_string(); } if location.subpath.is_some() || location.branch.is_some() { path.push_str(&format!( "/tree/{}", location.branch.as_deref().unwrap_or("HEAD") )); } if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/{}", subpath_str)); } Some( Url::parse("https://github.com") .unwrap() .join(&path) .unwrap(), ) } else if location.url.host_str() == Some("gopkg.in") { let mut els = location .url .path_segments() .unwrap() .take(3) .collect::>(); if els.len() != 2 { return None; } if let Some(version) = els[2].strip_prefix(".v") { els[2] = ""; let mut path = els.join("/"); path.push_str(&format!("/tree/{}", version)); if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/{}", subpath_str)); } Some( Url::parse("https://github.com") 
.unwrap() .join(&path) .unwrap(), ) } else { None } } else if location.url.host_str() == Some("code.launchpad.net") || location.url.host_str() == Some("launchpad.net") { let mut path = location.url.path().to_string(); if let Some(subpath_str) = location.subpath.as_deref() { path.push_str(&format!("/view/head:{}", subpath_str)); return Some( Url::parse(format!("https://bazaar.launchpad.net{}", path).as_str()).unwrap(), ); } else { return Some( Url::parse(format!("https://code.launchpad.net{}", path).as_str()).unwrap(), ); } } else if location.url.host_str() == Some("svn.apache.org") { let path_elements = location .url .path_segments() .map(|segments| segments.into_iter().collect::>()) .unwrap_or_default(); if path_elements.len() >= 2 && path_elements[0] == "repos" && path_elements[1] == "asf" { let mut path_elements = path_elements.into_iter().skip(1).collect::>(); path_elements[0] = "viewvc"; if let Some(subpath_str) = location.subpath.as_deref() { path_elements.push(subpath_str); } return Some( Url::parse(format!("https://svn.apache.org{}", path_elements.join("/")).as_str()) .unwrap(), ); } else { None } } else if location.url.host_str() == Some("git.savannah.gnu.org") || location.url.host_str() == Some("git.sv.gnu.org") { let mut path_elements = location.url.path_segments().unwrap().collect::>(); if location.url.scheme() == "https" && path_elements.first() == Some(&"git") { path_elements.remove(0); } path_elements.insert(0, "cgit"); if let Some(subpath_str) = location.subpath.as_deref() { path_elements.push("tree"); path_elements.push(subpath_str); } Some( Url::parse(format!("https://git.savannah.gnu.org{}", path_elements.join("/")).as_str()) .unwrap(), ) } else if location.url.host_str().is_some() && is_gitlab_site(location.url.host_str().unwrap(), net_access).await { let mut path = location.url.path().to_string(); if path.ends_with(".git") { path = path[..path.len() - 4].to_string(); } if let Some(subpath_str) = location.subpath.as_deref() { 
path.push_str(&format!("/-/blob/HEAD/{}", subpath_str)); } Some( Url::parse(format!("https://{}{}", location.url.host_str().unwrap(), path).as_str()) .unwrap(), ) } else { None } } pub async fn find_public_repo_url(repo_url: &str, net_access: Option) -> Option { let parsed = match Url::parse(repo_url) { Ok(parsed) => parsed, Err(_) => { if repo_url.contains(':') { let re = regex!(r"^(?P[^@:/]+@)?(?P[^/:]+):(?P.*)$"); if let Some(captures) = re.captures(repo_url) { let host = captures.name("host").unwrap().as_str(); let path = captures.name("path").unwrap().as_str(); if host == "github.com" || is_gitlab_site(host, net_access).await { return Some(format!("https://{}/{}", host, path)); } } } return None; } }; let revised_url: Option; match parsed.host_str() { Some("github.com") => { if ["https", "http", "git"].contains(&parsed.scheme()) { return Some(repo_url.to_string()); } revised_url = Some( Url::parse("https://github.com") .unwrap() .join(parsed.path()) .unwrap() .to_string(), ); } Some(hostname) if is_gitlab_site(hostname, net_access).await => { if ["https", "http"].contains(&parsed.scheme()) { return Some(repo_url.to_string()); } if parsed.scheme() == "ssh" { revised_url = Some(format!( "https://{}{}", parsed.host_str().unwrap(), parsed.path(), )); } else { revised_url = None; } } Some("code.launchpad.net") | Some("bazaar.launchpad.net") | Some("git.launchpad.net") => { if parsed.scheme().starts_with("http") || parsed.scheme() == "lp" { return Some(repo_url.to_string()); } if ["ssh", "bzr+ssh"].contains(&parsed.scheme()) { revised_url = Some(format!( "https://{}{}", parsed.host_str().unwrap(), parsed.path() )); } else { revised_url = None; } } _ => revised_url = None, } revised_url } pub fn fixup_rcp_style_git_repo_url(url: &str) -> Option { breezyshim::location::rcp_location_to_url(url).ok() } pub fn try_open_branch( url: &url::Url, branch_name: Option<&str>, ) -> Option> { let old_ui_factory = breezyshim::ui::get_ui_factory(); 
breezyshim::ui::install_ui_factory(&breezyshim::ui::SilentUIFactory::new()); let controldir = match breezyshim::controldir::open(url, None) { Ok(c) => c, Err(_) => return None, }; let rev = match controldir.open_branch(branch_name) { Ok(b) => { b.last_revision(); Some(b) } Err(_) => None, }; breezyshim::ui::install_ui_factory(old_ui_factory.as_ref()); rev } pub async fn find_secure_repo_url( mut url: url::Url, branch: Option<&str>, net_access: Option, ) -> Option { if SECURE_SCHEMES.contains(&url.scheme()) { return Some(url); } // Sites we know to be available over https if let Some(hostname) = url.host_str() { if is_gitlab_site(hostname, net_access).await || [ "github.com", "git.launchpad.net", "bazaar.launchpad.net", "code.launchpad.net", ] .contains(&hostname) { url = derive_with_scheme(&url, "https"); } } if url.scheme() == "lp" { url = derive_with_scheme(&url, "https"); url.set_host(Some("code.launchpad.net")).unwrap(); } if let Some(host) = url.host_str() { if ["git.savannah.gnu.org", "git.sv.gnu.org"].contains(&host) { if url.scheme() == "http" { url = derive_with_scheme(&url, "https"); } else { url = derive_with_scheme(&url, "https"); url.set_path(format!("/git{}", url.path()).as_str()); } } } else { return None; } if net_access.unwrap_or(true) { let secure_repo_url = derive_with_scheme(&url, "https"); let insecure_branch = try_open_branch(&url, branch); let secure_branch = try_open_branch(&secure_repo_url, branch); if let Some(secure_branch) = secure_branch { if insecure_branch.is_none() || secure_branch.last_revision() == insecure_branch.unwrap().last_revision() { url = secure_repo_url; } } } if SECURE_SCHEMES.contains(&url.scheme()) { Some(url) } else { // Can't find a secure URI :( None } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct VcsLocation { pub url: url::Url, pub branch: Option, pub subpath: Option, } impl VcsLocation { async fn from_str(url: &str) -> Self { let (url, branch, subpath) = split_vcs_url(url); let url = 
fixup_git_url(url.as_str()).await; VcsLocation { url: url.parse().unwrap(), branch, subpath, } } } impl std::fmt::Display for VcsLocation { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", unsplit_vcs_url(self)) } } impl From for url::Url { fn from(v: VcsLocation) -> Self { v.url } } impl From for VcsLocation { fn from(url: url::Url) -> Self { VcsLocation { url, branch: None, subpath: None, } } } fn derive_with_scheme(url: &url::Url, scheme: &str) -> url::Url { let mut s = url.to_string(); s.replace_range(..url.scheme().len(), scheme); url::Url::parse(&s).unwrap() } fn fix_path_in_port(url: &str) -> Option { let (_, scheme, host, port, rest) = match lazy_regex::regex_captures!(r"^([^:]+)://([^:]+):([^/]+)(/.*)$", url) { Some(c) => c, None => return None, }; if port.ends_with(']') { return None; } if let Ok(port) = port.parse::() { Some(format!("{}://{}:{}{}", scheme, host, port, rest)) } else { Some(format!("{}://{}/{}{}", scheme, host, port, rest)) } } async fn fix_gitlab_scheme(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if let Some(host) = url.host_str() { if is_gitlab_site(host, None).await { return Some(derive_with_scheme(&url, "https").to_string()); } } } None } fn fix_github_scheme(url: &str) -> Option { // GitHub no longer supports the git:// scheme if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("github.com") { return Some(derive_with_scheme(&url, "https").to_string()); } } None } fn fix_salsa_cgit_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("salsa.debian.org") { if let Some(suffix) = url.path().strip_prefix("/cgit/") { let mut url = url.clone(); url.set_path(suffix); return Some(url.to_string()); } } } None } async fn fix_gitlab_tree_in_url(location: &VcsLocation) -> Option { if is_gitlab_site(location.url.host_str()?, None).await { let segments = location.url.path_segments().unwrap().collect::>(); if let Some(p) = 
segments.iter().position(|p| *p == "tree") { let branch = segments[(p + 1)..].join("/"); let path = segments[..p].join("/"); let mut url = location.url.clone(); url.set_path(path.as_str()); return Some(VcsLocation { url, branch: Some(branch), subpath: location.subpath.clone(), }); } } None } fn fix_double_slash(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if url.path().starts_with("//") { let path = url .path() .to_string() .strip_prefix("//") .unwrap() .to_string(); url.set_path(path.as_str()); return Some(url.to_string()); } } None } fn fix_extra_colon(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if url.path().starts_with(':') { let path = url .path() .to_string() .strip_prefix(':') .unwrap() .to_string(); url.set_path(&path); return Some(url.to_string()); } } None } fn drop_git_username(url: &str) -> Option { if let Ok(mut url) = url::Url::parse(url) { if let Some(host) = url.host_str() { if !["github.com", "salsa.debian.org", "gitlab.com"].contains(&host) { return None; } } else { return None; } if !["git", "http", "https"].contains(&url.scheme()) { return None; } if url.username() == "git" { url.set_username("").unwrap(); return Some(url.to_string()); } } None } fn fix_branch_argument(location: &VcsLocation) -> Option { if location.url.host_str() == Some("github.com") { // TODO(jelmer): Handle gitlab sites too? 
let path_elements = location.url.path_segments().unwrap().collect::>(); if path_elements.len() > 2 && path_elements[2] == "tree" { let branch = path_elements[3..].join("/"); let path = path_elements[..2].join("/"); let mut url = location.url.clone(); url.set_path(path.as_str()); Some(VcsLocation { url, branch: Some(branch), subpath: location.subpath.clone(), }) } else { None } } else { None } } fn fix_git_gnome_org_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("git.gnome.org") { let mut path_segments = url.path_segments().unwrap().collect::>(); if path_segments.first().map(|p| *p == "browse") == Some(true) { path_segments.remove(0); } let mut url = derive_with_scheme(&url, "https"); url.set_host(Some("gitlab.gnome.org")).unwrap(); url.set_path(format!("GNOME/{}", path_segments.join("/")).as_str()); return Some(url.to_string()); } } None } fn fix_kde_anongit_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("anongit.kde.org") { let url = derive_with_scheme(&url, "https"); return Some(url.to_string()); } } None } fn fix_freedesktop_org_url(url: &str) -> Option { if let Ok(url) = url::Url::parse(url) { if url.host_str() == Some("anongit.freedesktop.org") { let suffix = url.path().strip_prefix("/git/"); let mut url = derive_with_scheme(&url, "https"); if let Some(suffix) = suffix { url.set_path(suffix); } url.set_host(Some("gitlab.freedesktop.org")).unwrap(); return Some(url.to_string()); } } None } type AsyncLocationFixer = for<'a> fn( &'a VcsLocation, ) -> std::pin::Pin< Box> + Send + 'a>, >; const LOCATION_FIXERS: &[AsyncLocationFixer] = &[ |loc| Box::pin(async move { fix_gitlab_tree_in_url(loc).await }), |loc| Box::pin(async move { fix_branch_argument(loc) }), ]; /// Attempt to fix up broken Git URLs. 
pub async fn fixup_git_location(location: &VcsLocation) -> Cow<'_, VcsLocation> { let mut location = Cow::Borrowed(location); for cb in LOCATION_FIXERS { location = cb(&location).await.map_or(location, Cow::Owned); } location } type AsyncFixer = for<'a> fn( &'a str, ) -> std::pin::Pin< Box> + Send + 'a>, >; const URL_FIXERS: &[AsyncFixer] = &[ |url| Box::pin(async move { fix_path_in_port(url) }), |url| Box::pin(async move { fix_gitlab_scheme(url).await }), |url| Box::pin(async move { fix_github_scheme(url) }), |url| Box::pin(async move { fix_salsa_cgit_url(url) }), |url| Box::pin(async move { fix_double_slash(url) }), |url| Box::pin(async move { fix_extra_colon(url) }), |url| Box::pin(async move { drop_git_username(url) }), |url| Box::pin(async move { fix_freedesktop_org_url(url) }), |url| Box::pin(async move { fix_kde_anongit_url(url) }), |url| Box::pin(async move { fix_git_gnome_org_url(url) }), ]; pub async fn fixup_git_url(url: &str) -> String { let mut url = url.to_string(); for cb in URL_FIXERS { url = cb(&url).await.unwrap_or(url); } url } pub fn convert_cvs_list_to_str(urls: &[&str]) -> Option { if urls[0].starts_with(":extssh:") || urls[0].starts_with(":pserver:") { let url = breezyshim::location::cvs_to_url(urls[0]); Some(format!("{}#{}", url, urls[1])) } else { None } } type AsyncSanitizer = for<'a> fn( &'a str, ) -> std::pin::Pin< Box> + Send + 'a>, >; pub const SANITIZERS: &[AsyncSanitizer] = &[ |url| Box::pin(async move { drop_vcs_in_scheme(&url.parse().ok()?) 
}), |url| { Box::pin(async move { Some( fixup_git_location(&VcsLocation::from_str(url).await) .await .url .clone(), ) }) }, |url| Box::pin(async move { fixup_rcp_style_git_repo_url(url) }), |url| { Box::pin(async move { find_public_repo_url(url.to_string().as_str(), None) .await .and_then(|u| u.parse().ok()) }) }, |url| Box::pin(async move { canonical_git_repo_url(&url.parse().ok()?, None).await }), |url| Box::pin(async move { find_secure_repo_url(url.parse().ok()?, None, Some(false)).await }), ]; pub async fn sanitize_url(url: &str) -> String { let mut url: Cow<'_, str> = Cow::Borrowed(url); for sanitizer in SANITIZERS { url = sanitizer(url.as_ref()) .await .map_or(url, |f| Cow::Owned(f.to_string())); } url.into_owned() } #[cfg(test)] mod tests { use super::fixup_git_url; async fn fixup_git_location(url: &str) -> String { super::fixup_git_location(&super::VcsLocation::from_str(url).await) .await .to_string() } #[test] fn test_plausible_url() { use super::plausible_url; assert!(!plausible_url("the")); assert!(!plausible_url("1")); assert!(plausible_url("git@foo:blah")); assert!(plausible_url("git+ssh://git@foo/blah")); assert!(plausible_url("https://foo/blah")); } #[tokio::test] async fn test_is_gitlab_site() { use super::is_gitlab_site; assert!(is_gitlab_site("gitlab.com", Some(false)).await); assert!(is_gitlab_site("gitlab.example.com", Some(false)).await); assert!(is_gitlab_site("salsa.debian.org", Some(false)).await); assert!(!is_gitlab_site("github.com", Some(false)).await); assert!(!is_gitlab_site("foo.example.com", Some(false)).await); } #[tokio::test] async fn test_canonicalize_github() { use super::canonical_git_repo_url; use url::Url; assert_eq!( Some( "https://github.com/jelmer/example.git" .parse::() .unwrap() ), canonical_git_repo_url( &"https://github.com/jelmer/example".parse::().unwrap(), Some(false) ) .await ); } #[tokio::test] async fn test_canonicalize_github_ssh() { use super::canonical_git_repo_url; use url::Url; assert_eq!( Some( 
"https://salsa.debian.org/jelmer/example.git" .parse::() .unwrap() ), canonical_git_repo_url( &"https://salsa.debian.org/jelmer/example" .parse::() .unwrap(), Some(false) ) .await ); assert_eq!( None, canonical_git_repo_url( &"https://salsa.debian.org/jelmer/example.git" .parse::() .unwrap(), Some(false) ) .await ); } #[tokio::test] async fn test_find_public_github() { use super::find_public_repo_url; assert_eq!( "https://github.com/jelmer/example", find_public_repo_url("ssh://git@github.com/jelmer/example", Some(false)) .await .unwrap() ); assert_eq!( Some("https://github.com/jelmer/example"), find_public_repo_url("https://github.com/jelmer/example", Some(false)) .await .as_deref() ); assert_eq!( "https://github.com/jelmer/example", find_public_repo_url("git@github.com:jelmer/example", Some(false)) .await .unwrap() .as_str() ); } #[tokio::test] async fn test_find_public_salsa() { use super::find_public_repo_url; assert_eq!( "https://salsa.debian.org/jelmer/example", find_public_repo_url("ssh://salsa.debian.org/jelmer/example", Some(false)) .await .unwrap() .as_str() ); assert_eq!( "https://salsa.debian.org/jelmer/example", find_public_repo_url("https://salsa.debian.org/jelmer/example", Some(false)) .await .unwrap() .as_str() ); } #[test] fn test_fixup_rcp_style() { use super::fixup_rcp_style_git_repo_url; use url::Url; assert_eq!( Some( "ssh://git@github.com/jelmer/example" .parse::() .unwrap() ), fixup_rcp_style_git_repo_url("git@github.com:jelmer/example") ); assert_eq!( Some("ssh://github.com/jelmer/example".parse::().unwrap()), fixup_rcp_style_git_repo_url("github.com:jelmer/example") ); } #[test] fn test_fixup_rcp_leave() { use super::fixup_rcp_style_git_repo_url; assert_eq!( None, fixup_rcp_style_git_repo_url("https://salsa.debian.org/jelmer/example") ); assert_eq!( None, fixup_rcp_style_git_repo_url("ssh://git@salsa.debian.org/jelmer/example") ); } #[tokio::test] async fn test_guess_repo_url_travis_ci_org() { use super::guess_repo_from_url; assert_eq!( 
Some("https://github.com/jelmer/dulwich"), guess_repo_from_url( &"https://travis-ci.org/jelmer/dulwich".parse().unwrap(), Some(false) ) .await .as_deref(), ); } #[tokio::test] async fn test_guess_repo_url_coveralls() { use super::guess_repo_from_url; assert_eq!( Some("https://github.com/jelmer/dulwich"), guess_repo_from_url( &"https://coveralls.io/r/jelmer/dulwich".parse().unwrap(), Some(false) ) .await .as_deref(), ); } #[tokio::test] async fn test_guess_repo_url_gitlab() { use super::guess_repo_from_url; assert_eq!( Some("https://gitlab.com/jelmer/dulwich"), guess_repo_from_url( &"https://gitlab.com/jelmer/dulwich".parse().unwrap(), Some(false) ) .await .as_deref(), ); assert_eq!( Some("https://gitlab.com/jelmer/dulwich"), guess_repo_from_url( &"https://gitlab.com/jelmer/dulwich/tags".parse().unwrap(), Some(false) ) .await .as_deref(), ); } #[tokio::test] async fn test_fixup_git_location() { use super::{fixup_git_location, VcsLocation}; assert_eq!( VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }, fixup_git_location(&VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }) .await .into_owned() ); } #[tokio::test] async fn test_browse_url_from_repo() { use super::browse_url_from_repo_url; assert_eq!( Some("https://github.com/jelmer/dulwich".parse().unwrap()), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich".parse().unwrap(), branch: None, subpath: None, }, Some(false) ) .await ); assert_eq!( Some("https://github.com/jelmer/dulwich".parse().unwrap()), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: None, subpath: None, }, Some(false) ) .await ); assert_eq!( Some( "https://github.com/jelmer/dulwich/tree/foo" .parse() .unwrap() ), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: 
Some("foo".to_string()), subpath: None, }, Some(false) ) .await ); assert_eq!( Some( "https://github.com/jelmer/dulwich/tree/HEAD/foo" .parse() .unwrap() ), browse_url_from_repo_url( &super::VcsLocation { url: "https://github.com/jelmer/dulwich.git".parse().unwrap(), branch: None, subpath: Some("foo".to_string()), }, Some(false) ) .await ); } #[test] fn test_fix_github_scheme() { use super::fix_github_scheme; assert_eq!( Some("https://github.com/jelmer/example"), fix_github_scheme("git://github.com/jelmer/example").as_deref() ); } #[test] fn test_fix_git_gnome_org_url() { use super::fix_git_gnome_org_url; assert_eq!( Some("https://gitlab.gnome.org/GNOME/example".to_string()), fix_git_gnome_org_url("https://git.gnome.org/browse/example") ); } #[tokio::test] async fn test_fixup() { assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("https://github.com:jelmer/dulwich").await, ); assert_eq!( "https://github.com/jelmer/dulwich -b blah", fixup_git_location("https://github.com:jelmer/dulwich -b blah").await ); assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("git://github.com/jelmer/dulwich").await, ); } #[tokio::test] async fn test_preserves() { assert_eq!( "https://github.com/jelmer/dulwich", fixup_git_url("https://github.com/jelmer/dulwich").await ); } #[tokio::test] async fn test_salsa_not_https() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich", fixup_git_url("git://salsa.debian.org/jelmer/dulwich").await ); } #[tokio::test] async fn test_salsa_uses_cgit() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich", fixup_git_url("https://salsa.debian.org/cgit/jelmer/dulwich").await ); } #[tokio::test] async fn test_salsa_tree_branch() { assert_eq!( "https://salsa.debian.org/jelmer/dulwich -b master", fixup_git_location("https://salsa.debian.org/jelmer/dulwich/tree/master").await ); } #[tokio::test] async fn test_strip_extra_slash() { assert_eq!( "https://salsa.debian.org/salve/auctex.git", 
fixup_git_url("https://salsa.debian.org//salve/auctex.git").await ); } #[tokio::test] async fn test_strip_extra_colon() { assert_eq!( "https://salsa.debian.org/mckinstry/lcov.git", fixup_git_url("https://salsa.debian.org:/mckinstry/lcov.git").await ); } #[tokio::test] async fn test_strip_username() { assert_eq!( "https://github.com/RPi-Distro/pgzero.git", fixup_git_url("git://git@github.com:RPi-Distro/pgzero.git").await ); assert_eq!( "https://salsa.debian.org/debian-astro-team/pyavm.git", fixup_git_url("https://git@salsa.debian.org:debian-astro-team/pyavm.git").await ); } #[tokio::test] async fn test_github_tree_url() { assert_eq!( "https://github.com/blah/blah -b master", fixup_git_location("https://github.com/blah/blah/tree/master").await ); } #[tokio::test] async fn test_freedesktop() { assert_eq!( "https://gitlab.freedesktop.org/xorg/xserver", fixup_git_url("git://anongit.freedesktop.org/xorg/xserver").await ); assert_eq!( "https://gitlab.freedesktop.org/xorg/lib/libSM", fixup_git_url("git://anongit.freedesktop.org/git/xorg/lib/libSM").await ); } #[tokio::test] async fn test_anongit() { assert_eq!( "https://anongit.kde.org/kdev-php.git", fixup_git_url("git://anongit.kde.org/kdev-php.git").await ); } #[tokio::test] async fn test_gnome() { assert_eq!( "https://gitlab.gnome.org/GNOME/alacarte", fixup_git_url("https://git.gnome.org/browse/alacarte").await ); } } upstream-ontologist-0.2.4/src/vcs_command.rs000064400000000000000000000137571046102023000173310ustar 00000000000000use crate::vcs; use crate::GuesserSettings; use log::warn; fn parse_command_bytes(command: &[u8]) -> Option> { if command.ends_with(&[b'\\']) { warn!( "Ignoring command with line break: {}", String::from_utf8_lossy(command) ); return None; } let command_str = match String::from_utf8(command.to_vec()) { Ok(s) => s, Err(_) => { warn!( "Ignoring command with non-UTF-8: {}", String::from_utf8_lossy(command) ); return None; } }; let args: Vec<_> = shlex::split(command_str.as_str())? 
.into_iter() .filter(|arg| !arg.trim().is_empty()) .collect(); if args.is_empty() { None } else { Some(args) } } pub fn url_from_vcs_command(command: &[u8]) -> Option { if let Some(url) = url_from_git_clone_command(command) { return Some(url); } if let Some(url) = url_from_fossil_clone_command(command) { return Some(url); } if let Some(url) = url_from_cvs_co_command(command) { return Some(url); } if let Some(url) = url_from_svn_co_command(command) { return Some(url); } None } pub fn url_from_git_clone_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; if args.remove(0) != "git" || args.remove(0) != "clone" { return None; } let mut i = 0; while i < args.len() { if !args[i].starts_with('-') { i += 1; continue; } if args[i].contains('=') { args.remove(i); continue; } // arguments that take a parameter if args[i] == "-b" || args[i] == "--depth" || args[i] == "--branch" { args.remove(i); args.remove(i); continue; } args.remove(i); } let url = args .get(2) .cloned() .unwrap_or_else(|| args.first().cloned().unwrap_or_default()); if vcs::plausible_url(&url) { Some(url) } else { None } } #[test] fn test_url_from_git_clone_command() { assert_eq!( url_from_git_clone_command(b"git clone https://github.com/foo/bar foo"), Some("https://github.com/foo/bar".to_string()) ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command(b"git clone https://github.com/jelmer/blah"), ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command(b"git clone https://github.com/jelmer/blah target"), ); assert_eq!( Some("https://github.com/jelmer/blah".to_string()), url_from_git_clone_command(b"git clone -b foo https://github.com/jelmer/blah target"), ); assert_eq!(None, url_from_git_clone_command(b"git ls-tree")); } pub fn url_from_fossil_clone_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; if args.remove(0) != "fossil" || args.remove(0) != "clone" { return None; 
} let mut i = 0; while i < args.len() { if !args[i].starts_with('-') { i += 1; continue; } if args[i].contains('=') { args.remove(i); continue; } args.remove(i); } let url = args .get(2) .cloned() .unwrap_or_else(|| args.first().cloned().unwrap_or_default()); if vcs::plausible_url(&url) { Some(url) } else { None } } #[test] fn test_url_from_fossil_clone_command() { assert_eq!( Some("https://example.com/repo/blah".to_string()), url_from_fossil_clone_command(b"fossil clone https://example.com/repo/blah blah.fossil"), ); } pub fn url_from_cvs_co_command(command: &[u8]) -> Option { let mut args = parse_command_bytes(command)?; let i = 0; let mut cvsroot = None; let mut module = None; let mut command_seen = false; if args.remove(0) != "cvs" { return None; } while i < args.len() { if args[i] == "-d" { args.remove(i); cvsroot = Some(args.remove(i)); continue; } if args[i].starts_with("-d") { cvsroot = Some(args.remove(i)[2..].to_string()); continue; } if command_seen && !args[i].starts_with('-') { module = Some(args[i].clone()); } else if args[i] == "co" || args[i] == "checkout" { command_seen = true; } args.remove(i); } if let Some(cvsroot) = cvsroot { let url = breezyshim::location::cvs_to_url(&cvsroot); if let Some(module) = module { return Some(url.join(module.as_str()).unwrap().to_string()); } return Some(url.to_string()); } None } pub fn url_from_svn_co_command(command: &[u8]) -> Option { let args = parse_command_bytes(command)?; if args[0] != "svn" || args[1] != "co" { return None; } let url_schemes = ["svn+ssh", "http", "https", "svn"]; args.into_iter().find(|arg| { url_schemes .iter() .any(|scheme| arg.starts_with(&format!("{}://", scheme))) }) } pub fn guess_from_get_orig_source( path: &std::path::Path, _settings: &GuesserSettings, ) -> Result, crate::ProviderError> { let text = std::fs::read(path)?; let mut result = Vec::new(); for line in text.split(|b| *b == b'\n') { if let Some(url) = url_from_vcs_command(line) { let certainty = if url.contains('$') { 
crate::Certainty::Possible } else { crate::Certainty::Likely }; result.push(crate::UpstreamDatumWithMetadata { datum: crate::UpstreamDatum::Repository(url), certainty: Some(certainty), origin: Some(path.into()), }); } } Ok(result) } upstream-ontologist-0.2.4/testdata/cabal/blah.cabal000064400000000000000000000010361046102023000204330ustar 00000000000000Name: TestPackage Version: 0.0 Cabal-Version: >= 1.2 License: BSD3 Author: Angela Author Synopsis: Small package with two programs Build-Type: Simple Bug-Reports: https://github.com/example/blah/issues Executable program1 Build-Depends: HUnit Main-Is: Main.hs Hs-Source-Dirs: prog1 Executable program2 Main-Is: Main.hs Build-Depends: HUnit Hs-Source-Dirs: prog2 Other-Modules: Utils source-repository head type: git location: https://github.com/example/blah upstream-ontologist-0.2.4/testdata/cabal/debian/control000064400000000000000000000004211046102023000213630ustar 00000000000000Source: haskell-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~) Package: libghc6-haskell-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.2.4/testdata/cabal/expected.yaml000064400000000000000000000006031046102023000212250ustar 00000000000000Author: - !Person name: Angela Author Bug-Database: https://github.com/example/blah/issues Bug-Submit: https://github.com/example/blah/issues/new Homepage: https://github.com/example/blah Name: TestPackage Repository: https://github.com/example/blah.git Repository-Browse: https://github.com/example/blah Summary: Small package with two programs Description: blah blah License: BSD3 upstream-ontologist-0.2.4/testdata/composer.json/composer.json000064400000000000000000000034241046102023000230230ustar 00000000000000{ "name": "laravel/laravel", "type": "project", "description": "The Laravel Framework.", "keywords": ["framework", "laravel"], "license": "MIT", "require": { "php": 
"^8.1", "guzzlehttp/guzzle": "^7.2", "laravel/framework": "^10.0", "laravel/sanctum": "^3.2", "laravel/tinker": "^2.8" }, "require-dev": { "fakerphp/faker": "^1.9.1", "laravel/pint": "^1.0", "laravel/sail": "^1.18", "mockery/mockery": "^1.4.4", "nunomaduro/collision": "^7.0", "phpunit/phpunit": "^10.0", "spatie/laravel-ignition": "^2.0" }, "autoload": { "psr-4": { "App\\": "app/", "Database\\Factories\\": "database/factories/", "Database\\Seeders\\": "database/seeders/" } }, "autoload-dev": { "psr-4": { "Tests\\": "tests/" } }, "scripts": { "post-autoload-dump": [ "Illuminate\\Foundation\\ComposerScripts::postAutoloadDump", "@php artisan package:discover --ansi" ], "post-update-cmd": [ "@php artisan vendor:publish --tag=laravel-assets --ansi --force" ], "post-root-package-install": [ "@php -r \"file_exists('.env') || copy('.env.example', '.env');\"" ], "post-create-project-cmd": [ "@php artisan key:generate --ansi" ] }, "extra": { "laravel": { "dont-discover": [] } }, "config": { "optimize-autoloader": true, "preferred-install": "dist", "sort-packages": true, "allow-plugins": { "pestphp/pest-plugin": true } }, "minimum-stability": "stable", "prefer-stable": true } upstream-ontologist-0.2.4/testdata/composer.json/expected.yaml000064400000000000000000000001451046102023000227630ustar 00000000000000Name: laravel/laravel Keywords: ['framework', 'laravel'] Summary: The Laravel Framework License: MIT upstream-ontologist-0.2.4/testdata/configure/configure000075500000000000000000000006451046102023000213740ustar 00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for GNU Autoconf 2.69. # # Report bugs to . # # # Identity of this package. PACKAGE_NAME='GNU Autoconf' PACKAGE_TARNAME='autoconf' PACKAGE_VERSION='2.69' PACKAGE_STRING='GNU Autoconf 2.69' PACKAGE_BUGREPORT='bug-autoconf@gnu.org' PACKAGE_URL='http://www.gnu.org/software/autoconf/' ... 
upstream-ontologist-0.2.4/testdata/configure/expected.yaml000064400000000000000000000002121046102023000221400ustar 00000000000000{"Bug-Submit": "bug-autoconf@gnu.org", "Homepage": "http://www.gnu.org/software/autoconf/", "Name": "GNU Autoconf", "Version": "2.69"} upstream-ontologist-0.2.4/testdata/copyright-meta/debian/copyright000064400000000000000000000023051046102023000236100ustar 00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: Blah Upstream-Contact: An upstream contact Source: https://www.some-homepage/ X-Source: https://www.another-homepage/ X-Source-Downloaded-From: https://github.com/example/example/releases X-Upstream-Bugs: https://github.com/example/example/issues Files-Excluded: doc/manual.pdf Files: * Copyright: 2018 Somebody License: GPL-2+ License: GPL-2+ This package is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. . This package is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. . You should have received a copy of the GNU General Public License along with this package. If not, see . . On Debian systems, the complete text of the GNU General Public License version 2 can be found in "/usr/share/common-licenses/GPL-2". 
upstream-ontologist-0.2.4/testdata/copyright-meta/expected.yaml000064400000000000000000000007021046102023000231170ustar 00000000000000{"Bug-Database": "https://github.com/example/example/issues", "Bug-Submit": "https://github.com/example/example/issues/new", "Contact": "An upstream contact ", "Download": "https://github.com/example/example/releases", "Homepage": "https://github.com/example/example", "Name": "Blah", "Repository": "https://github.com/example/example.git", "Repository-Browse": "https://github.com/example/example", "License": "GPL-2+"} upstream-ontologist-0.2.4/testdata/copyright-more-on-line/debian/copyright000064400000000000000000000020271046102023000251640ustar 00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: Blah Upstream-Contact: An upstream contact Source: https://github.com/example/blah, modified to do blah. Files: * Copyright: 2018 Somebody License: GPL-2+ License: GPL-2+ This package is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. . This package is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. . You should have received a copy of the GNU General Public License along with this package. If not, see . . On Debian systems, the complete text of the GNU General Public License version 2 can be found in "/usr/share/common-licenses/GPL-2". 
upstream-ontologist-0.2.4/testdata/copyright-more-on-line/expected.yaml000064400000000000000000000005671046102023000245030ustar 00000000000000{"Bug-Database": "https://github.com/example/blah/issues", "Bug-Submit": "https://github.com/example/blah/issues/new", "Contact": "An upstream contact ", "Homepage": "https://github.com/example/blah", "Name": "Blah", "Repository": "https://github.com/example/blah.git", "Repository-Browse": "https://github.com/example/blah", "License": "GPL-2+"} upstream-ontologist-0.2.4/testdata/doap/blah.doap000064400000000000000000000070231046102023000201770ustar 00000000000000 blah blah 2006-06-11 blah is a swiss army knife for project maintainers and developers. Lorem ipsum. python Joe Maintainer 0.2.4 trunk Pacito 2007-05-20 Added RSS 2.0 feed generation from .doap release entries using Genshi or Cheetah templates. Added support for CHANGE_LOG_EMAIL_ADDRESS environment variable to blah changelog prepare. Added parsing of wiki attribute of a .DOAP project. Implemented "blah doap search" to search Google or Yahoo for your project's home page. Added support for multiple doap files to "blah doap" Added code to check the user's distribution and offer hints on how to install dependencies. 
0.2.3 trunk Ketnet 2007-04-17 0.2.2 trunk Airlines 2007-03-25 0.2.1 trunk Ambulance 2007-02-04 0.2.0 trunk Waffle Flop 2006-12-17 upstream-ontologist-0.2.4/testdata/doap/expected.yaml000064400000000000000000000007241046102023000211120ustar 00000000000000Bug-Database: http://example.com/blah/trac/newticket Contact: Joe Maintainer Homepage: http://example.com/blah/trac/ Name: blah Repository: http://example.com/blah/svn/trunk/ Repository-Browse: http://example.com/blah/trac/browser/ Description: "\n Lorem ipsum.\n " Download: http://example.com/projects/blah/ Maintainer: !Person name: Joe Maintainer Summary: "\nblah is a swiss army knife for project maintainers and developers" Wiki: http://example.com/blah/trac/ upstream-ontologist-0.2.4/testdata/gemspec/expected.yaml000064400000000000000000000006751046102023000216170ustar 00000000000000Name: jekyll License: MIT Summary: A simple, blog aware, static site generator Description: Jekyll is a simple, blog aware, static site generator. Author: - !Person name: Tom Preston-Werner Homepage: http://github.com/jekyll/jekyll Repository: https://github.com/jekyll/jekyll.git Repository-Browse: https://github.com/jekyll/jekyll Bug-Database: https://github.com/jekyll/jekyll/issues Bug-Submit: https://github.com/jekyll/jekyll/issues/new upstream-ontologist-0.2.4/testdata/gemspec/jekyll.gemspec000064400000000000000000000051701046102023000217640ustar 00000000000000# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'jekyll/version' Gem::Specification.new do |s| s.specification_version = 2 if s.respond_to? :specification_version= s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= s.rubygems_version = '2.2.2' s.required_ruby_version = '>= 1.9.3' s.name = 'jekyll' s.version = Jekyll::VERSION s.license = 'MIT' s.summary = "A simple, blog aware, static site generator." 
s.description = "Jekyll is a simple, blog aware, static site generator." s.authors = ["Tom Preston-Werner"] s.email = 'tom@mojombo.com' s.homepage = 'http://github.com/jekyll/jekyll' s.files = `git ls-files`.split($/) s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) } s.test_files = s.files.grep(%r{^(test|spec|features)/}) s.require_paths = ["lib"] s.rdoc_options = ["--charset=UTF-8"] s.extra_rdoc_files = %w[README.markdown LICENSE] s.add_runtime_dependency('liquid', "~> 2.5.5") s.add_runtime_dependency('classifier', "~> 1.3") s.add_runtime_dependency('listen', "~> 2.5") s.add_runtime_dependency('kramdown', "~> 1.3") s.add_runtime_dependency('pygments.rb', "~> 0.5.0") s.add_runtime_dependency('mercenary', "~> 0.3.1") s.add_runtime_dependency('safe_yaml', "~> 1.0") s.add_runtime_dependency('colorator', "~> 0.1") s.add_runtime_dependency('redcarpet', "~> 3.1") s.add_runtime_dependency('toml', '~> 0.1.0') s.add_runtime_dependency('jekyll-coffeescript', '~> 1.0') s.add_runtime_dependency('jekyll-sass-converter', '~> 1.0') s.add_development_dependency('rake', "~> 10.1") s.add_development_dependency('rdoc', "~> 3.11") s.add_development_dependency('redgreen', "~> 1.2") s.add_development_dependency('shoulda', "~> 3.5") s.add_development_dependency('rr', "~> 1.1") s.add_development_dependency('cucumber', "1.3.11") s.add_development_dependency('RedCloth', "~> 4.2") s.add_development_dependency('maruku', "0.7.0") s.add_development_dependency('rdiscount', "~> 1.6") s.add_development_dependency('launchy', "~> 2.3") s.add_development_dependency('simplecov', "~> 0.7") s.add_development_dependency('simplecov-gem-adapter', "~> 1.0.1") s.add_development_dependency('coveralls', "~> 0.7.0") s.add_development_dependency('mime-types', "~> 1.5") s.add_development_dependency('activesupport', '~> 3.2.13') s.add_development_dependency('jekyll_test_plugin') s.add_development_dependency('jekyll_test_plugin_malicious') s.add_development_dependency('rouge', '~> 1.3') end 
upstream-ontologist-0.2.4/testdata/get-orig-source/debian/get-orig-source.sh000064400000000000000000000011471046102023000253060ustar 00000000000000#!/bin/sh set -eux repack_version="$1" version="${repack_version%+repack*}" tag="v$(echo "$version" | tr '~' '.')" tmpdir=$(mktemp -d -t exampl.get-orig-source.XXXXXX) orig_dir="exampl-${version}+repack.orig" git clone -b "$tag" --depth 1 https://example.com/scm/project.git "$tmpdir/${orig_dir}" rm -rf "$tmpdir"/*.orig/src/tls/ # free, but appears to be an unused code example from gnutls export TAR_OPTIONS='--owner root --group root --mode a+rX --format ustar' tar -cJ --wildcards --exclude '.git*' -C "$tmpdir/" "${orig_dir}" \ > "../exampl_${version}+repack.orig.tar.xz" rm -rf "$tmpdir" # vim:ts=4 sw=4 et upstream-ontologist-0.2.4/testdata/get-orig-source/expected.yaml000064400000000000000000000001211046102023000231710ustar 00000000000000{"Name": "get-orig-source", "Repository": "https://example.com/scm/project.git"} upstream-ontologist-0.2.4/testdata/go/debian/control000064400000000000000000000006321046102023000207320ustar 00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any XS-Go-Import-Path: github.com/blah/blah Testsuite: autopkgtest-pkg-go Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.2.4/testdata/go/expected.yaml000064400000000000000000000005721046102023000205750ustar 00000000000000{"Bug-Database": "https://github.com/blah/blah/issues", "Bug-Submit": "https://github.com/blah/blah/issues/new", "Homepage": "https://github.com/blah/blah", "Name": "blah", "Go-Import-Path": "github.com/blah/blah", "Repository": "https://github.com/blah/blah.git", "Repository-Browse": "https://github.com/blah/blah", "Description": "blah blah", "Summary": "blah blah"} 
upstream-ontologist-0.2.4/testdata/homepage/debian/control000064400000000000000000000005771046102023000221220ustar 00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any Homepage: https://github.com/j-keck/arping Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.2.4/testdata/homepage/expected.yaml000064400000000000000000000005451046102023000217550ustar 00000000000000{"Bug-Database": "https://github.com/j-keck/arping/issues", "Bug-Submit": "https://github.com/j-keck/arping/issues/new", "Homepage": "https://github.com/j-keck/arping", "Name": "arping", "Repository": "https://github.com/j-keck/arping.git", "Repository-Browse": "https://github.com/j-keck/arping", "Description": "blah blah", "Summary": "blah blah"} upstream-ontologist-0.2.4/testdata/js/expected.yaml000064400000000000000000000011311046102023000205740ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Homepage: http://www.jacklmoore.com/autosize Demo: http://www.jacklmoore.com/autosize Name: autosize Keywords: - textarea - form - ui Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.2.4/testdata/js/package.json000064400000000000000000000017061046102023000204050ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": 
{ "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.2.4/testdata/js2/expected.yaml000064400000000000000000000011311046102023000206560ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Homepage: http://www.jacklmoore.com/autosize Demo: http://www.jacklmoore.com/autosize Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Keywords: - textarea - form - ui Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.2.4/testdata/js2/package.json000064400000000000000000000016421046102023000204660ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": "http://github.com/jackmoore/autosize.git", "dependencies": 
{}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.2.4/testdata/js3/expected.yaml000064400000000000000000000011311046102023000206570ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Keywords: - textarea - form - ui Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.2.4/testdata/js3/package.json000064400000000000000000000016201046102023000204630ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": "jackmoore/autosize.git", "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } 
upstream-ontologist-0.2.4/testdata/line-interrupted/DESCRIPTION000064400000000000000000000010261046102023000224760ustar 00000000000000Package: tsne Type: Package Title: T-Distributed Stochastic Neighbor Embedding for R (t-SNE) Version: 0.1-3 Date: 2016-06-04 Author: Justin Donaldson Maintainer: Justin Donaldson Description: A "pure R" implementation of the t-SNE algorithm. License: GPL LazyLoad: yes NeedsCompilation: no URL: https://github.com/jdonaldson/rtsne/ BugReports: https://github.com/jdonaldson/rtsne/issues Packaged: 2016-07-15 15:40:42 UTC; jdonaldson Repository: CRAN Date/Publication: 2016-07-15 20:02:16 upstream-ontologist-0.2.4/testdata/line-interrupted/debian/upstream/metadata000064400000000000000000000000651046102023000255570ustar 00000000000000Registry: - Name: conda:conda-forge Entry: r-tsneupstream-ontologist-0.2.4/testdata/line-interrupted/expected.yaml000064400000000000000000000011031046102023000234510ustar 00000000000000Archive: CRAN Bug-Database: https://github.com/jdonaldson/rtsne/issues Bug-Submit: https://github.com/jdonaldson/rtsne/issues/new Contact: Justin Donaldson Homepage: https://github.com/jdonaldson/rtsne/ Name: tsne Repository: https://github.com/jdonaldson/rtsne.git Repository-Browse: https://github.com/jdonaldson/rtsne Description: A "pure R" implementation of the t-SNE algorithm. 
License: GPL Maintainer: !Person name: Justin Donaldson email: jdonaldson@gmail.com Summary: T-Distributed Stochastic Neighbor Embedding for R (t-SNE) Version: 0.1-3 upstream-ontologist-0.2.4/testdata/machine-copyright/debian/copyright000064400000000000000000000003061046102023000242650ustar 00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: autosize Upstream-Contact: Jelmer Vernooij Source: https://salsa.debian.org/jelmer/blah upstream-ontologist-0.2.4/testdata/machine-copyright/expected.yaml000064400000000000000000000012061046102023000235750ustar 00000000000000Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Contact: Jelmer Vernooij Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Keywords: - textarea - form - ui License: MIT Name: autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.2.4/testdata/machine-copyright/package.json000064400000000000000000000017061046102023000234030ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", 
"babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.2.4/testdata/meta.json/META.json000064400000000000000000000301451046102023000210210ustar 00000000000000{ "abstract" : "parse and validate simple name/value option pairs", "author" : [ "Somebody " ], "dynamic_config" : 0, "license" : [ "perl_5" ], "meta-spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : 2 }, "name" : "Some-Blah", "prereqs" : { "configure" : { "requires" : { "ExtUtils::MakeMaker" : "0" }, "suggests" : { "JSON::PP" : "2.27300" } }, "develop" : { "requires" : { "Test::More" : "0", "Test::Pod" : "1.41" } }, "runtime" : { "requires" : { "List::Util" : "0", "Params::Util" : "0", "Sub::Install" : "0.921", "strict" : "0", "warnings" : "0" } }, "test" : { "recommends" : { "CPAN::Meta" : "2.120900" }, "requires" : { "ExtUtils::MakeMaker" : "0", "File::Spec" : "0", "Test::More" : "0.96" } } }, "release_status" : "stable", "resources" : { "bugtracker" : { "web" : "https://github.com/blah/Blie/issues" }, "homepage" : "https://github.com/blah/Blie", "repository" : { "type" : "git", "url" : "https://github.com/blah/Blie.git", "web" : "https://github.com/blah/Blie" } }, "version" : "0.110", "x_Dist_Zilla" : { "perl" : { "version" : "5.023008" }, "plugins" : [ { "class" : "Dist::Zilla::Plugin::Git::GatherDir", "config" : { "Dist::Zilla::Plugin::GatherDir" : { "exclude_filename" : [], "exclude_match" : [], "follow_symlinks" : 0, "include_dotfiles" : 0, "prefix" : "", "prune_directory" : [], "root" : "." 
}, "Dist::Zilla::Plugin::Git::GatherDir" : { "include_untracked" : 0 } }, "name" : "@RJBS/Git::GatherDir", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::CheckPrereqsIndexed", "name" : "@RJBS/CheckPrereqsIndexed", "version" : "0.017" }, { "class" : "Dist::Zilla::Plugin::CheckExtraTests", "name" : "@RJBS/CheckExtraTests", "version" : "0.028" }, { "class" : "Dist::Zilla::Plugin::PromptIfStale", "config" : { "Dist::Zilla::Plugin::PromptIfStale" : { "check_all_plugins" : 0, "check_all_prereqs" : 0, "modules" : [ "Dist::Zilla::PluginBundle::RJBS" ], "phase" : "build", "skip" : [] } }, "name" : "@RJBS/RJBS-Outdated", "version" : "0.047" }, { "class" : "Dist::Zilla::Plugin::PromptIfStale", "config" : { "Dist::Zilla::Plugin::PromptIfStale" : { "check_all_plugins" : 1, "check_all_prereqs" : 0, "modules" : [], "phase" : "release", "skip" : [] } }, "name" : "@RJBS/CPAN-Outdated", "version" : "0.047" }, { "class" : "Dist::Zilla::Plugin::PruneCruft", "name" : "@RJBS/@Filter/PruneCruft", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ManifestSkip", "name" : "@RJBS/@Filter/ManifestSkip", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaYAML", "name" : "@RJBS/@Filter/MetaYAML", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::License", "name" : "@RJBS/@Filter/License", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Readme", "name" : "@RJBS/@Filter/Readme", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ExecDir", "name" : "@RJBS/@Filter/ExecDir", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ShareDir", "name" : "@RJBS/@Filter/ShareDir", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Manifest", "name" : "@RJBS/@Filter/Manifest", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::TestRelease", "name" : "@RJBS/@Filter/TestRelease", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::ConfirmRelease", "name" : "@RJBS/@Filter/ConfirmRelease", "version" : "5.043" }, { "class" : 
"Dist::Zilla::Plugin::UploadToCPAN", "name" : "@RJBS/@Filter/UploadToCPAN", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MakeMaker", "config" : { "Dist::Zilla::Role::TestRunner" : { "default_jobs" : 9 } }, "name" : "@RJBS/MakeMaker", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::AutoPrereqs", "name" : "@RJBS/AutoPrereqs", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Git::NextVersion", "config" : { "Dist::Zilla::Plugin::Git::NextVersion" : { "first_version" : "0.001", "version_by_branch" : 0, "version_regexp" : "(?^:^([0-9]+\\.[0-9]+)$)" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/Git::NextVersion", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::PkgVersion", "name" : "@RJBS/PkgVersion", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaConfig", "name" : "@RJBS/MetaConfig", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::MetaJSON", "name" : "@RJBS/MetaJSON", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::NextRelease", "name" : "@RJBS/NextRelease", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Test::ChangesHasContent", "name" : "@RJBS/Test::ChangesHasContent", "version" : "0.008" }, { "class" : "Dist::Zilla::Plugin::PodSyntaxTests", "name" : "@RJBS/PodSyntaxTests", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::Test::ReportPrereqs", "name" : "@RJBS/Test::ReportPrereqs", "version" : "0.024" }, { "class" : "Dist::Zilla::Plugin::Prereqs", "config" : { "Dist::Zilla::Plugin::Prereqs" : { "phase" : "test", "type" : "requires" } }, "name" : "@RJBS/TestMoreWithSubtests", "version" : "5.043" }, { "class" : "Dist::Zilla::Plugin::PodWeaver", "config" : { "Dist::Zilla::Plugin::PodWeaver" : { "config_plugins" : [ "@RJBS" ], "finder" : [ ":InstallModules", ":ExecFiles" ], "plugins" : [ { "class" : "Pod::Weaver::Plugin::EnsurePod5", "name" : "@CorePrep/EnsurePod5", "version" : "4.012" } ] } }, "name" : "@RJBS/PodWeaver", "version" : "4.006" }, { "class" : 
"Dist::Zilla::Plugin::GithubMeta", "name" : "@RJBS/GithubMeta", "version" : "0.54" }, { "class" : "Dist::Zilla::Plugin::Git::Check", "config" : { "Dist::Zilla::Plugin::Git::Check" : { "untracked_files" : "die" }, "Dist::Zilla::Role::Git::DirtyFiles" : { "allow_dirty" : [ "Changes", "dist.ini" ], "allow_dirty_match" : [], "changelog" : "Changes" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/@Git/Check", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Commit", "config" : { "Dist::Zilla::Plugin::Git::Commit" : { "add_files_in" : [], "commit_msg" : "v%v%n%n%c" }, "Dist::Zilla::Role::Git::DirtyFiles" : { "allow_dirty" : [ "Changes", "dist.ini" ], "allow_dirty_match" : [], "changelog" : "Changes" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." }, "Dist::Zilla::Role::Git::StringFormatter" : { "time_zone" : "local" } }, "name" : "@RJBS/@Git/Commit", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Tag", "config" : { "Dist::Zilla::Plugin::Git::Tag" : { "branch" : null, "changelog" : "Changes", "signed" : 0, "tag" : "0.110", "tag_format" : "%v", "tag_message" : "v%v" }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." }, "Dist::Zilla::Role::Git::StringFormatter" : { "time_zone" : "local" } }, "name" : "@RJBS/@Git/Tag", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Push", "config" : { "Dist::Zilla::Plugin::Git::Push" : { "push_to" : [ "origin :", "github :" ], "remotes_must_exist" : 0 }, "Dist::Zilla::Role::Git::Repo" : { "repo_root" : "." } }, "name" : "@RJBS/@Git/Push", "version" : "2.036" }, { "class" : "Dist::Zilla::Plugin::Git::Contributors", "config" : { "Dist::Zilla::Plugin::Git::Contributors" : { "include_authors" : 0, "include_releaser" : 1, "order_by" : "name", "paths" : [ "." 
] } }, "name" : "@RJBS/Git::Contributors", "version" : "0.020" }, { "class" : "Dist::Zilla::Plugin::FinderCode", "name" : ":NoFiles", "version" : "5.043" } ], "zilla" : { "class" : "Dist::Zilla::Dist::Builder", "config" : { "is_trial" : "0" }, "version" : "5.043" } }, "x_contributors" : [ "Contributor 1 ", "Contributor 2 " ] } upstream-ontologist-0.2.4/testdata/meta.json/expected.yaml000064400000000000000000000005641046102023000220670ustar 00000000000000{"Bug-Database": "https://github.com/blah/Blie/issues", "Bug-Submit": "https://github.com/blah/Blie/issues/new", "Homepage": "https://github.com/blah/Blie", "Name": "Some-Blah", "Repository": "https://github.com/blah/Blie.git", "Repository-Browse": "https://github.com/blah/Blie", "Summary": "parse and validate simple name/value option pairs", "Version": "0.110"} upstream-ontologist-0.2.4/testdata/meta.yml/META.yml000064400000000000000000000122001046102023000204710ustar 00000000000000--- abstract: 'do things' author: - 'Somebody ' build_requires: ExtUtils::MakeMaker: '0' File::Spec: '0' Test::More: '0.96' configure_requires: ExtUtils::MakeMaker: '0' dynamic_config: 0 generated_by: 'Dist::Zilla version 5.043, CPAN::Meta::Converter version 2.150005' license: perl meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: '1.4' name: Blah-Blieh requires: List::Util: '0' Params::Util: '0' Sub::Install: '0.921' strict: '0' warnings: '0' resources: bugtracker: https://github.com/example/Blah/issues homepage: https://github.com/example/Blah repository: https://github.com/example/Blah.git version: '0.110' x_Dist_Zilla: perl: version: '5.023008' plugins: - class: Dist::Zilla::Plugin::Git::GatherDir config: Dist::Zilla::Plugin::GatherDir: exclude_filename: [] exclude_match: [] follow_symlinks: 0 include_dotfiles: 0 prefix: '' prune_directory: [] root: . 
Dist::Zilla::Plugin::Git::GatherDir: include_untracked: 0 name: '@RJBS/Git::GatherDir' version: '2.036' - class: Dist::Zilla::Plugin::PromptIfStale config: Dist::Zilla::Plugin::PromptIfStale: check_all_plugins: 1 check_all_prereqs: 0 modules: [] phase: release skip: [] name: '@RJBS/CPAN-Outdated' version: '0.047' - class: Dist::Zilla::Plugin::MakeMaker config: Dist::Zilla::Role::TestRunner: default_jobs: 9 name: '@RJBS/MakeMaker' version: '5.043' - class: Dist::Zilla::Plugin::AutoPrereqs name: '@RJBS/AutoPrereqs' version: '5.043' - class: Dist::Zilla::Plugin::Prereqs config: Dist::Zilla::Plugin::Prereqs: phase: test type: requires name: '@RJBS/TestMoreWithSubtests' version: '5.043' - class: Dist::Zilla::Plugin::PodWeaver config: Dist::Zilla::Plugin::PodWeaver: config_plugins: - '@RJBS' finder: - ':InstallModules' - ':ExecFiles' plugins: - class: Pod::Weaver::Section::Legal name: '@RJBS/Legal' version: '4.012' - class: Pod::Weaver::Plugin::Transformer name: '@RJBS/List' version: '4.012' name: '@RJBS/PodWeaver' version: '4.006' - class: Dist::Zilla::Plugin::GithubMeta name: '@RJBS/GithubMeta' version: '0.54' - class: Dist::Zilla::Plugin::Git::Check config: Dist::Zilla::Plugin::Git::Check: untracked_files: die Dist::Zilla::Role::Git::DirtyFiles: allow_dirty: - Changes - dist.ini allow_dirty_match: [] changelog: Changes Dist::Zilla::Role::Git::Repo: repo_root: . name: '@RJBS/@Git/Check' version: '2.036' - class: Dist::Zilla::Plugin::Git::Commit config: Dist::Zilla::Plugin::Git::Commit: add_files_in: [] commit_msg: v%v%n%n%c Dist::Zilla::Role::Git::DirtyFiles: allow_dirty: - Changes - dist.ini allow_dirty_match: [] changelog: Changes Dist::Zilla::Role::Git::Repo: repo_root: . 
Dist::Zilla::Role::Git::StringFormatter: time_zone: local name: '@RJBS/@Git/Commit' version: '2.036' - class: Dist::Zilla::Plugin::Git::Tag config: Dist::Zilla::Plugin::Git::Tag: branch: ~ changelog: Changes signed: 0 tag: '0.110' tag_format: '%v' tag_message: v%v Dist::Zilla::Role::Git::Repo: repo_root: . Dist::Zilla::Role::Git::StringFormatter: time_zone: local name: '@RJBS/@Git/Tag' version: '2.036' - class: Dist::Zilla::Plugin::Git::Push config: Dist::Zilla::Plugin::Git::Push: push_to: - 'origin :' - 'github :' remotes_must_exist: 0 Dist::Zilla::Role::Git::Repo: repo_root: . name: '@RJBS/@Git/Push' version: '2.036' - class: Dist::Zilla::Plugin::Git::Contributors config: Dist::Zilla::Plugin::Git::Contributors: include_authors: 0 include_releaser: 1 order_by: name paths: - . name: '@RJBS/Git::Contributors' version: '0.020' - class: Dist::Zilla::Plugin::FinderCode name: ':ShareFiles' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':MainModule' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':AllFiles' version: '5.043' - class: Dist::Zilla::Plugin::FinderCode name: ':NoFiles' version: '5.043' zilla: class: Dist::Zilla::Dist::Builder config: is_trial: '0' version: '5.043' x_contributors: - 'Contributor 1 ' - 'Contributor 2 ' upstream-ontologist-0.2.4/testdata/meta.yml/expected.yaml000064400000000000000000000005271046102023000217160ustar 00000000000000{"Bug-Database": "https://github.com/example/Blah/issues", "Bug-Submit": "https://github.com/example/Blah/issues/new", "Homepage": "https://github.com/example/Blah", "Name": "Blah-Blieh", "Repository": "https://github.com/example/Blah.git", "Repository-Browse": "https://github.com/example/Blah", "License": "perl", "Version": "0.110"} upstream-ontologist-0.2.4/testdata/metadata.json/expected.yaml000064400000000000000000000006521046102023000227170ustar 00000000000000Name: puppet-nginx Version: 4.3.1-rc0 Author: - !Person name: Vox Pupuli Summary: Puppet NGINX management module License: 
MIT Repository: https://github.com/voxpupuli/puppet-nginx.git Homepage: http://github.com/voxpupuli/puppet-nginx Bug-Database: https://github.com/voxpupuli/puppet-nginx/issues Repository-Browse: https://github.com/voxpupuli/puppet-nginx Bug-Submit: https://github.com/voxpupuli/puppet-nginx/issues/new upstream-ontologist-0.2.4/testdata/metadata.json/metadata.json000064400000000000000000000032311046102023000227010ustar 00000000000000{ "name": "puppet-nginx", "version": "4.3.1-rc0", "author": "Vox Pupuli", "summary": "Puppet NGINX management module", "license": "MIT", "source": "https://github.com/voxpupuli/puppet-nginx.git", "project_page": "http://github.com/voxpupuli/puppet-nginx", "issues_url": "https://github.com/voxpupuli/puppet-nginx/issues", "dependencies": [ { "name": "puppetlabs/concat", "version_requirement": ">= 4.1.0 < 8.0.0" }, { "name": "puppetlabs/stdlib", "version_requirement": ">= 5.0.0 < 9.0.0" } ], "requirements": [ { "name": "puppet", "version_requirement": ">= 6.1.0 < 8.0.0" } ], "operatingsystem_support": [ { "operatingsystem": "Debian", "operatingsystemrelease": [ "10", "11" ] }, { "operatingsystem": "OpenBSD" }, { "operatingsystem": "RedHat", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "CentOS", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "VirtuozzoLinux", "operatingsystemrelease": [ "6", "7" ] }, { "operatingsystem": "SLES" }, { "operatingsystem": "Solaris" }, { "operatingsystem": "AIX" }, { "operatingsystem": "FreeBSD" }, { "operatingsystem": "DragonFly" }, { "operatingsystem": "NetBSD" }, { "operatingsystem": "Archlinux" }, { "operatingsystem": "Ubuntu", "operatingsystemrelease": [ "18.04", "20.04", "22.04" ] } ] } upstream-ontologist-0.2.4/testdata/native/configure000075500000000000000000000006451046102023000207010ustar 00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for GNU Autoconf 2.69. # # Report bugs to . 
# # # Identity of this package. PACKAGE_NAME='GNU Autoconf' PACKAGE_TARNAME='autoconf' PACKAGE_VERSION='2.69' PACKAGE_STRING='GNU Autoconf 2.69' PACKAGE_BUGREPORT='bug-autoconf@gnu.org' PACKAGE_URL='http://www.gnu.org/software/autoconf/' ... upstream-ontologist-0.2.4/testdata/native/expected.yaml000064400000000000000000000002121046102023000214450ustar 00000000000000{"Bug-Submit": "bug-autoconf@gnu.org", "Homepage": "http://www.gnu.org/software/autoconf/", "Name": "GNU Autoconf", "Version": "2.69"} upstream-ontologist-0.2.4/testdata/override/debian/source/lintian-overrides000064400000000000000000000000571046102023000254230ustar 00000000000000blah source: upstream-metadata-file-is-missing upstream-ontologist-0.2.4/testdata/override/expected.yaml000064400000000000000000000011311046102023000217770ustar 00000000000000Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new Demo: http://www.jacklmoore.com/autosize Homepage: http://www.jacklmoore.com/autosize Name: autosize Keywords: - textarea - form - ui Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Summary: Autosize is a small, stand-alone script to automatically adjust textarea height to fit text Version: 4.0.2 upstream-ontologist-0.2.4/testdata/override/package.json000064400000000000000000000017061046102023000216100ustar 00000000000000{ "name": "autosize", "description": "Autosize is a small, stand-alone script to automatically adjust textarea height to fit text.", "version": "4.0.2", "keywords": [ "textarea", "form", "ui" ], "files": [ "dist", "src" ], "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": 
"http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" }, "dependencies": {}, "devDependencies": { "babel-core": "^6.26.0", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-es2015-modules-umd": "^6.24.1", "babel-preset-env": "^1.6.1", "gaze": "^1.1.2", "jshint": "^2.9.5", "uglify-js": "^3.3.16" }, "scripts": { "build": "node build" } } upstream-ontologist-0.2.4/testdata/package.json/expected.yaml000064400000000000000000000000441046102023000225250ustar 00000000000000Name: react-fixtures Version: 0.1.0 upstream-ontologist-0.2.4/testdata/package.json/package.json000064400000000000000000000026441046102023000223360ustar 00000000000000{ "name": "react-fixtures", "version": "0.1.0", "private": true, "devDependencies": { "react-scripts": "^1.0.11" }, "dependencies": { "@babel/standalone": "^7.0.0", "art": "^0.10.3", "classnames": "^2.2.5", "codemirror": "^5.40.0", "core-js": "^2.4.1", "jest-diff": "^29.4.1", "prop-types": "^15.6.0", "query-string": "^4.2.3", "react": "^15.4.1", "react-dom": "^15.4.1", "semver": "^5.5.0" }, "scripts": { "start": "react-scripts start", "prestart": "cp ../../build/oss-stable/scheduler/umd/scheduler-unstable_mock.development.js ../../build/oss-stable/scheduler/umd/scheduler-unstable_mock.production.min.js ../../build/oss-stable/react/umd/react.development.js ../../build/oss-stable/react-dom/umd/react-dom.development.js ../../build/oss-stable/react/umd/react.production.min.js ../../build/oss-stable/react-dom/umd/react-dom.production.min.js ../../build/oss-stable/react-dom/umd/react-dom-server.browser.development.js ../../build/oss-stable/react-dom/umd/react-dom-server.browser.production.min.js ../../build/oss-stable/react-dom/umd/react-dom-test-utils.development.js ../../build/oss-stable/react-dom/umd/react-dom-test-utils.production.min.js public/ && cp -a ../../build/oss-stable/. 
node_modules", "build": "react-scripts build && cp build/index.html build/200.html", "test": "react-scripts test --env=jsdom", "eject": "react-scripts eject" } } upstream-ontologist-0.2.4/testdata/package.json2/expected.yaml000064400000000000000000000007141046102023000226130ustar 00000000000000Name: autosize Version: 4.0.2 Demo: http://www.jacklmoore.com/autosize Author: - !Person name: Jack Moore email: hello@jacklmoore.com url: http://www.jacklmoore.com License: MIT Homepage: http://www.jacklmoore.com/autosize Repository: https://github.com/jackmoore/autosize.git Repository-Browse: https://github.com/jackmoore/autosize Bug-Database: https://github.com/jackmoore/autosize/issues Bug-Submit: https://github.com/jackmoore/autosize/issues/new upstream-ontologist-0.2.4/testdata/package.json2/package.json000064400000000000000000000006361046102023000224170ustar 00000000000000{ "name": "autosize", "version": "4.0.2", "author": { "name": "Jack Moore", "url": "http://www.jacklmoore.com", "email": "hello@jacklmoore.com" }, "main": "dist/autosize.js", "license": "MIT", "homepage": "http://www.jacklmoore.com/autosize", "demo": "http://www.jacklmoore.com/autosize", "repository": { "type": "git", "url": "http://github.com/jackmoore/autosize.git" } } upstream-ontologist-0.2.4/testdata/package.xml/expected.yaml000064400000000000000000000007131046102023000223570ustar 00000000000000Author: - !Person name: Author 1 email: author1@example.com - !Person name: Author 2 email: author2@example.com Bug-Database: https://github.com/example/tracker/issues Bug-Submit: https://github.com/example/tracker/issues/new Homepage: https://github.com/example/repo Name: blah Repository: https://github.com/example/repo.git Repository-Browse: https://github.com/example/repo Description: "\n This package does something\n " License: BSD upstream-ontologist-0.2.4/testdata/package.xml/package.xml000064400000000000000000000014241046102023000220070ustar 00000000000000 blah 1.12.4 This package does something 
Author 1 Author 2 Maintainer 1 Other maintainer BSD http://website.example.com/ https://github.com/example/repo https://github.com/example/tracker/issues catkin curl boost python-rospkg upstream-ontologist-0.2.4/testdata/package.xml2/expected.yaml000064400000000000000000000012101046102023000224320ustar 00000000000000Name: phalcon Summary: Phalcon is a full stack PHP framework offering low resource consumption and high performance Description: |2- Phalcon is an open source full stack framework for PHP, written as a C-extension. Phalcon is optimized for high performance. Its unique architecture allows the framework to always be memory resident, offering its functionality whenever its needed, without expensive file stats and file reads that traditional PHP frameworks employ. Version: 5.1.4 License: BSD 3-Clause License Maintainer: !Person name: Anton Vasiliev email: anton@phalcon.io Contact: Anton Vasiliev upstream-ontologist-0.2.4/testdata/package.xml2/package.xml000064400000000000000000000050421046102023000220710ustar 00000000000000 phalcon pecl.php.net Phalcon is a full stack PHP framework offering low resource consumption and high performance. Phalcon is an open source full stack framework for PHP, written as a C-extension. Phalcon is optimized for high performance. Its unique architecture allows the framework to always be memory resident, offering its functionality whenever its needed, without expensive file stats and file reads that traditional PHP frameworks employ. 
Anton Vasiliev jeckerson anton@phalcon.io yes Nikolaos Dimopoulos niden nikos@phalcon.io yes 2023-01-10 5.1.4 5.1.4 stable stable BSD 3-Clause License Full changelog can be found at: https://github.com/phalcon/cphalcon/blob/master/CHANGELOG-5.0.md ### Fixed - Fixed `Phalcon\Acl\Adapter\Memory::isAllowed` to not use the deprecated `ReflectionType::getClass` [#16255](https://github.com/phalcon/cphalcon/issues/16255) 7.4.1 8.1.99 1.10 phalcon upstream-ontologist-0.2.4/testdata/package.yaml/expected.yaml000064400000000000000000000010301046102023000225120ustar 00000000000000Name: css-text Version: 0.1.2.2 Summary: CSS parser and renderer License: MIT Homepage: https://github.com/yesodweb/css-text Bug-Submit: https://github.com/yesodweb/css-text/issues/new Bug-Database: https://github.com/yesodweb/css-text/issues Repository: https://github.com/yesodweb/css-text.git Repository-Browse: https://github.com/yesodweb/css-text Contact: Greg Weber Maintainer: !Person name: Greg Weber email: greg@gregweber.info Author: - !Person name: Michael Snoyman email: michael@snoyman.com upstream-ontologist-0.2.4/testdata/package.yaml/package.yaml000064400000000000000000000014441046102023000223150ustar 00000000000000name: css-text version: 0.1.2.2 synopsis: CSS parser and renderer. description: Please see the README and generated docs at category: Web, Yesod author: Michael Snoyman maintainer: Michael Snoyman , Greg Weber license: MIT github: yesodweb/css-text.git stability: Stable extra-source-files: - README.md - ChangeLog.md dependencies: - base >=4 && <5 - text >=0.11 - attoparsec >=0.10.2.0 library: source-dirs: src ghc-options: -Wall when: - condition: ! 
'!(impl(ghc >=8.0))' dependencies: - semigroups >=0.16.1 tests: runtests: main: runtests.hs source-dirs: test dependencies: - hspec >=1.3 - QuickCheck - css-text upstream-ontologist-0.2.4/testdata/perl/dist.ini000064400000000000000000000014361046102023000201110ustar 00000000000000name = PerlIO-eol author = Shlomi Fish license = Perl_5 copyright_holder = Audrey Tang copyright_year = 2004 [@Filter] -bundle = @Basic -remove = MakeMaker -remove = ExtraTests -remove = License -remove = Readme [AutoPrereqs] [MakeMaker::Awesome] WriteMakefile_arg = 'OBJECT' => 'eol.o' [MetaJSON] [MetaProvides::Package] [MetaResources] bugtracker.web = https://rt.cpan.org/Public/Dist/Display.html?Name=PerlIO-eol bugtracker.mailto = bug-perlio-eol@rt.cpan.org repository.url = https://github.com/shlomif/PerlIO-eol.git repository.web = https://github.com/shlomif/PerlIO-eol repository.type = git [PodCoverageTests] [PodSyntaxTests] [PruneCruft] [RewriteVersion] [RunExtraTests] [Test::CPAN::Changes] [Test::Compile] fake_home = 1 [Test::Kwalitee] [Test::TrailingSpace] upstream-ontologist-0.2.4/testdata/perl/expected.yaml000064400000000000000000000005221046102023000211250ustar 00000000000000{"Bug-Database": "https://rt.cpan.org/Public/Dist/Display.html?Name=PerlIO-eol", "Homepage": "https://github.com/shlomif/PerlIO-eol", "Name": "PerlIO-eol", "Repository": "https://github.com/shlomif/PerlIO-eol.git", "Repository-Browse": "https://github.com/shlomif/PerlIO-eol", "Copyright": "2004 Audrey Tang", "License": "Perl_5"} upstream-ontologist-0.2.4/testdata/perl-parsingerror/dist.ini000064400000000000000000000002611046102023000226170ustar 00000000000000name = IO-Blah-Blah author = Somebody license = Perl_5 [@Author::ETHER] :version = 0.097 [Prereqs] perl = 5.008 upstream-ontologist-0.2.4/testdata/perl-parsingerror/expected.yaml000064400000000000000000000000361046102023000236400ustar 00000000000000{"Name": "perl-parsingerror"} 
upstream-ontologist-0.2.4/testdata/pkg-info/PKG-INFO000064400000000000000000000054521046102023000203140ustar 00000000000000Metadata-Version: 2.1 Name: swh.loader.git Version: 2.1.1.dev2+g0002d5a.d20230125 Summary: Software Heritage git loader Home-page: https://forge.softwareheritage.org/diffusion/DLDG/ Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-loader-git Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-loader-git/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-loader-git ============== The Software Heritage Git Loader is a tool and a library to walk a local Git repository and inject into the SWH dataset all contained files that weren't known before. The main entry points are: - :class:`swh.loader.git.loader.GitLoader` for the main loader which can ingest either local or remote git repository's contents. This is the main implementation deployed in production. - :class:`swh.loader.git.from_disk.GitLoaderFromDisk` which ingests only local git clone repository. - :class:`swh.loader.git.loader.GitLoaderFromArchive` which ingests a git repository wrapped in an archive. License ------- This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. See top-level LICENSE file for the full text of the GNU General Public License along with this program. Dependencies ------------ ### Runtime - python3 - python3-dulwich - python3-retrying - python3-swh.core - python3-swh.model - python3-swh.storage - python3-swh.scheduler ### Test - python3-nose Requirements ------------ - implementation language, Python3 - coding guidelines: conform to PEP8 - Git access: via dulwich CLI Run ---------- You can run the loader from a remote origin (*loader*) or from an origin on disk (*from_disk*) directly by calling: ``` swh loader -C run git ``` or "git_disk". ## Configuration sample /tmp/git.yml: ``` storage: cls: remote args: url: http://localhost:5002/ ``` upstream-ontologist-0.2.4/testdata/pkg-info/expected.yaml000064400000000000000000000021371046102023000217010ustar 00000000000000Name: swh.loader.git Version: 2.1.1.dev2+g0002d5a.d20230125 Summary: Software Heritage git loader Homepage: https://forge.softwareheritage.org/diffusion/DLDG/ Author: - !Person name: Software Heritage developers email: swh-devel@inria.fr Bug-Database: https://forge.softwareheritage.org/maniphest Funding: https://www.softwareheritage.org/donate Repository: https://forge.softwareheritage.org/source/swh-loader-git Documentation: https://docs.softwareheritage.org/devel/swh-loader-git/ Description: "The Software Heritage Git Loader is a tool and a library to walk a local\nGit repository and inject into the SWH dataset all contained files that\nweren't known before.\n\nThe main entry points are:\n\n* \n:class:swh.loader.git.loader.GitLoader for the main loader which can ingest either\nlocal or remote git repository's contents. 
This is the main implementation deployed in\nproduction.\n\n* \n:class:swh.loader.git.from_disk.GitLoaderFromDisk which ingests only local git clone\nrepository.\n\n* \n:class:swh.loader.git.loader.GitLoaderFromArchive which ingests a git repository\nwrapped in an archive.\n\n" upstream-ontologist-0.2.4/testdata/poetry/expected.yaml000064400000000000000000000011621046102023000215060ustar 00000000000000Name: gi-docgen Version: '2021.1' Summary: Documentation tool for GObject-based libraries Author: - !Person name: Emmanuele Bassi email: ebassi@gnome.org License: GPL-3.0-or-later AND Apache-2.0 AND CC0-1.0 Homepage: https://gitlab.gnome.org/GNOME/gi-docgen Documentation: https://gnome.pages.gitlab.gnome.org/gi-docgen/ Keywords: - documentation - introspection - gobject - gtk Bug-Database: https://gitlab.gnome.org/GNOME/gi-docgen/issues Repository: https://gitlab.gnome.org/GNOME/gi-docgen.git Repository-Browse: https://gitlab.gnome.org/GNOME/gi-docgen Bug-Submit: https://gitlab.gnome.org/GNOME/gi-docgen/issues/new upstream-ontologist-0.2.4/testdata/poetry/pyproject.toml000064400000000000000000000033021046102023000217330ustar 00000000000000# SPDX-FileCopyrightText: 2021 GNOME Foundation # # SPDX-License-Identifier: Apache-2.0 OR GPL-3.0-or-later [tool.poetry] name = "gi-docgen" packages = [ { include = "gidocgen" }, ] version = "2021.1" description = "Documentation tool for GObject-based libraries" authors = ["Emmanuele Bassi "] license = "GPL-3.0-or-later AND Apache-2.0 AND CC0-1.0" readme = "README.md" homepage = "https://gitlab.gnome.org/GNOME/gi-docgen" documentation = "https://gnome.pages.gitlab.gnome.org/gi-docgen/" keywords = ["documentation","introspection","gobject","gtk"] classifiers = [ "Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: English", "Operating System 
:: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: BSD", "Operating System :: POSIX :: Linux", "Topic :: Desktop Environment :: Gnome", "Topic :: Software Development :: Documentation" ] [tool.poetry.urls] "Bug Tracker" = "https://gitlab.gnome.org/GNOME/gi-docgen/issues" [tool.poetry.dependencies] python = "^3.6" Markdown = "^3" MarkupSafe = "^1" Pygments = "^2" Jinja2 = "^2" toml = "^0" typogrify = "^2" [tool.poetry.dev-dependencies] coverage = "^5" green = "^3" mypy = "0.812" flake8 = "^3" black = {version = "^20.8b1", allow-prereleases = true} [tool.poetry.scripts] gi-docgen = "gidocgen.__main__:main" [tool.coverage.report] show_missing = true exclude_lines = [ "pragma: no cover", "if False" ] [build-system] requires = ["setuptools","wheel"] upstream-ontologist-0.2.4/testdata/pom/debian/control000064400000000000000000000000151046102023000211130ustar 00000000000000Source: blah upstream-ontologist-0.2.4/testdata/pom/expected.yaml000064400000000000000000000005771046102023000207700ustar 00000000000000{"Bug-Database": "https://github.com/example/blah/issues", "Bug-Submit": "https://github.com/example/blah/issues/new", "Homepage": "http://www.example.com", "License": "GNU Lesser General Public License, Version 2.1", "Name": "libblah", "Repository": "https://github.com/example/blah.git", "Repository-Browse": "https://github.com/example/blah", "Summary": "Bla lah lah lah"} upstream-ontologist-0.2.4/testdata/pom/pom.xml000064400000000000000000000062471046102023000176200ustar 00000000000000 4.0.0 com.example libblah jar ${version} libblah http://www.example.com Bla lah lah lah. 
GNU Lesser General Public License, Version 2.1 http://www.gnu.org/licenses/lgpl-2.1.txt Joe Example joe@example.com Org1 http://www.example.com/org1 scm:git:https://github.com/example/blah.git scm:git:git@github.com/example/blah.git https://github.com/example/blah ossrh https://oss.sonatype.org/content/repositories/snapshots java org.apache.maven.plugins maven-compiler-plugin 3.1 org.apache.maven.plugins maven-release-plugin 2.4.1 org.apache.maven.plugins maven-source-plugin attach-sources jar org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.sonatype.plugins nexus-staging-maven-plugin 1.6.2 true ossrh https://oss.sonatype.org/ true org.apache.maven.plugins maven-gpg-plugin 1.5 sign-artifacts verify sign upstream-ontologist-0.2.4/testdata/pubspec.yml/expected.yaml000064400000000000000000000007021046102023000224240ustar 00000000000000Name: dart Description: You don't need to be worried just because you have to support multiple screens Version: 2.1.53 Homepage: https://github.com/DisplayKit/responsive_styles Repository: https://github.com/DisplayKit/responsive_styles.git Repository-Browse: https://github.com/DisplayKit/responsive_styles Bug-Database: https://github.com/DisplayKit/responsive_styles/issues Bug-Submit: https://github.com/DisplayKit/responsive_styles/issues/new upstream-ontologist-0.2.4/testdata/pubspec.yml/pubspec.yml000064400000000000000000000006021046102023000221220ustar 00000000000000name: dart description: You don't need to be worried just because you have to support multiple screens version: 2.1.53 homepage: https://github.com/DisplayKit/responsive_styles environment: sdk: ">=2.17.5 <3.0.0" flutter: ">=1.17.0" dependencies: flutter: sdk: flutter mockito: ^5.2.0 dev_dependencies: flutter_test: sdk: flutter flutter_lints: ^2.0.0 flutter: upstream-ontologist-0.2.4/testdata/python/blah/__init__.py000064400000000000000000000000001046102023000220250ustar 
00000000000000upstream-ontologist-0.2.4/testdata/python/debian/control000064400000000000000000000005241046102023000216460ustar 00000000000000Source: golang-github-blah-blah Section: devel Priority: optional Standards-Version: 4.2.0 Maintainer: Some Maintainer Build-Depends: debhelper (>= 11~), dh-golang, golang-any Package: golang-github-blah-blah Architecture: all Depends: ${misc:Depends} Description: blah blah blah blah upstream-ontologist-0.2.4/testdata/python/expected.yaml000064400000000000000000000007141046102023000215070ustar 00000000000000{"Bug-Database": "https://salsa.debian.org/jelmer/lintian-brush/issues", "Bug-Submit": "https://salsa.debian.org/jelmer/lintian-brush/issues/new", "Description": "blah blah", "Homepage": "https://salsa.debian.org/jelmer/lintian-brush", "Name": "blah", "Repository": "https://salsa.debian.org/jelmer/lintian-brush.git", "Repository-Browse": "https://salsa.debian.org/jelmer/lintian-brush", "Summary": "Automatic lintian issue fixer", "Version": "0.16"} upstream-ontologist-0.2.4/testdata/python/setup.py000075500000000000000000000005011046102023000205310ustar 00000000000000#!/usr/bin/python3 from setuptools import setup setup( name="blah", version="0.16", packages=["blah"], url="https://salsa.debian.org/jelmer/lintian-brush", description="Automatic lintian issue fixer", project_urls={ "Repository": "https://salsa.debian.org/jelmer/lintian-brush", }, ) upstream-ontologist-0.2.4/testdata/r-description/DESCRIPTION000064400000000000000000000056251046102023000217770ustar 00000000000000Package: readxl Title: Read Excel Files Version: 1.3.1 Authors@R: c(person(given = "Hadley", family = "Wickham", role = "aut", email = "hadley@rstudio.com", comment = c(ORCID = "0000-0003-4757-117X")), person(given = "Jennifer", family = "Bryan", role = c("aut", "cre"), email = "jenny@rstudio.com", comment = c(ORCID = "0000-0002-6983-2759")), person(given = "RStudio", role = c("cph", "fnd"), comment = "Copyright holder of all R code and all C/C++ code 
without explicit copyright attribution"), person(given = "Marcin", family = "Kalicinski", role = c("ctb", "cph"), comment = "Author of included RapidXML code"), person(given = "Komarov Valery", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Christophe Leitienne", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Bob Colbert", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "David Hoerl", role = c("ctb", "cph"), comment = "Author of included libxls code"), person(given = "Evan Miller", role = c("ctb", "cph"), comment = "Author of included libxls code")) Description: Import excel files into R. Supports '.xls' via the embedded 'libxls' C library and '.xlsx' via the embedded 'RapidXML' C++ library . Works on Windows, Mac and Linux without external dependencies. License: GPL-3 URL: https://readxl.tidyverse.org, https://github.com/tidyverse/readxl BugReports: https://github.com/tidyverse/readxl/issues Imports: cellranger, Rcpp (>= 0.12.18), tibble (>= 1.3.1), utils Suggests: covr, knitr, rmarkdown, rprojroot (>= 1.1), testthat LinkingTo: progress, Rcpp VignetteBuilder: knitr Encoding: UTF-8 LazyData: true Note: libxls-SHA cef1393 RoxygenNote: 6.1.1 NeedsCompilation: yes Packaged: 2019-03-13 16:01:23 UTC; jenny Author: Hadley Wickham [aut] (), Jennifer Bryan [aut, cre] (), RStudio [cph, fnd] (Copyright holder of all R code and all C/C++ code without explicit copyright attribution), Marcin Kalicinski [ctb, cph] (Author of included RapidXML code), Komarov Valery [ctb, cph] (Author of included libxls code), Christophe Leitienne [ctb, cph] (Author of included libxls code), Bob Colbert [ctb, cph] (Author of included libxls code), David Hoerl [ctb, cph] (Author of included libxls code), Evan Miller [ctb, cph] (Author of included libxls code) Maintainer: Jennifer Bryan Repository: CRAN Date/Publication: 2019-03-13 16:30:02 UTC 
upstream-ontologist-0.2.4/testdata/r-description/expected.yaml000064400000000000000000000013601046102023000227460ustar 00000000000000Archive: CRAN Bug-Database: https://github.com/tidyverse/readxl/issues Bug-Submit: https://github.com/tidyverse/readxl/issues/new Contact: Jennifer Bryan Homepage: https://github.com/tidyverse/readxl Name: readxl Repository: https://github.com/tidyverse/readxl.git Repository-Browse: https://github.com/tidyverse/readxl Description: |- Import excel files into R. Supports '.xls' via the embedded 'libxls' C library and '.xlsx' via the embedded 'RapidXML' C++ library . Works on Windows, Mac and Linux without external dependencies. License: GPL-3 Maintainer: !Person name: Jennifer Bryan email: jenny@rstudio.com Summary: Read Excel Files Version: 1.3.1 upstream-ontologist-0.2.4/testdata/readme-command/README000064400000000000000000000002611046102023000212270ustar 00000000000000This is a project. You can clone it by running: git clone https://github.com/blah/blah blah Please report bugs at: https://github.com/OpenPrinting/cups-filters/issues upstream-ontologist-0.2.4/testdata/readme-command/expected.yaml000064400000000000000000000007431046102023000230410ustar 00000000000000Bug-Database: https://github.com/OpenPrinting/cups-filters/issues Bug-Submit: https://github.com/OpenPrinting/cups-filters/issues/new Homepage: https://github.com/blah/blah Name: readme-command Repository: https://github.com/blah/blah.git Repository-Browse: https://github.com/blah/blah Description: > This is a project. You can clone it by running: git clone https://github.com/blah/blah blah Please report bugs at: https://github.com/OpenPrinting/cups-filters/issues upstream-ontologist-0.2.4/testdata/readme-other/README000064400000000000000000000001621046102023000207320ustar 00000000000000This is a project. 
One of the dependencies is blah, which you can install from: https://github.com/blah/blah.git upstream-ontologist-0.2.4/testdata/readme-other/expected.yaml000064400000000000000000000006521046102023000225430ustar 00000000000000{"Bug-Database": "https://github.com/blah/blah/issues", "Bug-Submit": "https://github.com/blah/blah/issues/new", "Homepage": "https://github.com/blah/blah", "Name": "readme-other", "Repository": "https://github.com/blah/blah.git", "Repository-Browse": "https://github.com/blah/blah", "Description": "This is a project. One of the dependencies is blah, which you can install from:\n\nhttps://github.com/blah/blah.git\n"} upstream-ontologist-0.2.4/testdata/security.md/SECURITY.md000064400000000000000000000000461046102023000215360ustar 00000000000000Please send email to blah@example.com upstream-ontologist-0.2.4/testdata/security.md/debian/control000064400000000000000000000000151046102023000225660ustar 00000000000000Source: blah upstream-ontologist-0.2.4/testdata/security.md/debian/upstream/metadata000064400000000000000000000001471046102023000245340ustar 00000000000000--- Repository: https://github.com/example/blah.git Repository-Browse: https://github.com/example/blah upstream-ontologist-0.2.4/testdata/security.md/expected.yaml000064400000000000000000000000671046102023000224350ustar 00000000000000{"Name": "security.md", "Security-MD": "SECURITY.md"} upstream-ontologist-0.2.4/testdata/setup.py1/README.md000064400000000000000000000000561046102023000206270ustar 00000000000000This is a python project that does something. 
upstream-ontologist-0.2.4/testdata/setup.py1/expected.yaml000064400000000000000000000012201046102023000220270ustar 00000000000000Author: - !Person name: Jelmer Vernooij email: jelmer@jelmer.uk Bug-Database: https://github.com/jelmer/upstream-ontologist/issues Bug-Submit: https://github.com/jelmer/upstream-ontologist/issues/new Contact: Jelmer Vernooij Description: "This is a python project that does something.\n" Homepage: https://github.com/jelmer/upstream-ontologist Maintainer: !Person name: Jelmer Vernooij email: jelmer@jelmer.uk Name: upstream-ontologist Repository: https://github.com/jelmer/upstream-ontologist.git Repository-Browse: https://github.com/jelmer/upstream-ontologist Summary: tracking of upstream project metadata Version: 0.1.35 upstream-ontologist-0.2.4/testdata/setup.py1/setup.cfg000064400000000000000000000024261046102023000211740ustar 00000000000000[metadata] name = upstream-ontologist version = 0.1.35 author = Jelmer Vernooij author_email = jelmer@jelmer.uk maintainer = Jelmer Vernooij maintainer_email = jelmer@jelmer.uk url = https://github.com/jelmer/upstream-ontologist description = tracking of upstream project metadata long_description = file:README.md long_description_content_type = text/markdown project_urls = Repository=https://github.com/jelmer/upstream-ontologist.git [options] python_requires = >= 3.7 packages = upstream_ontologist upstream_ontologist.debian install_requires = python_debian typing_extensions;python_version<="3.7" ruamel.yaml # Ideally this would be an optional dependency breezy>=3.3.0 tests_require = breezy>=3.3.0 [options.entry_points] console_scripts = guess-upstream-metadata=upstream_ontologist.__main__:main autodoap=upstream_ontologist.doap:main autocodemeta=upstream_ontologist.codemeta:main [options.extras_require] cargo = tomlkit debian_changelog = python-debianbts httplib2>=0.7.8 python_debian debian_watch = debmutate[watch]>=0.59 debian_rules = debmutate pyproject = tomlkit homepage = bs4 readme = docutils lxml 
bs4 markdown pygments setup.cfg = setuptools [options.package_data] upstream_ontologist = py.typed upstream-ontologist-0.2.4/testdata/setup.py1/setup.py000075500000000000000000000001731046102023000210650ustar 00000000000000#!/usr/bin/python3 from setuptools import setup setup(data_files=[("share/man/man1", ["man/guess-upstream-metadata.1"])]) upstream-ontologist-0.2.4/testdata/travis.yml/.travis.yml000064400000000000000000000010011046102023000217100ustar 00000000000000language: go go_import_path: github.com/ethereum/go-ethereum sudo: false jobs: allow_failures: - stage: build os: osx go: 1.17.x env: - azure-osx - azure-ios - cocoapods-ios include: # This builder only tests code linters on latest version of Go - stage: lint os: linux dist: bionic go: 1.19.x env: - lint git: submodules: false # avoid cloning ethereum/tests script: - go run build/ci.go lint upstream-ontologist-0.2.4/testdata/travis.yml/expected.yaml000064400000000000000000000001011046102023000222640ustar 00000000000000Name: travis.yml Go-Import-Path: github.com/ethereum/go-ethereum upstream-ontologist-0.2.4/testdata/watch/debian/watch000064400000000000000000000002261046102023000210600ustar 00000000000000version=4 opts=repack,compression=xz,dversionmangle=s/\+ds//,repacksuffix=+ds \ https://github.com/example/example/releases .*/Toric-(\d\S*)\.tar\.gz upstream-ontologist-0.2.4/testdata/watch/expected.yaml000064400000000000000000000004721046102023000212750ustar 00000000000000{"Bug-Database": "https://github.com/example/example/issues", "Bug-Submit": "https://github.com/example/example/issues/new", "Homepage": "https://github.com/example/example", "Name": "example", "Repository": "https://github.com/example/example.git", "Repository-Browse": "https://github.com/example/example"} upstream-ontologist-0.2.4/testdata/watch-git/debian/watch000064400000000000000000000002411046102023000216360ustar 00000000000000version=3 opts="mode=git, gitmode=shallow, pgpmode=gittag" \ 
https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git \ refs/tags/(.*) debian upstream-ontologist-0.2.4/testdata/watch-git/expected.yaml000064400000000000000000000001641046102023000220540ustar 00000000000000{"Name": "watch-git", "Repository": "https://git.kernel.org/pub/scm/linux/kernel/git/firmware/linux-firmware.git"} upstream-ontologist-0.2.4/testdata/watch2/debian/watch000064400000000000000000000002641046102023000211440ustar 00000000000000version=4 opts=repack,compression=xz,dversionmangle=s/\+ds//,repacksuffix=+ds \ https://github.com/example/example-cat/tags \ (?:.*?/)?v?(\d[\d.]*)\.tar\.gz debian uupdate upstream-ontologist-0.2.4/testdata/watch2/expected.yaml000064400000000000000000000005221046102023000213530ustar 00000000000000{"Bug-Database": "https://github.com/example/example-cat/issues", "Bug-Submit": "https://github.com/example/example-cat/issues/new", "Homepage": "https://github.com/example/example-cat", "Name": "example-cat", "Repository": "https://github.com/example/example-cat.git", "Repository-Browse": "https://github.com/example/example-cat"}