flexi_logger-0.29.8/.cargo_vcs_info.json

{ "git": { "sha1": "acc1ed0d291b567870b1f3bd46cdf4cc3b25503d" }, "path_in_vcs": "" }

flexi_logger-0.29.8/.github/workflows/ci_test.yml

name: CI

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
  workflow_dispatch:

jobs:
  fmt:
    name: Check formatting
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install latest nightly with rustfmt
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          profile: minimal
          override: true
          components: rustfmt
      - name: Run cargo fmt
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: -- --check

  lint:
    name: Clippy check
    strategy:
      matrix:
        os: [ ubuntu-latest, windows-latest, macos-latest ]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - name: Install latest nightly with clippy
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          profile: minimal
          override: true
          components: clippy
      - name: Run cargo clippy
        uses: actions-rs/clippy-check@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          args: --all-targets --all-features -- -D warnings

  build_and_test:
    name: Build and test
    strategy:
      matrix:
        os: [ ubuntu-latest, windows-latest, macos-latest ]
        rust: [ stable, 1.72.0 ]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - name: Install Rust toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: ${{ matrix.rust }}
          profile: minimal
          override: true
      - name: Run cargo build
        uses: actions-rs/cargo@v1
        with:
          command: build
          args: --all-features
      - name: Run cargo test (default features only)
        uses: actions-rs/cargo@v1
        with:
          command: test
      - name: Run cargo test with all features
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --all-features

flexi_logger-0.29.8/.gitignore

examples/_*
target
Cargo.lock
*.alerts
*.log
*.seclog
*.gz
*.zip
link_to_log
link_to_mt_log
log_files
todo
*logspec.toml
tests/*logspec.toml
*~
.*~
.vscode
.idea/

flexi_logger-0.29.8/.markdownlint.json

{
  "MD041": { "level": 1 },
  "MD013": { "line_length": 100, "heading_line_length": 300, "code_block_line_length": 180 },
  "MD033": { "allowed_elements": [ "span" ] }
}

flexi_logger-0.29.8/CHANGELOG.md

# Changelog for flexi_logger

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.29.8] - 2024-12-18

Fix issue #185.

## [0.29.7] - 2024-12-10

Fix issue #184.

Update dependencies. Bump minimal supported rust version to 1.72.0.

## [0.29.6] - 2024-11-02

Allow for custom process names when using the syslog writer (PR #182, kudos to [Julien JPK](https://github.com/julienjpk-withings)).

## [0.29.5] - 2024-10-29

Fix [issue #181](https://github.com/emabee/flexi_logger/issues/181). Increase stack sizes for flusher threads from very minimal 128 to 1024 bytes.

Add badge for OpenSSF Best Practices.

## [0.29.4] - 2024-10-21

Fix [issue #179](https://github.com/emabee/flexi_logger/issues/179): in rotation with a custom time format, the custom time format was not applied properly (kudos to [alboyer](https://github.com/alboyer)).

Remove the RwLock around the color palette.
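For readers tracking the rotation-related fixes above, here is a minimal, hedged sketch of a typical rotation setup; the size limit and keep-count are arbitrary, and the 0.29.4 fix itself concerns the `Naming::TimestampsCustomFormat` variant, while this sketch uses plain `Naming::Timestamps`:

```rust
use flexi_logger::{Cleanup, Criterion, FileSpec, Logger, Naming};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Keep the handle alive; dropping it shuts the logger down and flushes buffers.
    let _logger = Logger::try_with_str("info")?
        .log_to_file(FileSpec::default())
        .rotate(
            Criterion::Size(10_000_000), // rotate when the current file exceeds ~10 MB
            Naming::Timestamps,          // rotated files get a timestamp infix
            Cleanup::KeepLogFiles(7),    // keep at most 7 rotated files
        )
        .start()?;
    log::info!("logging with rotation");
    Ok(())
}
```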
## [0.29.3] - 2024-10-12

Removed dependency to `glob` by implementing the necessary file searches explicitly, to fix issue-173.

## [0.29.2] - 2024-10-07

Fix a regression ([issue #178](https://github.com/emabee/flexi_logger/issues/178)) introduced with [0.29.1]. Fix error with rotation & append & explicit directory & Naming::Timestamps.

## [0.29.1] - 2024-10-02

Fix [issue #176](https://github.com/emabee/flexi_logger/issues/176): leading underscore in log file name if only the infix is used (no basename, no start time, no discriminant).

## [0.29.0] - 2024-08-25

Revised `SyslogWriter` (-> version bump): introduced builder pattern, added a configuration option for the message format (resolves [issue #168](https://github.com/emabee/flexi_logger/issues/168), kudos to [krims0n32](https://github.com/krims0n32)).

`LoggerHandle::existing_log_files` now also returns a meaningful result if file rotation is not used. Kudos to [drdo](https://github.com/drdo) for [discussion 170](https://github.com/emabee/flexi_logger/discussions/170).

## [0.28.5] - 2024-06-21

Remove unnecessary dependency to `is-terminal`.

Add impl `From` for `LogSpecification`. Kudos to [Oakchris1955](https://github.com/Oakchris1955).

## [0.28.4] - 2024-06-14

Fix [issue #162](https://github.com/emabee/flexi_logger/issues/162) (FileLogWriter does not follow its max_level), kudos to [JoeWildfong](https://github.com/JoeWildfong).

## [0.28.3] - 2024-06-10

Add special handling for empty current infix to `Naming::TimestampsCustomFormat` ([issue #161](https://github.com/emabee/flexi_logger/issues/161)).

## [0.28.2] - 2024-06-09

Add variant `Naming::TimestampsCustomFormat` ([issue #158](https://github.com/emabee/flexi_logger/issues/158)), kudos to [jb-alvarado](https://github.com/jb-alvarado).

## [0.28.1] - 2024-06-01

Introduce `flexi_logger::init()` as super-minimal entry usage.

Update dependencies.

## [0.28.0] - 2024-03-16

Detach from `lazy_static`, use `std::sync::OnceLock` instead. Bump minimal supported rust version to 1.70.

If `flexi_logger` runs into issues itself, it will try to write error messages into the configured error output channel. By default, `flexi_logger` panics if writing to the error output channel fails. It is now possible to gracefully "swallow" the error messages and continue (see [panic_if_error_channel_is_broken](https://docs.rs/flexi_logger/latest/flexi_logger/struct.Logger.html#method.panic_if_error_channel_is_broken)).

The new feature `kv` allows making use of the `kv` feature of `log` together with `flexi_logger`'s format functions, and adds a dependency to `log/kv_serde`.

The new feature `json` adds a format function `json_format` and dependencies to `serde_json`, `serde` and `serde_derive`.

## [0.27.4] - 2024-01-20

Add ability to omit the basename cleanly, without leading underscore ([issue #153](https://github.com/emabee/flexi_logger/issues/153), kudos to [krystejj](https://github.com/krystejj)).

## [0.27.3] - 2023-11-10

Fix [issue #152](https://github.com/emabee/flexi_logger/issues/152).

## [0.27.2] - 2023-09-27

Fix wrong timestamp handling for the second rotation (second part of [issue #150](https://github.com/emabee/flexi_logger/issues/150)).

## [0.27.1] - 2023-09-27

Fix issues with sub-second rotations and with cleanup when all logfiles should be compressed ([issue #150](https://github.com/emabee/flexi_logger/issues/150)).

## [0.27.0] - 2023-09-20

Revise, and modify the signature of, `LoggerHandle::existing_log_files()` (version bump).

Extend the trait `LogWriter` with an optional method `rotate`.
Extend impact of `LoggerHandle::trigger_rotation()` to all configured writers.

## [0.26.1] - 2023-09-19

Introduce new naming variants that work without `_rCURRENT` files: `Naming::TimestampsDirect` and `Naming::NumbersDirect` (delivers #127). Improve documentation of filename handling.

Introduce `LoggerHandle.trigger_rotation()` (delivers #147).

## [0.26.0] - 2023-08-30

Re-open output also for other writers (delivers #143). Rename method to re-open output from LoggerHandle (leads to version bump).

Use `dep:` in Cargo.toml for references to most dependencies, in order to avoid implicit "features".

Fix #145 (minor internal optimization).

## [0.25.6] - 2023-07-28

Add methods `LoggerHandle::adapt_duplication_to_stderr` and `LoggerHandle::adapt_duplication_to_stdout` (realizes issue #142).

Extend docu on providing custom format.

Use rust-script instead of cargo-script for qualification scripts.

Update dependencies.

## [0.25.5] - 2023-05-25

Use display (rather than debug) formatting for thread names (kudos to [mpalmer](https://github.com/mpalmer)).

## [0.25.4] - 2023-05-05

Add `LoggerHandle::existing_log_files()`.

## [0.25.3] - 2023-03-04

Introduce additional `WriteMode` variant `SupportCapture`.

## [0.25.2] - 2023-03-02

Replace dependency `atty` with `is-terminal`, due to [RUSTSEC-2021-0145](https://rustsec.org/advisories/RUSTSEC-2021-0145).

## [0.25.1] - 2023-02-06

Use chrono's support for rfc3339. Improve tests for `DeferredNow`.

## [0.25.0] - 2023-02-03

Fix issues #132 and #133.

Update dependencies. Bump MSRV to 1.60, because toml needs it now.

Improve documentation of feature dependencies. Minor stuff.

## [0.24.2] - 2022-12-15

Move from unmaintained `ansi_term` to `nu-ansi-term`. Fix new clippies.

## [0.24.1] - 2022-11-01

Some improvements in respect to `use_utc`:

- add method DeferredNow::now_utc_owned()
- documentation
- test improvement

## [0.24.0] - 2022-10-06

Revert back to using `chrono`, since `chrono` is now fortunately maintained again and its timezone handling is fixed meanwhile:

- this change largely reverts the changes done for [0.19.6]
- a version bump is necessary since this affects the API, e.g. in `DeferredNow`
- the feature `use_chrono_for_offset` became obsolete and is removed

On linux and Mac, improve the logic that handles the issue described again in [issue-122](https://github.com/emabee/flexi_logger/issues/122).

## [0.23.3] - 2022-09-11

Re-introduce `LoggerHandle::clone()`.

## [0.23.2] - 2022-09-06

Fix security advisory (see #117) by replacing the dependency from `notify 4.0` with `notify-debouncer-mini 0.2` (which depends on `notify 5.0`). As a side-effect, the thread `flexi_logger-specfile-watcher` is replaced with `notify-rs debouncer loop`.

Adapt and simplify the submodule `trc` a bit.

## [0.23.1] - 2022-09-02

Fix a panic that can happen if `Naming::Timestamps` and `FileSpec::o_suffix(None)` are used and rotation happens within a second ([issue-116](https://github.com/emabee/flexi_logger/issues/116)).

Bump MSRV to 1.59 (because the `time` crate did this).

## [0.23.0] - 2022-08-04

Switch to edition 2021, use latest patch of `time` version "0.3", bump minimal supported rust version to "1.57.0".

## [0.22.6] - 2022-08-03

Add interconversions between log::LevelFilter and flexi_logger::Duplicate (kudos to [rlee287](https://github.com/rlee287)).

## [0.22.5] - 2022-06-03

Only depend on the parts of crossbeam that are used (kudos to [bsilver8192](https://github.com/bsilver8192)).
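To illustrate the `Duplicate` enum that the [0.22.6] entry refers to, here is a hedged sketch against the current builder API; the level choices and the default `FileSpec` are arbitrary:

```rust
use flexi_logger::{Duplicate, FileSpec, Logger};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Write all enabled log lines to a file, and duplicate warnings and errors to stderr.
    let _logger = Logger::try_with_str("debug")?
        .log_to_file(FileSpec::default())
        .duplicate_to_stderr(Duplicate::Warn)
        .start()?;
    log::warn!("this line also appears on stderr");
    log::debug!("this line only goes to the log file");
    Ok(())
}
```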
## [0.22.4] - 2022-06-03 Add support for Rfc3164 to `SyslogWriter` (kudos to [mbodmer](https://github.com/mbodmer)). Add `Clone` and `Copy` implementations to enum Duplicate (kudos to [ComplexSpaces](complexspacescode@gmail.com)). ## [0.22.3] - 2022-02-01 Code maintenance: remove the feature "external_rotation". Bump minimal version of `time` crate to "0.3.7". ## [0.22.2] - 2022-01-08 Add `LoggerHandle::reopen_outputfile` and deprecate feature `external_rotation`. ## [0.22.1] - 2022-01-05 Enable symlink on all unix platforms, not just linux. Rework the optional syslog writer (kudos to [ObsceneGiraffe](https://github.com/ObsceneGiraffe)): - bugfix: write only full lines - use owned buffer to avoid allocations - encapsulate implementation details - remove additional buffer from `SyslogConnector::Tcp` Add method `LoggerHandle::flw_config` (kudos to [Ivan Azoyan](https://github.com/azoyan)). Reduce the used feature-list of the optional dependency chrono (to get rid of an indirect dependency to an old time version). Add feature `external_rotation`. ## [0.22.0] - 2021-12-12 Improve the option to use UTC for all timestamps (in filenames and log lines) () such that the error message regarding a failed offset detection is not provoked if UTC is enforced. The API modification done in 0.21.0 to `DeferredNow` is reverted. ## [0.21.0] - 2021-12-10 Add option to use UTC for all timestamps (in filenames and log lines). ## [0.20.1] - 2021-11-18 Add the optional feature `use_chrono_for_offset` as a workaround for the current behavior of `time` on unix. Add an option to configure the error output channel. ## [0.20.0] - 2021-11-13 Switch to `time 0.3.5`, and retrieve the UTC offset while `flexi_logger` is initialized. See also `time`'s [CHANGELOG](https://github.com/time-rs/time/blob/main/CHANGELOG.md#035-2021-11-12). **Reason for the version bump**: The inner representation of `DeferredNow` has changed from `chrono::DateTime` to `time::OffsetDateTime`, and this is visible e.g. to implementors of format functions. ## [0.19.6] - 2021-10-26 Use `time` directly, instead of `chrono`, due to [RUSTSEC-2020-0159](https://rustsec.org/advisories/RUSTSEC-2020-0159). Bumps the minimal supported rust version to 1.51.0. Improves performance a bit. Unfortunately, this version suffers on linux from `time`'s somewhat radical behavior to not support UTC offsets on linux. ## [0.19.5] - 2021-10-19 Remove time 0.1 from dependency tree (see [PR 96](https://github.com/emabee/flexi_logger/issues/96)) - kudos to [complexspaces](https://github.com/complexspaces)! Add feature `dont_minimize_extra_stacks` (fixes [issue-95](https://github.com/emabee/flexi_logger/issues/95)) - kudos to [leishiao](https://github.com/leishiao)! ## [0.19.4] - 2021-09-15 Fix [issue-94](https://github.com/emabee/flexi_logger/issues/94) - kudos to [leishiao](https://github.com/leishiao)! ## [0.19.0] - [0.19.3] - 2021-09-10 Platform-specific fixes, and introduction of github-actions-based CI. Kudos to [dallenng](https://github.com/dallenng) and [HEnquist](https://github.com/HEnquist)! `FileLogWriter` has been functionally extended to make it usable "stand-alone". As part of that, the `FlWriteMode` is gone, and the normal `WriteMode` is used. `WriteMode::BufferDontFlushWith` was added. A new experimental feature (and module) "trc" allows using `flexi_logger` functionality with `tracing`. Error handling is improved, error codes are documented comprehensively, errors now also print a link to the error documentation. 
Default color for DEBUG lines was changed (fixes [issue-88](https://github.com/emabee/flexi_logger/issues/88), kudos goes to [HEnquist](https://github.com/HEnquist)!).

Test coverage is improved.

## [0.18.1] - 2021-08-27

Implement async mode also for `log_to_stdout()` and `log_to_stderr()`.

## [0.18.0] - 2021-06-02

Significant API revision, to better cope with new features and for better readability/applicability. Most important changes:

- Better error handling in factory methods:
  - `Logger::with_env()` is replaced with `Logger::try_with_env()`, which returns a `Result`
  - `Logger::with_str()` is replaced with `Logger::try_with_str()`, which returns a `Result`
  - `Logger::with_env_or_str()` is replaced with `Logger::try_with_env_or_str()`, which returns a `Result`
  - consequently, the method `Logger::check_parser_error` is gone
- Bundling file-related aspects
  - introduction of `FileSpec`
  - move of filename-related methods from `Logger` to `FileSpec` (and similarly on the `FileLogWriter`)
- `Logger::log_target(LogTarget)` is replaced with a set of methods
  - `Logger::log_to_file(FileSpec)`
  - `Logger::log_to_stdout()`
  - `Logger::log_to_stderr()`
  - `Logger::log_to_writer(Box<dyn LogWriter>)`
  - `Logger::log_to_file_and_writer(FileSpec, Box<dyn LogWriter>)`
  - `Logger::do_not_log()`
- The new method [`Logger::write_mode(WriteMode)`](https://docs.rs/flexi_logger/latest/flexi_logger/struct.Logger.html#method.buffer_and_flush)
  - replaces several methods to control buffer handling etc.
  - offers additionally **asynchronous file I/O** (if the crate feature `async` is used)
- Keeping the `LoggerHandle` alive has become crucial (except for trivial cases)!
- Several methods are now more generic with their input parameters
- A new method `LoggerHandle::reset_flw` allows reconfiguring a used `FileLogWriter` at runtime

Added an option to apply a stateful filter before log lines are really written (kudos to jesdazrez (Jesús Trinidad Díaz Ramírez)!).

Fixed error handling in logspec parsing (wrong error was thrown).

Several docu improvements.

## [0.17.1] - 2021-01-14

Add options `Logger::buffer_and_flush()` and `buffer_and_flush_with()` as means to avoid long output delays.

## [0.17.0] - 2021-01-08

Introduce optional buffering of log output. This increases performance (which can be relevant for programs with really high log production), but delays log line appearance in the output, which can be confusing, and requires flushing or shutting down the logger at the end of the program to ensure that all logs are written to the output before the program terminates.

Reduce the size of `LogConfiguration` considerably by moving the optional and rarely used textfilter into the heap. This unfortunately leads to an incompatible change in a rarely used public method (`LogConfiguration::text_filter()` was returning a `&Option<Regex>`, and is now returning `Option<&Regex>`), which enforces a version bump.

Rename ReconfigurationHandle to LoggerHandle (and add a type alias with the old name).

Add the public method `LoggerHandle::flush()`.

Expose `DeferredNow::new()`.

Add some `must_use` annotations where appropriate.

## [0.16.2] - 2020-11-18

Add module [code-examples](https://docs.rs/flexi_logger/latest/flexi_logger/code_examples/index.html) with additional usage documentation. This is a follow-up of a PR, kudos goes to [devzbysiu](https://github.com/devzbysiu)!

## [0.16.1] - 2020-09-20

Support empty toml spec files (kudos to ijackson for [pull request 66](https://github.com/emabee/flexi_logger/pull/66)!) (was supposed to be part of 0.16.0, but I had forgotten to merge it).
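As a hedged sketch of the builder style introduced with [0.18.0], combined with the buffered output introduced with [0.17.0], using today's method and variant names (`WriteMode::BufferAndFlush` is one of several available modes):

```rust
use flexi_logger::{FileSpec, Logger, WriteMode};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Buffered output is flushed periodically and on shutdown;
    // keep the handle alive so the final flush happens.
    let _logger = Logger::try_with_str("info")?
        .log_to_file(FileSpec::default())
        .write_mode(WriteMode::BufferAndFlush)
        .start()?;
    log::info!("buffered file logging");
    Ok(())
}
```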
## [0.16.0] - 2020-09-19

If file logging is used, do not create the output file if no log is written. Solves [issue-62](https://github.com/emabee/flexi_logger/issues/62).

Improve color handling:

- introduce AdaptiveFormat for a clearer API
- Support using feature `atty` without provided coloring
- Extend example `colors` to provide insight in how AdaptiveFormat works
- Remove the deprecated method `Logger::do_not_log()`; use `log_target()` with `LogTarget::DevNull` instead.
- Remove deprecated method `Logger::o_log_to_file()`; use `log_target()` instead. The clearer convenience method `Logger::log_to_file()` is still available.

Improve the compression feature. Solves [issue-65](https://github.com/emabee/flexi_logger/issues/65).

- breaking change: change the file suffix for the compressed log files from `.zip` to `.gz`
- Fix wrong wording in code and documentation
- deprecate the feature name `ziplog`; the feature is now called `compress`
- rename `Cleanup::KeepZipFiles` into `Cleanup::KeepCompressedFiles` and `Cleanup::KeepLogAndZipFiles` into `Cleanup::KeepLogAndCompressedFiles`
- the old names still work but are deprecated

## [0.15.12] - 2020-08-28

Make `1.37.0` the minimal rust version for `flexi_logger`.

## [0.15.11] - 2020-08-07

Introduce feature `specfile_without_notification` to allow coping with OS issues (solves [issue-59](https://github.com/emabee/flexi_logger/issues/59)).

## [0.15.10] - 2020-07-22

Minor code maintenance.

## [0.15.9] - 2020-07-21

Allow using the log target with fantasy names, like with `env_logger`. Solves [issue-56](https://github.com/emabee/flexi_logger/issues/56).

## [0.15.8] - 2020-07-20

Allow modifying the coloring palette through the environment variable `FLEXI_LOGGER_PALETTE`. See function [style](https://docs.rs/flexi_logger/latest/flexi_logger/fn.style.html) for details. Solves [issue-55](https://github.com/emabee/flexi_logger/issues/55).

By default, don't use colors if stdout or stderr are not a terminal. Solves [issue-57](https://github.com/emabee/flexi_logger/issues/57).

Add variant Criterion::AgeOrSize (kudos to [pscott](https://github.com/pscott)!, [PR-54](https://github.com/emabee/flexi_logger/pull/54)).

## [0.15.7] - 2020-07-02

Add some Debug derives (kudos to [pscott](https://github.com/pscott)!, [PR-52](https://github.com/emabee/flexi_logger/pull/52)).

## [0.15.6] - 2020-07-02

Introduce separate formatting for stdout (kudos to [pscott](https://github.com/pscott)!, [PR-51](https://github.com/emabee/flexi_logger/pull/51)).

Deprecate `Logger::do_not_log()`.

## [0.15.5] - 2020-06-18

Add `Logger::duplicate_to_stdout()` to fix [issue-47](https://github.com/emabee/flexi_logger/issues/47).

## [0.15.4] - 2020-06-09

Fix [issue-45](https://github.com/emabee/flexi_logger/issues/45), which was a panic in the specfile watcher when some log files were deleted manually while the program was running (kudos to [avl](https://github.com/avl)!, [PR-46](https://github.com/emabee/flexi_logger/pull/46)).

## [0.15.3] - 2020-06-04

Add compatibility with multi_log by adding methods `Logger::build` and `Logger::build_with_specfile` (fixes issue-44).

Add `LogSpecBuilder::insert_modules_from()` (fixes issue-43).

## [0.15.2] - 2020-03-24

Improve handling of parse-errors.

Fix default format for files (was and is documented to be uncolored, but was colored).
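To make the AdaptiveFormat-related entries above more concrete, here is a hedged sketch using the current method names; whether colors are applied depends on whether stderr is actually a terminal:

```rust
use flexi_logger::{AdaptiveFormat, Logger};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Use the detailed format; colors are used only if stderr is a terminal.
    let _logger = Logger::try_with_str("info")?
        .log_to_stderr()
        .adaptive_format_for_stderr(AdaptiveFormat::Detailed)
        .start()?;
    log::info!("colored only on a tty");
    Ok(())
}
```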
## [0.15.1] - 2020-03-04

Make the textfilter functionality an optional default feature; deselecting it removes the regex crate as a required dependency, which reduces the size overhead for any binary using `flexi_logger` (kudos to [Petre Eftime](petre.eftime@gmail.com)!).

## [0.15.0] - 2020-02-27

Refine and rename error variants to allow e.g. differentiating between errors related to the output (files) and errors related to the specfile.

## [0.14.8] - 2020-02-06

Make cleanup more robust, and allow controlling the cleanup-thread also with `Logger::start_with_specfile()`.

## [0.14.7] - 2020-02-04

If rotation is used with cleanup, do the cleanup by default in a background thread (solves [issue 39](https://github.com/emabee/flexi_logger/issues/39)).

For the ziplog feature, switch from `zip` crate to `flate2`.

## [0.14.6] - 2020-01-28

Fix [issue 38](https://github.com/emabee/flexi_logger/issues/38) (Old log files are not removed if rCURRENT doesn't overflow).

## [0.14.5] - 2019-11-06

Pass format option into custom loggers (pull request 37).

## [0.14.4] - 2019-09-25

Fix bug in specfile handling ([issue 36](https://github.com/emabee/flexi_logger/issues/36)).

Improve docu and implementation of create_symlink.

Minor other stuff.

## [0.14.3] - 2019-08-04

Allow defining custom handlers for the default log target (solves [issue 32](https://github.com/emabee/flexi_logger/issues/32)).

## [0.14.2] - 2019-08-04

Use implicit locking of stderr in StdErrWriter.

Allow failures in travis' windows build.

Add license files.

## [0.14.1] - 2019-08-04

Support recursive logging also with FileLogWriter, sharing the buffer with the PrimaryWriter.

Fix multi-threading issue (incorrect line-break handling with stderr).

## [0.14.0] - 2019-07-22

Further stabilize the specfile feature.

Remove `LogSpecification::ensure_specfile_exists()` and `LogSpecification::from_file()` from public API, where they should not be (-> version bump).

Harmonize all eprintln! calls to prefix the output with "`[flexi_logger]` ".

## [0.13.4] - 2019-07-19

Only relevant for the `specfile` feature: initialize the logger before dealing in any way with the specfile, and do the initial read of the specfile in the main thread, i.e. synchronously, to ensure a deterministic behavior during startup (fixes [issue 31](https://github.com/emabee/flexi_logger/issues/31)).

## [0.13.3] - 2019-07-08

Improve the file watch for the specfile to make the `specfile` feature more robust. E.g. allow editing the specfile on linux with editors that move the original file to a backup name.

Add an option to write the log to stdout, as recommended for [twelve-factor apps](https://12factor.net/logs).

## [0.13.2] - 2019-06-02

Make get_creation_date() more robust on all platforms.

## [0.13.1] - 2019-05-29

Fix fatal issue with get_creation_date() on linux (see ).

## [0.13.0] - 2019-05-28

Improve performance for plain stderr logging.

Improve robustness for recursive log calls.

## [0.12.0] - 2019-05-24

Revise handling of record.metadata().target() versus record.module_path().

Incompatible API modification: Logger.rotate() now takes three parameters.

Support different formatting for stderr and files.

Add feature `colors` (see `README.md` for details).

Remove the deprecated `Logger::start_reconfigurable()` and `Logger::rotate_over_size()`.

## [0.11.5] - 2019-05-15

Fix [issue 26](https://github.com/emabee/flexi_logger/issues/26) (logging off for specific modules).

Fix [issue 27](https://github.com/emabee/flexi_logger/issues/27) (log files blank after restart).
Fix [issue 28](https://github.com/emabee/flexi_logger/issues/28) (add a corresponding set of unit tests to FileLogWriter).

## [0.11.4] - 2019-04-01

Version updates of dependencies.

## [0.11.3] - 2019-03-28

Add SyslogWriter.

## [0.11.2] - 2019-03-22

Change API to more idiomatic parameter types, in a compatible way.

Add first implementation of a SyslogWriter.

## [0.11.1] - 2019-03-06

Add option to write windows line endings, rather than a plain `\n`.

## [0.11.0] - 2019-03-02

Add options to cleanup rotated log files, by deleting and/or zipping older files.

Remove some deprecated methods.

## [0.10.7] - 2019-02-27

Let the BlackHoleLogger, although it doesn't write a log, still duplicate to stderr.

## [0.10.6] - 2019-02-26

Deprecate `Logger::start_reconfigurable()`, let `Logger::start()` return a reconfiguration handle.

Add an option to write all logs to nowhere (i.e., do not write any logs).

## [0.10.5] - 2019-01-15

Eliminate performance penalty for using reconfigurability.

## [0.10.4] - 2019-01-07

Add methods to modify the log spec temporarily.

## [0.10.3] - 2018-12-08

Advance to edition 2018.

## [0.10.2] - 2018-12-07

Log-spec parsing is improved, more whitespace is tolerated.

## [0.10.1] - 2018-11-08

When file rotation is used, the name of the file to which the logs are written is now stable. Details:

- the logs are always written to a file with infix _rCURRENT
- if this file exceeds the specified rotate-over-size, it is closed and renamed to a file with a sequential number infix, and then the logging continues again to the (fresh) file with infix _rCURRENT

Example: After some logging with your program my_prog, you will find files like

```text
my_prog_r00000.log
my_prog_r00001.log
my_prog_r00002.log
my_prog_rCURRENT.log
```

## [0.10.0] - 2018-10-30

`LogSpecification::parse()` now returns a `Result`, rather than a log spec directly (-> version bump). This enables a more reliable usage of FlexiLogger in non-trivial cases. For the sake of compatibility for the normal usecases, the Logger methods `with_str()` etc. remain unchanged. An extra method is added to retrieve parser errors, if desired.

## [0.9.3] - 2018-10-27

Docu improvement.

## [0.9.2] - 2018-08-13

Fix incorrect filename generation with rotation, i.e., switch off timestamp usage when rotation is used.

## [0.9.1] - 2018-08-12

Introduce `Logger::duplicate_to_stderr()`, as a more flexible replacement for `duplicate_error()` and `duplicate_info()`.

## [0.9.0] - 2018-07-06

### Eliminate String allocation

Get rid of the unnecessary String allocation we've been carrying with us for ages. This implies changing the signature of the format functions.

In case you provide your own format function, you'll need to adapt it to the new signature. Luckily, the effort is low.

As an example, here is how the definition of the `opt_format` function changed:

```rust
- pub fn opt_format(record: &Record) -> String {
-     format!(
---
+ pub fn opt_format(w: &mut io::Write, record: &Record) -> Result<(), io::Error> {
+     write!(
+         w,
```

Similarly, if you're using the advanced feature of providing your own implementation of LogWriter, you need to adapt it. The change again is trivial, and should even slightly simplify your code (you can return io errors and don't have to catch them yourself).

### Misc

The docu generation on docs.rs is now configured to consider all features; we thus expose `Logger.start_with_specfile()` only if the specfile feature is used. So we can revert the change done with 0.8.1.
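For comparison with the diff above, here is a hedged sketch of a complete custom format function against today's signature, which has since additionally gained a `&mut DeferredNow` parameter; the function name `my_format` and the chosen layout are illustrative:

```rust
use flexi_logger::DeferredNow;
use log::Record;
use std::io::Write;

// Illustrative custom format function for current flexi_logger versions.
pub fn my_format(
    w: &mut dyn Write,
    now: &mut DeferredNow,
    record: &Record,
) -> Result<(), std::io::Error> {
    write!(
        w,
        "[{}] {} {}",
        now.now().format("%Y-%m-%d %H:%M:%S"),
        record.level(),
        record.args()
    )
}
```

Such a function can be plugged in with `Logger::format(my_format)`.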
## [0.8.4] - 2018-06-18 Add flexi_logger to category `development-tools::debugging` ## [0.8.3] - 2018-05-14 Make append() also work for rotating log files ## [0.8.2] - 2018-04-03 Add option to append to existing log files, rather than always truncating them ## [0.8.1] - 2018-3-19 Expose `Logger.start_with_specfile()` always ...and not only if the feature "specfile" is used - otherwise it does not appear in the auto-generated docu (because it does not use --allfeatures) ## [0.8.0] - 2018-03-18 Add specfile feature - Add a feature that allows to specify the LogSpecification via a file that can be edited while the program is running _ Remove/hide deprecated APIs - As a consequence, cleanup code, get rid of duplicate stuff. ## [0.7.1] - 2018-03-07 Bugfix: do not create empty files when used in env_logger style. Update docu and the description in cargo.toml ## [0.7.0] - 2018-02-25 Add support for multiple log output streams - replace FlexiWriter with DefaultLogWriter, which wraps a FileLogWriter - add test where a SecurityWriter and an AlertWriter are added - add docu - move deprecated structs to separate package - move benches to folder benches ## [0.6.13] 2018-02-09 Add Logger::try_with_env_or_str() ## [0.6.12] 2018-2-07 Add ReconfigurationHandle::parse_new_spec() ## [0.6.11] 2017-12-29 Fix README.md ## [0.6.10] 2017-12-29 Publish version based on log 0.4 ## (...) ## [0.6.0] 2017-07-13 Use builder pattern for LogSpecification and Logger - deprecate outdated API - "objectify" LogSpecification - improve documentation, e.g. document the dash/underscore issue flexi_logger-0.29.8/Cargo.lock0000644000001150520000000000100115610ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "bumpalo" version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "cc" version = "1.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9157bbaa6b165880c27a4293a474c91cdcf265cc68cc829bf10be0964a391caf" 
dependencies = [ "shlex", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "windows-targets", ] [[package]] name = "cond_sync" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "deb55b7ec2b08e48757119d63b9fcee1ee92d9de07a6866e7614579688c0fe7f" [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "crc32fast" version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-queue" version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "displaydoc" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d" dependencies = [ "serde", "typeid", ] [[package]] name = "errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys 0.59.0", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "filetime" version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.59.0", ] [[package]] name = "flate2" version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ 
"crc32fast", "miniz_oxide", ] [[package]] name = "flexi_logger" version = "0.29.8" dependencies = [ "chrono", "cond_sync", "crossbeam-channel", "crossbeam-queue", "either", "flate2", "glob", "hostname", "libc", "log", "notify-debouncer-mini", "nu-ansi-term 0.50.1", "regex", "serde", "serde_derive", "serde_json", "temp-dir", "thiserror", "toml 0.8.19", "tracing", "tracing-subscriber", "version-sync", ] [[package]] name = "form_urlencoded" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "fsevent-sys" version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" dependencies = [ "libc", ] [[package]] name = "glob" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "hostname" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9c7c7c8ac16c798734b8a24560c1362120597c40d5e1459f09498f8f6c8f2ba" dependencies = [ "cfg-if", "libc", "windows", ] [[package]] name = "iana-time-zone" version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", "windows-core", ] [[package]] name = "iana-time-zone-haiku" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ "cc", ] [[package]] name = "icu_collections" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" dependencies = [ "displaydoc", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_locid" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ "displaydoc", "litemap", "tinystr", "writeable", "zerovec", ] [[package]] name = "icu_locid_transform" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ "displaydoc", "icu_locid", "icu_locid_transform_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_locid_transform_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" [[package]] name = "icu_normalizer" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" dependencies = [ "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", "utf16_iter", "utf8_iter", "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" version = "1.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" [[package]] name = "icu_properties" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" dependencies = [ "displaydoc", "icu_collections", "icu_locid_transform", "icu_properties_data", "icu_provider", "tinystr", "zerovec", ] [[package]] name = "icu_properties_data" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" [[package]] name = "icu_provider" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" dependencies = [ "displaydoc", "icu_locid", "icu_provider_macros", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", "zerovec", ] [[package]] name = "icu_provider_macros" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "idna" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ "idna_adapter", "smallvec", "utf8_iter", ] [[package]] name = "idna_adapter" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ "icu_normalizer", "icu_properties", ] [[package]] name = "indexmap" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "inotify" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdd168d97690d0b8c412d6b6c10360277f4d7ee495c5d0d5d5fe0854923255cc" dependencies = [ "bitflags 1.3.2", "inotify-sys", "libc", ] [[package]] name = "inotify-sys" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" dependencies = [ "libc", ] [[package]] name = "instant" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ "cfg-if", ] [[package]] name = "itoa" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "js-sys" version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] name = "kqueue" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c" dependencies = [ "kqueue-sys", "libc", ] [[package]] name = "kqueue-sys" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" dependencies = [ 
"bitflags 1.3.2", "libc", ] [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libredox" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.6.0", "libc", "redox_syscall", ] [[package]] name = "linux-raw-sys" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "litemap" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" dependencies = [ "serde", "value-bag", ] [[package]] name = "matchers" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" dependencies = [ "regex-automata 0.1.10", ] [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "miniz_oxide" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" dependencies = [ "adler2", ] [[package]] name = "mio" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", "log", "wasi", "windows-sys 0.52.0", ] [[package]] name = "notify" version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c533b4c39709f9ba5005d8002048266593c1cfaf3c5f0739d5b8ab0c6c504009" dependencies = [ "bitflags 2.6.0", "filetime", "fsevent-sys", "inotify", "kqueue", "libc", "log", "mio", "notify-types", "walkdir", "windows-sys 0.52.0", ] [[package]] name = "notify-debouncer-mini" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaa5a66d07ed97dce782be94dcf5ab4d1b457f4243f7566c7557f15cabc8c799" dependencies = [ "log", "notify", "notify-types", "tempfile", ] [[package]] name = "notify-types" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "585d3cb5e12e01aed9e8a1f70d5c6b5e86fe2a6e48fc8cd0b3e0b8df6f6eb174" dependencies = [ "instant", ] [[package]] name = "nu-ansi-term" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" dependencies = [ "overload", "winapi", ] [[package]] name = "nu-ansi-term" version = "0.50.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "num-traits" version = "0.2.19" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "once_cell" version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project-lite" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "proc-macro2" version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] [[package]] name = "pulldown-cmark" version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" dependencies = [ "bitflags 2.6.0", "memchr", "unicase", ] [[package]] name = "quote" version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] [[package]] name = "redox_syscall" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ "bitflags 2.6.0", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata 0.4.9", "regex-syntax 0.8.5", ] [[package]] name = "regex-automata" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" dependencies = [ "regex-syntax 0.6.29", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax 0.8.5", ] [[package]] name = "regex-syntax" version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustix" version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", "windows-sys 0.59.0", ] [[package]] name = "ryu" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = 
"same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "semver" version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" [[package]] name = "serde" version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_fmt" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d4ddca14104cd60529e8c7f7ba71a2c8acd8f7f5cfcdc2faf97eeb7c3010a4" dependencies = [ "serde", ] [[package]] name = "serde_json" version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "serde_spanned" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] [[package]] name = "sharded-slab" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "sval" version = "2.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6dc0f9830c49db20e73273ffae9b5240f63c42e515af1da1fceefb69fceafd8" [[package]] name = "sval_buffer" version = "2.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "429922f7ad43c0ef8fd7309e14d750e38899e32eb7e8da656ea169dd28ee212f" dependencies = [ "sval", "sval_ref", ] [[package]] name = "sval_dynamic" version = "2.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f16ff5d839396c11a30019b659b0976348f3803db0626f736764c473b50ff4" dependencies = [ "sval", ] [[package]] name = "sval_fmt" version = "2.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c01c27a80b6151b0557f9ccbe89c11db571dc5f68113690c1e028d7e974bae94" dependencies = [ "itoa", "ryu", "sval", ] [[package]] name = "sval_json" version = "2.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0deef63c70da622b2a8069d8600cf4b05396459e665862e7bdb290fd6cf3f155" dependencies = [ "itoa", "ryu", "sval", ] [[package]] name = "sval_nested" version = "2.13.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a39ce5976ae1feb814c35d290cf7cf8cd4f045782fe1548d6bc32e21f6156e9f" dependencies = [ "sval", "sval_buffer", "sval_ref", ] [[package]] name = "sval_ref" version = "2.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb7c6ee3751795a728bc9316a092023529ffea1783499afbc5c66f5fabebb1fa" dependencies = [ "sval", ] [[package]] name = "sval_serde" version = "2.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a5572d0321b68109a343634e3a5d576bf131b82180c6c442dee06349dfc652a" dependencies = [ "serde", "sval", "sval_nested", ] [[package]] name = "syn" version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "synstructure" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "temp-dir" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc1ee6eef34f12f765cb94725905c6312b6610ab2b0940889cfe58dae7bc3c72" [[package]] name = "tempfile" version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", "once_cell", "rustix", "windows-sys 0.59.0", ] [[package]] name = "thiserror" version = "2.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08f5383f3e0071702bf93ab5ee99b52d26936be9dedd9413067cbdcddcb6141a" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "2.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f357fcec90b3caef6623a099691be676d033b40a058ac95d2a6ade6fa0c943" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "thread_local" version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", ] [[package]] name = "tinystr" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ "displaydoc", "zerovec", ] [[package]] name = "toml" version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit 0.19.15", ] [[package]] name = "toml" version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", "toml_datetime", "toml_edit 0.22.22", ] [[package]] name = "toml_datetime" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ "indexmap", 
"serde", "serde_spanned", "toml_datetime", "winnow 0.5.40", ] [[package]] name = "toml_edit" version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", "winnow 0.6.20", ] [[package]] name = "tracing" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tracing-core" version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", ] [[package]] name = "tracing-log" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ "log", "once_cell", "tracing-core", ] [[package]] name = "tracing-subscriber" version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term 0.46.0", "once_cell", "regex", "sharded-slab", "smallvec", "thread_local", "tracing", "tracing-core", "tracing-log", ] [[package]] name = "typeid" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e13db2e0ccd5e14a544e8a246ba2312cd25223f616442d7f2cb0e3db614236e" [[package]] name = "unicase" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-ident" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "url" version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] [[package]] name = "utf16_iter" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" [[package]] name = "utf8_iter" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "valuable" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "value-bag" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" dependencies = [ "value-bag-serde1", "value-bag-sval2", ] [[package]] name = "value-bag-serde1" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4bb773bd36fd59c7ca6e336c94454d9c66386416734817927ac93d81cb3c5b0b" dependencies = [ "erased-serde", "serde", "serde_fmt", ] [[package]] name = "value-bag-sval2" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a916a702cac43a88694c97657d449775667bcd14b70419441d05b7fea4a83a" dependencies = [ "sval", "sval_buffer", "sval_dynamic", "sval_fmt", "sval_json", "sval_ref", "sval_serde", ] [[package]] name = "version-sync" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835169da0173ea373ddf5987632aac1f918967fbbe58195e304342282efa6089" dependencies = [ "proc-macro2", "pulldown-cmark", "regex", "semver", "syn", "toml 0.7.8", "url", ] [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ "windows-core", "windows-targets", ] [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" version = "0.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" dependencies = [ "memchr", ] [[package]] name = "winnow" version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] [[package]] name = "write16" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" [[package]] name = "writeable" version = "0.5.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "yoke" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", "yoke-derive", "zerofrom", ] [[package]] name = "yoke-derive" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zerofrom" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", "syn", "synstructure", ] [[package]] name = "zerovec" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" dependencies = [ "yoke", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", "syn", ] flexi_logger-0.29.8/Cargo.toml0000644000000162350000000000100116070ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.72.0" name = "flexi_logger" version = "0.29.8" authors = ["emabee "] build = false autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "An easy-to-configure and flexible logger that writes logs to stderr or stdout and/or to files. It allows custom logline formats, and it allows changing the log specification at runtime. It also allows defining additional log streams, e.g. for alert or security messages." 
documentation = "https://docs.rs/flexi_logger" readme = "README.md" keywords = [ "file", "logger", ] categories = ["development-tools::debugging"] license = "MIT OR Apache-2.0" repository = "https://github.com/emabee/flexi_logger" [package.metadata.docs.rs] all-features = true rustdoc-args = [ "--cfg", "docsrs", ] [lib] name = "flexi_logger" path = "src/lib.rs" doctest = false [[example]] name = "colors" path = "examples/colors.rs" [[example]] name = "colors2" path = "examples/colors2.rs" [[example]] name = "dedup" path = "examples/dedup.rs" [[example]] name = "entry_numbers" path = "examples/entry_numbers.rs" [[example]] name = "filter" path = "examples/filter.rs" [[example]] name = "performance" path = "examples/performance.rs" [[example]] name = "rotate" path = "examples/rotate.rs" [[example]] name = "version_numbers" path = "examples/version_numbers.rs" [[example]] name = "write_writer" path = "examples/write_writer.rs" [[test]] name = "test_age_or_size" path = "tests/test_age_or_size.rs" [[test]] name = "test_colors" path = "tests/test_colors.rs" [[test]] name = "test_custom_log_writer" path = "tests/test_custom_log_writer.rs" [[test]] name = "test_default_file_and_writer" path = "tests/test_default_file_and_writer.rs" [[test]] name = "test_env_logger_style" path = "tests/test_env_logger_style.rs" [[test]] name = "test_error_channel" path = "tests/test_error_channel.rs" [[test]] name = "test_error_channel_error" path = "tests/test_error_channel_error.rs" [[test]] name = "test_external_delete" path = "tests/test_external_delete.rs" [[test]] name = "test_external_rename" path = "tests/test_external_rename.rs" [[test]] name = "test_file_writer" path = "tests/test_file_writer.rs" [[test]] name = "test_file_writer_as_writer" path = "tests/test_file_writer_as_writer.rs" [[test]] name = "test_force_utc_1_panic" path = "tests/test_force_utc_1_panic.rs" [[test]] name = "test_force_utc_3" path = "tests/test_force_utc_3.rs" [[test]] name = "test_force_utc_4" path = "tests/test_force_utc_4.rs" [[test]] name = "test_json" path = "tests/test_json.rs" [[test]] name = "test_kv" path = "tests/test_kv.rs" [[test]] name = "test_mods" path = "tests/test_mods.rs" [[test]] name = "test_mods_off" path = "tests/test_mods_off.rs" [[test]] name = "test_multi_logger" path = "tests/test_multi_logger.rs" [[test]] name = "test_multi_threaded_cleanup_async" path = "tests/test_multi_threaded_cleanup_async.rs" [[test]] name = "test_multi_threaded_cleanup_use_utc" path = "tests/test_multi_threaded_cleanup_use_utc.rs" [[test]] name = "test_multi_threaded_dates" path = "tests/test_multi_threaded_dates.rs" [[test]] name = "test_multi_threaded_numbers" path = "tests/test_multi_threaded_numbers.rs" [[test]] name = "test_multi_threaded_numbers_dedup" path = "tests/test_multi_threaded_numbers_dedup.rs" [[test]] name = "test_multi_threaded_stderr" path = "tests/test_multi_threaded_stderr.rs" [[test]] name = "test_no_logger" path = "tests/test_no_logger.rs" [[test]] name = "test_parse_errors" path = "tests/test_parse_errors.rs" [[test]] name = "test_reconfigure_methods" path = "tests/test_reconfigure_methods.rs" [[test]] name = "test_recursion" path = "tests/test_recursion.rs" [[test]] name = "test_restart_with_no_suffix" path = "tests/test_restart_with_no_suffix.rs" [[test]] name = "test_rotate_immediate_compression" path = "tests/test_rotate_immediate_compression.rs" [[test]] name = "test_rotate_naming_variants" path = "tests/test_rotate_naming_variants.rs" [[test]] name = "test_specfile" path = "tests/test_specfile.rs" 
[[test]] name = "test_textfilter" path = "tests/test_textfilter.rs" [[test]] name = "test_trc" path = "tests/test_trc.rs" [[test]] name = "test_trigger_rotation" path = "tests/test_trigger_rotation.rs" [[test]] name = "test_utils" path = "tests/test_utils.rs" [[test]] name = "test_windows_line_ending" path = "tests/test_windows_line_ending.rs" [[test]] name = "test_write_modes" path = "tests/test_write_modes.rs" [[bench]] name = "bench_reconfigurable" path = "benches/bench_reconfigurable.rs" [[bench]] name = "bench_standard" path = "benches/bench_standard.rs" [dependencies.chrono] version = "0.4.22" features = ["clock"] default-features = false [dependencies.crossbeam-channel] version = "0.5" optional = true [dependencies.crossbeam-queue] version = "0.3" optional = true [dependencies.flate2] version = "1.0" features = ["rust_backend"] optional = true [dependencies.hostname] version = "0.4" optional = true [dependencies.log] version = "0.4" features = ["std"] [dependencies.notify-debouncer-mini] version = "0.5" optional = true default-features = false [dependencies.nu-ansi-term] version = "0.50" optional = true [dependencies.regex] version = "1.1" optional = true [dependencies.serde] version = "1.0" optional = true [dependencies.serde_derive] version = "1.0" optional = true [dependencies.serde_json] version = "1.0" optional = true [dependencies.thiserror] version = "2.0" [dependencies.toml] version = "0.8" optional = true [dependencies.tracing] version = "0.1.36" optional = true [dependencies.tracing-subscriber] version = "0.3" features = ["env-filter"] optional = true [dev-dependencies.cond_sync] version = "0.2" [dev-dependencies.either] version = "1.9" [dev-dependencies.flate2] version = "1.0" [dev-dependencies.glob] version = "0.3" [dev-dependencies.serde_derive] version = "1.0" [dev-dependencies.temp-dir] version = "0.1" [dev-dependencies.tracing] version = "0.1.36" [dev-dependencies.version-sync] version = "0.9" [features] async = [ "dep:crossbeam-channel", "dep:crossbeam-queue", ] colors = ["dep:nu-ansi-term"] compress = ["dep:flate2"] default = [ "colors", "textfilter", ] dont_minimize_extra_stacks = [] json = [ "dep:serde_json", "dep:serde", "dep:serde_derive", ] kv = ["log/kv_serde"] specfile = [ "specfile_without_notification", "dep:notify-debouncer-mini", ] specfile_without_notification = [ "dep:serde", "dep:toml", "dep:serde_derive", ] syslog_writer = [ "dep:libc", "dep:hostname", ] textfilter = ["dep:regex"] trc = [ "async", "specfile", "dep:tracing", "dep:tracing-subscriber", ] [target."cfg(linux)".dependencies.libc] version = "^0.2.50" optional = true flexi_logger-0.29.8/Cargo.toml.orig000064400000000000000000000051451046102023000152660ustar 00000000000000[package] name = "flexi_logger" version = "0.29.8" authors = ["emabee "] categories = ["development-tools::debugging"] description = """ An easy-to-configure and flexible logger that writes logs to stderr or stdout and/or to files. \ It allows custom logline formats, and it allows changing the log specification at runtime. \ It also allows defining additional log streams, e.g. 
for alert or security messages.\ """ documentation = "https://docs.rs/flexi_logger" edition = "2021" keywords = ["file", "logger"] license = "MIT OR Apache-2.0" readme = "README.md" repository = "https://github.com/emabee/flexi_logger" rust-version = "1.72.0" [lib] doctest = false ## docs.rs-specific configuration [package.metadata.docs.rs] # document all features all-features = true # define the configuration attribute `docsrs` rustdoc-args = ["--cfg", "docsrs"] [features] default = ["colors", "textfilter"] async = ["dep:crossbeam-channel", "dep:crossbeam-queue"] colors = ["dep:nu-ansi-term"] compress = ["dep:flate2"] dont_minimize_extra_stacks = [] json = ["dep:serde_json", "dep:serde", "dep:serde_derive"] kv = ["log/kv_serde"] specfile = ["specfile_without_notification", "dep:notify-debouncer-mini"] specfile_without_notification = ["dep:serde", "dep:toml", "dep:serde_derive"] syslog_writer = ["dep:libc", "dep:hostname"] textfilter = ["dep:regex"] trc = ["async", "specfile", "dep:tracing", "dep:tracing-subscriber"] [dependencies] nu-ansi-term = { version = "0.50", optional = true } chrono = { version = "0.4.22", default-features = false, features = ["clock"] } crossbeam-channel = { version = "0.5", optional = true } crossbeam-queue = { version = "0.3", optional = true } flate2 = { version = "1.0", optional = true, features = ["rust_backend"] } hostname = { version = "0.4", optional = true } log = { version = "0.4", features = ["std"] } notify-debouncer-mini = { version = "0.5", optional = true, default-features = false } regex = { version = "1.1", optional = true } serde = { version = "1.0", optional = true } serde_derive = { version = "1.0", optional = true } serde_json = { version = "1.0", optional = true } thiserror = "2.0" toml = { version = "0.8", optional = true } tracing = { version = "0.1.36", optional = true } tracing-subscriber = { version = "0.3", optional = true, features = [ "env-filter", ] } [target.'cfg(linux)'.dependencies] libc = { version = "^0.2.50", optional = true } [dev-dependencies] cond_sync = "0.2" either = "1.9" flate2 = "1.0" glob = "0.3" serde_derive = "1.0" version-sync = "0.9" temp-dir = "0.1" tracing = "0.1.36" #env_logger = '*' # optionally needed for the performance example flexi_logger-0.29.8/LICENSE-APACHE000064400000000000000000000251361046102023000143250ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.flexi_logger-0.29.8/LICENSE-MIT000064400000000000000000000020371046102023000140300ustar 00000000000000Copyright (c) 2018 The AUTHORS Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. flexi_logger-0.29.8/README.md000064400000000000000000000162361046102023000136610ustar 00000000000000# flexi_logger **A flexible and easy-to-use logger that writes logs to stderr and/or to files, and/or to other output streams, and that can be influenced while the program is running.** [![Latest version](https://img.shields.io/crates/v/flexi_logger.svg)](https://crates.io/crates/flexi_logger) [![Documentation](https://docs.rs/flexi_logger/badge.svg)](https://docs.rs/flexi_logger) [![License](https://img.shields.io/crates/l/flexi_logger.svg)](https://github.com/emabee/flexi_logger) [![Build](https://img.shields.io/github/actions/workflow/status/emabee/flexi_logger/ci_test.yml?branch=main)](https://github.com/emabee/flexi_logger/actions?query=workflow%3ACI) [![unsafe forbidden](https://img.shields.io/badge/unsafe-forbidden-success.svg)](https://github.com/rust-secure-code/safety-dance/) [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9610/badge)](https://www.bestpractices.dev/projects/9610) ## Usage Add `flexi_logger` and `log` to the dependencies section in your project's `Cargo.toml` (`log` is needed because `flexi_logger` plugs into the standard Rust logging facade given by the [log crate](https://crates.io/crates/log), and you use the ```log``` macros to write log lines from your code): ```toml [dependencies] flexi_logger = "0.29" log = "0.4" ``` To provide the log specification via env variable `RUST_LOG` and get the log written to stderr, add to an early place in your main: ```rust flexi_logger::init(); ``` Or, to provide a default log spec programmatically, use ```rust flexi_logger::Logger::try_with_env_or_str("info, my::critical::module=trace")?.start()?; ``` or, to get the log e.g. written with high performance to a file, ```rust use flexi_logger::{FileSpec, Logger, WriteMode}; let _logger = Logger::try_with_str("info, my::critical::module=trace")? .log_to_file(FileSpec::default()) .write_mode(WriteMode::BufferAndFlush) .start()?; ``` There are many more configuration options to e.g. * decide whether you want to write your logs to stdout or to a file, * configure the path and the filenames of the log files, * use file rotation, * specify the line format for the log lines, * apply a stateful filter before log lines are really written, * define additional log streams, e.g for alert or security messages, * support changing the log specification on the fly, while the program is running. See * the documentation of module [code_examples](https://docs.rs/flexi_logger/latest/flexi_logger/code_examples/index.html) for a bunch of examples, * the [API documentation](https://docs.rs/flexi_logger/latest/flexi_logger) for a complete reference. ## Minimal rust version The minimal supported rust version is currently "1.72.0". ## Crate Features Make use of the non-default features by specifying them in your `Cargo.toml`, e.g. 
```toml [dependencies] flexi_logger = { version = "0.29", features = ["async", "specfile", "compress"] } log = "0.4" ``` or, to get the smallest footprint (and no colors), switch off even the default features: ```toml [dependencies] flexi_logger = { version = "0.29", default_features = false } log = "0.4" ``` ### **`async`** Adds an additional write mode that decouples `flexi_logger`'s I/O from your application threads. Works with `log_to_stdout()`, `log_to_stderr()`, and `log_to_file()`. See [here](./docs/diagrams.pdf) for a performance comparison of some write modes. Adds dependencies to [`crossbeam-channel`](https://docs.rs/crossbeam-channel/latest/crossbeam_channel/) and [`crossbeam-queue`](https://docs.rs/crossbeam-queue/latest/crossbeam_queue/). ### **`colors`** (*default feature*) Getting colored output is also possible without this feature, by implementing and using your own coloring format function. The default feature `colors` simplifies this by doing three things: * it activates the optional dependency to `nu_ansi_term` and * provides additional colored pendants to the existing uncolored format functions * it uses `colored_default_format()` for the output to stderr, and the non-colored `default_format()` for the output to files * it switches off coloring if the output is not sent to a terminal but e.g. piped to another program. **Colors**, or styles in general, are a matter of taste, and no choice will fit every need. So you can override the default formatting and coloring in various ways. With switching off the default features (see [usage](#usage)) you can remove the `nu_ansi_term`-based coloring but keep the capability to switch off your own coloring. ### **`compress`** Adds two variants to the `enum` `Logger::Cleanup`, which allow keeping some or all rotated log files in compressed form (`.gz`) rather than as plain text files. ### **`dont_minimize_extra_stacks`** Normally, `flexi_logger` reduces the stack size of all threads that it might spawn (flusher, specfile-watcher, async writer, cleanup) to a bare minimum. For usecases where this is not desirable (see [here](https://github.com/emabee/flexi_logger/issues/95) for some motivation), you can activate this feature. ### **`json`** Adds an additional format function `json_format` that prints the whole log line in json format, like this: ```text {"level":"WARN","timestamp":"2024-03-14 10:04:57.299908 +01:00","thread":"XY","module_path":"test_json","file":"src/test_json.rs","line":32,"text":"More foo than bar."} ``` Adds dependencies to `serde`, `serde_derive`, `serde_json`. ### **`kv`** If you use the `kv` feature of the `log` crate to enrich the log-macro calls with key-value pairs, then you should also use the `kv` feature of `flexi_logger` so that these key-value pairs are also written by the provided [format functions](https://docs.rs/flexi_logger/latest/flexi_logger/#functions). ### **`specfile`** Adds a method `Logger::start_with_specfile(specfile)`. If started with this method, `flexi_logger` uses the log specification that was given to the factory method (one of `Logger::with...()`) as initial spec and then tries to read the log specification from the named file. If the file does not exist, it is created and filled with the initial spec. By editing the log specification in the file while the program is running, you can change the logging behavior in real-time. The implementation of this feature uses some additional crates that you might not want to depend on with your program if you don't use this functionality. 
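For orientation, a minimal sketch of how this could look (assuming the `specfile` feature is enabled; `logspec.toml` is an arbitrarily chosen file name, and the "info" spec is just an example):

```rust
use flexi_logger::Logger;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // "info" acts as the initial spec; it is written to logspec.toml if that
    // file does not exist yet, and subsequent edits to the file change the
    // logging behavior of the running program.
    let _logger = Logger::try_with_str("info")?
        .start_with_specfile("logspec.toml")?;

    log::info!("edit logspec.toml to change the log specification at runtime");
    Ok(())
}
```

Keep in mind that this relies on the additional crates mentioned above.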
For that reason the feature is not active by default. ### **`specfile_without_notification`** Pretty much like `specfile`, except that updates to the file are being ignored. See [here](https://github.com/emabee/flexi_logger/issues/59) for more details. ### **`syslog_writer`** Adds `SyslogWriter`, a `LogWriter` implementation that sends log entries to the syslog. ### **`textfilter`** (*default feature*) Adds the ability to filter logs by text, but also adds a dependency on the regex crate. ### **`trc`** An experimental feature that allows using `flexi_logger` functionality with `tracing`. ## Versions See the [change log](https://github.com/emabee/flexi_logger/blob/main/CHANGELOG.md) for more details. flexi_logger-0.29.8/benches/bench_reconfigurable.rs000064400000000000000000000014371046102023000205020ustar 00000000000000#![feature(test)] extern crate test; use flexi_logger::{FileSpec, Logger}; use log::{error, trace}; use test::Bencher; #[bench] fn b10_no_logger_active(b: &mut Bencher) { b.iter(use_error); } #[bench] fn b20_initialize_logger(_: &mut Bencher) { Logger::try_with_str("info") .unwrap() .log_to_file(FileSpec::default().directory("log_files")) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); } #[bench] fn b30_relevant_logs(b: &mut Bencher) { b.iter(use_error); } #[bench] fn b40_suppressed_logs(b: &mut Bencher) { b.iter(use_trace); } fn use_error() { for _ in 1..100 { error!("This is an error message"); } } fn use_trace() { for _ in 1..100 { trace!("This is a trace message"); } } flexi_logger-0.29.8/benches/bench_standard.rs000064400000000000000000000015011046102023000173030ustar 00000000000000#![feature(test)] extern crate flexi_logger; extern crate test; #[macro_use] extern crate log; use flexi_logger::{FileSpec, Logger}; use test::Bencher; #[bench] fn b10_no_logger_active(b: &mut Bencher) { b.iter(use_error); } #[bench] fn b20_initialize_logger(_: &mut Bencher) { Logger::try_with_str("info") .unwrap() .log_to_file(FileSpec::default().directory("log_files")) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); } #[bench] fn b30_relevant_logs(b: &mut Bencher) { b.iter(use_error); } #[bench] fn b40_suppressed_logs(b: &mut Bencher) { b.iter(use_trace); } fn use_error() { for _ in 1..100 { error!("This is an error message"); } } fn use_trace() { for _ in 1..100 { trace!("This is a trace message"); } } flexi_logger-0.29.8/docs/ComparisonWriteModesOnConsoleAndFile.xlsx000064400000000000000000000251341046102023000234270ustar 00000000000000PK!A7n[Content_Types].xml (Tn0W?DV[$xX$(}'fQU%Ql[&<&YB@l.YO$` r=HEV5 ӵLb.j""%5 3NB?C%*=YK)ub8xR-JWQ23V$sU.)PI]h:C@im2 3 1 g/#ݺʸ2 x|`G㮶u_;ѐUOղwj s4ȥ-ZeN xe|o, 1ysi޺s V788wa:  CrhݝAPK!U0#L _rels/.rels (MO0 HݐBKwAH!T~I$ݿ'TG~-ơVenO,O|U9CQ>gs|5,@Pۀ[ $͙E@б$,rOQ~JV_Hi:U#5f]Eיs[rsq{crqyE+iںj5< PK!>xl/_rels/workbook.xml.rels (RMK0 0wvt/"Uɴ)&!3~*]XK/oyv5+zl;obG s>,8(%"D҆4j0u2jsMY˴S쭂 )fCy I< y!+EfMyk K5=|t G)s墙UtB),fPK!+ )&xl/worksheets/sheet1.xmlXے6}OU݀2&Tf7yXU\6O qĘ[iF{#YisB|o_4 qJ82?pa~Y17\D|fErY\X4g9P] rQڮL,&gS2z8irKӘɋ-Ke1{䣄f9PHJGIjY2{:)w)~G~ f񎥌$-`垻C;aD|W"R?%4nܖIIC&fe{9(\:h֫ 3= UÇ\G1żL~+><='d"?w 9@YA'NS I_SYqj3vcg1o?6e^=>ėI~x`| jEk\$`rb M72"^8ȳ]n)2C }C2R.\xxHYb$ P@0J xSj_Q-&w["|bсrG鼮M*6xVlgxh>iϚϵ؟*c>ցݖR:b'Ŀcq3&k.@$ x^~N*F" #"]ĸAذfg@2xgKkjwE*b-fMDR9 @NVXz5]*aõ .NU=ڙjR%ODTRj3! 
^KVk"R8 `EՀ opVVsi9E9.}*𡫻K^| zvKD:uwȨnDɐ:}ǁޞW q5HQ"A,jkY.DO Rq*t{^UwOy\5'q3r;+H[\(>WUQQ]0[1բ^1UV$̫b*S#GTJ뇲~*2zSAŤ*kجZ*ꔲf',)8]!J=>UU@Q]۔VNJdVU\P )-ї}X.[/7Y%ͪ{Q%G6̶3"f#\gd0z-S3Z7m.Xّ #Ňx0 ;|4]vCZ;oDÎ'΁R^ g/9f;4PJ)#e?EfNg1q&uN|9~$F{Bsyi_v PK!N xl/theme/theme1.xmlY͋7? sw5%l$dQV32%9R(Bo=@ $'#$lJZv G~ztҽzG ’_P=ؘ$Ӗk8(4|OHe n ,K۟~rmDlI9*f8&H#ޘ+R#^bP{}2!# J{O1B (W%òBR!a1;{(~h%/V&DYCn2L`|Xsj Z{_\Zҧh4:na PաWU_]נT E A)>\Çfgנ_[K^PkPDIr.jwd A)Q RSLX"7Z2>R$I O(9%o&`T) JU>#02]`XRxbL+7 /={=_*Kn%SSՏ__7'Ŀ˗:/}}O!c&a?0BĒ@v^[ uXsXa3W"`J+U`ek)r+emgoqx(ߤDJ]8TzM5)0IYgz|]p+~o`_=|j QkekZAj|&O3!ŻBw}ь0Q'j"5,ܔ#-q&?'2ڏ ZCeLTx3&cu+ЭNxNg x)\CJZ=ޭ~TwY(aLfQuQ_B^g^ٙXtXPꗡZFq 0mxEAAfc ΙFz3Pb/3 tSٺqyjuiE-#t00,;͖Yƺ2Obr3kE"'&&S;nj*#4kx#[SvInwaD:\N1{-_- 4m+W>Z@+qt;x2#iQNSp$½:7XX/+r1w`h׼9#:Pvd5O+Oٚ.<O7sig*t; CԲ*nN-rk.yJ}0-2MYNÊQ۴3, O6muF8='?ȝZu@,Jܼfw-xRB-}wTBuT—ViA l;MICQ La)?;V#mg0V*M6aLj4РFjz՚ U۲>O3 )L z_3>\歒GK6R}dV_= _J#9$ׄfV%(e{ bo1z88?@P>C`ݡ*mqz?jrzYcYlNCfae"8d33Ё6Ncgtqv/p}tuNQT朶bvgFu(c˼ad$6#i56"08dp\i9?GgWEC{iDq5]P-j[=}yNlG{"]H)_^Q_ajmzF{ *[TV /%׫ʂ(wv1Zva9\zrv"{xܳ( %aUA)Yx2$iId=ya8X0A9\34B`1'PK!g72GYxl/sharedStrings.xmlTMO@ #@nTUVHU[qq2Nb1 a}=ITzZ~{~z9x(~S\./ @_%lw d5=n=J۞EhMѦVe)U%9v&glJ 1u|q *}WW{2.?۵v,i]̑?Qq)h6#saDuq4 C@;M6i8%.Pγ]v(C e]F6R%ME:ًG{ɠ(,AD;X:k> f6 FW HiO5U$}3lwЉ~w˽&_a _PK!;m2KB#xl/worksheets/_rels/sheet1.xml.rels0ECx{օ CS7"Ubۗ{ep6<f,Ժch{-A8 -Iy0Ьjm_/N,}W:=RY}H9EbAwk}m PK!;75--'xl/printerSettings/printerSettings1.binԱJPV\;*DA""i:Y C1 jU $_蠸3$'s._IzP.tkoSΜhWgkAAmݯdڑ?&ԡOzZwq֒zW7WlFȵJS0ɿRW ͵9t]X;sѱYg|.JEBGڎq!5cp>          WPK!8aHmdocProps/core.xml (_K0CɳmڎɁD-$w[X$ۛv[s/\R:U$E(P =F+׷7%3i V^I9Lvc;%VRv e{KSOq@Dg$g|ڦp ;%z9+#hBs18N ƶmv3zzBub3,Pmh4E+J7n u~6+#*G!9U(oygqzu:#Ŕ..BceGbD_}PK!aI docProps/app.xml (Ao0  9P bHWagMc$׏z{xDI:_Pkx]~ 1@%F_|RdrGDKVRm3`96elg6ڗɫp 5ԗ(UO qwL շ9bl~`_.jk5x%̰qiՃ txmWmJ&;ԌOHY?-lc9kE/G!`aqj6&;90N8ہo:s7^O'{d‘SÅg|Hxk^y>TdN> =o2!dݚ0m xl/_rels/workbook.xml.relsPK-!+ )& xl/worksheets/sheet1.xmlPK-!N xl/theme/theme1.xmlPK-!H\L ~xl/styles.xmlPK-!g72GYxl/sharedStrings.xmlPK-!;m2KB#nxl/worksheets/_rels/sheet1.xml.relsPK-!;75--'p xl/printerSettings/printerSettings1.binPK-!8aHm!docProps/core.xmlPK-!aI a$docProps/app.xmlPK & 'flexi_logger-0.29.8/docs/Threads in flexi_logger.md000064400000000000000000000037351046102023000202640ustar 00000000000000# Threads in `flexi_logger` ## src/threads.rs ### "flexi_logger-flusher" * called in Logger::build if NOT WriteMode::Direct, WriteMode::SupportCapture, WriteMode::BufferDontFlush or WriteMode::BufferDontFlushWith(_) is chosen * pub(crate) fn start_flusher_thread( * flushes primary writer and other writers with flush_interval cadence * stack_size(1024) ### "flexi_logger-async_std_writer" * only available with feature "async" * called in constructor of StdWriter if WriteMode::Async or WriteMode::AsyncWith is chosen * [cfg(feature = "async")] pub(crate) fn start_async_stdwriter( * flushes, or writes to stdout or stderr * rust default stack_size = 2 \* 1024 \* 1024 ## src/writers/file_log_writer/state.rs ### FLW: "flexi_logger-async_file_writer" * only available with feature "async" * Called in intialization of the FLW, if WriteMode::Async or WriteMode::AsyncWith is chosen * [cfg(feature = "async")] pub(super) fn start_async_fs_writer( * flushes, or writes to the FLW's buffer * rust default stack_size = 2 \* 1024 \* 1024 ### FLW: "flexi_logger-file_flusher" * ONLY USED if FLW is used in custom LogWriter implementation * Called in intialization of the FLW, if WriteMode::Direct, WriteMode::SupportCapture, WriteMode::BufferDontFlush or WriteMode::BufferDontFlushWith(_) is 
chosen and if flush_interval > 0. Note that flexi_logger sets flush_interval = 0 for its "embedded" FLW! * pub(super) fn start_sync_flusher( * flushes the FLW's file * stack_size(1024) ### "flexi_logger-fs-async_flusher" * only available with feature "async" * Called in initialization of the FLW if WriteMode::Async/With and if flush_interval > 0 * pub(crate) fn start_async_fs_flusher( * triggers the flush on the "flexi_logger-async_file_writer" * stack_size(1024) ## src/writers/file_log_writer/state/list_and_cleanup.rs ### "flexi_logger-fs-cleanup" * only called when explicitly configured * pub(super) fn start_cleanup_thread( * calls remove_or_compress_too_old_logfiles_impl * stack_size(512 * 1024)
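To make the above concrete, here is a hedged sketch (not taken from the crate's own examples) of a configuration that starts the "flexi_logger-async_file_writer" thread described in this document; it assumes the `async` feature is enabled, and the file spec and the "info" log spec are arbitrary choices:

```rust
use flexi_logger::{FileSpec, Logger, WriteMode};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // WriteMode::Async moves file I/O off the application threads; during
    // initialization of the FileLogWriter this spawns the async writer thread
    // (and, with a non-zero flush interval, the async flusher) listed above.
    let _logger = Logger::try_with_str("info")?
        .log_to_file(FileSpec::default())
        .write_mode(WriteMode::Async)
        .start()?;

    log::info!("written via the asynchronous file writer");
    Ok(())
}
```

Dropping the returned handle at the end of `main` flushes the buffers and shuts the writer and flusher threads down cleanly.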
flexi_logger-0.29.8/docs/diagrams.docx [binary content omitted]
U\cvE2ߦ;avV=˘O~ےf,{Mq_h8-2^ƚ)rI'_$əjS}u@pK.&,㷋 aqoqu{,37>tW?<EphO=Rsa)IӍ9)YQ)N霛Iʎ@g+8#e+h %[XvZj*0w* hBZp* ڻR\ )hBZ*D h:NBP!-T@ U{ ] ).BP!-T@ "BP!%T`$THA "BP[ ݅ qB.BB Z*D hBZ*0w* hBZB8B{BP!-T@ "BP!-T@ ; RBP!->O+t;ݼD^YE8)hpn6W-?\uIow]BL%8r%("ݴ΃kZi-*]Vxh1oֆ9mn̆!|l29o[vR@h Gpd'%U0:N=;?c((7WCa].T;jHpr5] QnjH=9 NwWC!p*ú{NV!jú!jrv5DThWCՐu5$8`] Qή6W(5WyLޠr+V//Ę'+E`5P}:ZJ˞`zlQhj ׿~r+Œ&G4#6lQm9_ܻ ;Zcy9rUϿLG˞(]~mէ{fbbF+3-Z^>9^7=h}Gqr&^Wܼ&Nn>[&."5<5[5u,.Hqʘ~/.,{%8%՘ΥzxYP}V^38) dNrU0겺#1U.F%^bm[ɲQ#=ڥZҡ'vuKy ʭM.Xm2ԫl2ik\JWF+'ʔ_:ըlG:L#:un³/ls3Np?;_PK!㻀9jword/webSettings.xml͎0c>bMVJW[U>cĪ }!z?1xQsms? \ȩ7jVOni\:"f+JsU*yqc|ly +L3jP.@D Vl'NeZ;mk4yځC>/ko"LlT\𣢍׽TIQCB?? >lm:%%Pl$n&jQ >Bޫ4ReU.%kBjntɷX< ^@D=_,C8r|d-]Ta7Eb5u@&!A(;},EE%brƲ1 ZҞe96W ?faɝKuO3=s26(M37d)%4th_To_ҲJ:q KPK!ء[ word/fontTable.xmln0+bk=eI#oo!)oX)IԐ543[[2 \Ɂ5Bc~t}J"S+7vϔ,yY :g@CLbg %Y (n)qJF_SUх`rTT漀ief;=ADɉT+PYTY)%@@|"Цl]O[i8Pt;Ι`$Z[?`#̊?̤d#1{V[M>[%8q#*H,®%~3 m|ŽtN40Μ֪ u}I47.3X4D%4<'cjuǖ` ~3pN@a$#f5#$Qfd\tuQCHɄ\yEt9AB-$&&cW +cJ0ulJJN$ j0(pc|@%y?!8^ܐ-Y~zߏ;a?Ʈio}cvPK!9word/numbering.xml̗ێ<x(Ωi6`ѢE\ZCd'=2#Xn%[{?3鋗[ά5QJ1KǶHdJrflKXIAfhg/6((0!tɒ,FH'+±4QRE~HbA6Rs\|)sk%~T 8`V9`] FEQ!2Qu@(D!MƑ~rp뒦H~#u҉w\fDB*scPdpN|L'l0WK~:0E\iC3P"/Zz\Kܿry#7G0B Y[|, W dK9k6۳\~՞TR}¯笊DD w6p@\gi^&goQ@ɾB ,S=á{aݞ}q0M!&Mzn-OK۷JٞFOXu>-O+AoI|R9 -G|YʷdkF`_:W8?:t s@bE`Qfu^-r^+mѭS^#kwilV{sYe5Wꄭ *3?CN\9116n-o7c$od}/ivYGe*5̞ze$+,臎E[)rm sJWN252J%r~"wfMȡA0NY(Jly0ռjW8ݾT;&PD\p^ β@ίd  ϴ&Vt7=ˊLGTqљV\lW:nZu+=FM¸Uk5_PK-!;[Content_Types].xmlPK-!N #_rels/.relsPK-!_cxıVCword/document.xmlPK-!N#word/_rels/document.xml.relsPK-!00D6 word/footnotes.xmlPK-!+ޥ0 word/endnotes.xmlPK-!u&&K!word/charts/style5.xmlPK-!5QQ<z!%word/charts/_rels/chart4.xml.relsPK-!uK;z! 'word/charts/_rels/chart5.xml.relsPK-!\aQ"!(word/charts/_rels/chart6.xml.relsPK-!-()word/charts/chart1.xmlPK-!u&&Rword/charts/style1.xmlPK-!nMWword/charts/colors1.xmlPK-!&{(rvXword/charts/chart2.xmlPK-!u&&2word/charts/style2.xmlPK-!nword/charts/colors2.xmlPK-!)[word/charts/chart3.xmlPK-!u&&word/charts/style3.xmlPK-!nlword/charts/colors3.xmlPK-!aZPword/charts/chart4.xmlPK-!nvword/charts/colors4.xmlPK-!nword/charts/colors5.xmlPK-!]Byword/charts/chart6.xmlPK-!u&&>word/charts/style6.xmlPK-!nword/charts/colors6.xmlPK-!C"word/theme/theme1.xmlPK-!u&&word/charts/style4.xmlPK-!}0g6pword/charts/chart5.xmlPK-!Hw;z!word/charts/_rels/chart1.xml.relsPK-!tѰ;z!Tword/charts/_rels/chart2.xml.relsPK-!<z!word/charts/_rels/chart3.xml.relsPK-! /zIword/settings.xmlPK-!3]w Jword/styles.xmlPK-!㻀9jword/webSettings.xmlPK-! YdocProps/app.xmlPK-!ء[ oword/fontTable.xmlPK-! 
flexi_logger-0.29.8/docs/diagrams.pdf [binary PDF content omitted]
flexi_logger-0.29.8/examples/colors.rs
fn main() { #[cfg(not(feature = "colors"))] println!("Feature color is switched off"); #[cfg(feature = "colors")] { use nu_ansi_term::Color; use std::io::IsTerminal; for i in 0..=255 { println!("{}: {}", i,
Color::Fixed(i).paint(i.to_string())); } println!(); if std::io::stdout().is_terminal() { println!( "Stdout is considered a tty - \ flexi_logger::AdaptiveFormat will use colors", ); } else { println!( "Stdout is not considered a tty - \ flexi_logger::AdaptiveFormat will NOT use colors" ); } if std::io::stderr().is_terminal() { println!( "Stderr is considered a tty - \ flexi_logger::AdaptiveFormat will use colors", ); } else { println!( "Stderr is not considered a tty - \ flexi_logger::AdaptiveFormat will NOT use colors!" ); } #[cfg(target_os = "windows")] if nu_ansi_term::enable_ansi_support().is_err() { println!("Unsupported windows console detected, coloring will likely not work"); } println!( "\n{}", Color::Fixed(196) .bold() .paint("err! output (red) with default palette") ); println!( "{}", Color::Fixed(208) .bold() .paint("warn! output (yellow) with default palette") ); println!("info! output (normal) with default palette"); println!( "{}", Color::Fixed(7).paint("debug! output (normal) with default palette") ); println!( "{}", Color::Fixed(8).paint("trace! output (grey) with default palette") ); println!( "\n{}", Color::Red .bold() .paint("err! output (red) with env_logger-palette") ); println!( "{}", Color::Yellow.paint("warn! output (yellow) with env_logger-palette") ); println!( "{}", Color::Green.paint("info! output (green) with env_logger-palette") ); println!( "{}", Color::Blue.paint("debug! output (blue) with env_logger-palette") ); println!( "{}", Color::Cyan.paint("trace! output (cyan) with env_logger-palette") ); } } flexi_logger-0.29.8/examples/colors2.rs000064400000000000000000000072621046102023000161500ustar 00000000000000// #[cfg(feature = "colors")] // use colored::{Color, ColoredString, Colorize}; // use flexi_logger::color_from_ansi_code; // fn main() { // #[cfg(not(feature = "colors"))] // println!("Feature color is switched off"); // #[cfg(feature = "colors")] // { // use std::io::IsTerminal;; // colored::control::set_override(true); // for r in [0, 95, 135, 175, 215, 255] { // for g in [0, 95, 135, 175, 215, 255] { // for b in [0, 95, 135, 175, 215, 255] { // println!( // " rgb = ({:3}, {:3}, {:3}), {}", // r, // g, // b, // "hello".truecolor(r, g, b) // ); // } // } // } // for i in 0..=255 { // print!("{}: {}", i, nu_ansi_term::Color::Fixed(i).paint(i.to_string())); // println!("{}: {}", i, i.to_string().color(color_from_ansi_code(i))); // } // println!(); // if std::io::stdout().is_terminal() { // println!( // "Stdout is considered a tty - \ // flexi_logger::AdaptiveFormat will use colors", // ); // } else { // println!( // "Stdout is not considered a tty - \ // flexi_logger::AdaptiveFormat will NOT use colors" // ); // } // if std::io::stderr().is_terminal() { // println!( // "Stderr is considered a tty - \ // flexi_logger::AdaptiveFormat will use colors", // ); // } else { // println!( // "Stderr is not considered a tty - \ // flexi_logger::AdaptiveFormat will NOT use colors!" // ); // } // #[cfg(target_os = "windows")] // if nu_ansi_term::enable_ansi_support().is_err() { // println!("Unsupported windows console detected, coloring will likely not work"); // } // println!( // "\n{}", // "err! output (red) with default palette" // .color(color_from_ansi_code(196)) // .bold() // ); // println!( // "{}", // "warn! output (yellow) with default palette" // .color(color_from_ansi_code(208)) // .bold() // ); // println!("info! output (normal) with default palette"); // println!( // "{}", // "debug! 
output (normal) with default palette" // .color(color_from_ansi_code(7)) // .bold() // ); // println!( // "{}", // "trace! output (grey) with default palette" // .color(color_from_ansi_code(8)) // .bold() // ); // println!("\n{}", "err! output (red) with env_logger-palette".red()); // println!( // "{}", // "warn! output (yellow) with env_logger-palette".yellow() // ); // println!("{}", "info! output (green) with env_logger-palette".green()); // println!("{}", "debug! output (blue) with env_logger-palette".blue()); // println!("{}", "trace! output (cyan) with env_logger-palette".cyan()); // } } flexi_logger-0.29.8/examples/dedup.rs000064400000000000000000000255041046102023000156650ustar 00000000000000use flexi_logger::{ filter::{LogLineFilter, LogLineWriter}, DeferredNow, }; use log::Record; use std::{cmp::Ordering, num::NonZeroUsize, sync::Mutex}; fn main() { #[cfg(feature = "colors")] let format = flexi_logger::colored_detailed_format; #[cfg(not(feature = "colors"))] let format = flexi_logger::detailed_format; flexi_logger::Logger::try_with_str("info") .unwrap() .format(format) .log_to_stdout() .filter(Box::new(DedupWriter::with_leeway( std::num::NonZeroUsize::new(2).unwrap(), ))) .start() .unwrap(); for i in 0..10 { log::info!("{}", if i == 5 { "bar" } else { "foo" }); } log::info!("the end"); } /// A helper to skip duplicated consecutive log lines. pub struct DedupWriter { deduper: Mutex, } impl DedupWriter { /// Constructs a new [`Deduper`] that will skip duplicated entries after /// some record has been received for the consecutive times specified by /// `leeway`. pub fn with_leeway(leeway: NonZeroUsize) -> Self { Self { deduper: Mutex::new(Deduper::with_leeway(leeway)), } } } impl LogLineFilter for DedupWriter { fn write( &self, now: &mut DeferredNow, record: &Record, log_line_writer: &dyn LogLineWriter, ) -> std::io::Result<()> { let mut deduper = self.deduper.lock().unwrap(); let dedup_action = deduper.dedup(record); match dedup_action { DedupAction::Allow => { // Just log log_line_writer.write(now, record) } DedupAction::AllowLastOfLeeway(_) => { // Log duplicate log_line_writer.write(now, record)?; // Log warning log_line_writer.write( now, &log::Record::builder() .level(log::Level::Warn) .file_static(Some(file!())) .line(Some(line!())) .module_path_static(Some("flexi_logger")) .target("flexi_logger") .args(format_args!( "last record has been repeated consecutive times, \ following duplicates will be skipped...", )) .build(), ) } DedupAction::AllowAfterSkipped(skipped) => { // Log summary of skipped log_line_writer.write( now, &log::Record::builder() .level(log::Level::Info) .file_static(Some(file!())) .line(Some(line!())) .module_path_static(Some("flexi_logger")) .target("flexi_logger") .args(format_args!("last record was skipped {skipped} times")) .build(), )?; // Log new record log_line_writer.write(now, record) } DedupAction::Skip => Ok(()), } } } // A helper to track duplicated consecutive logs and skip them until a // different event is received. struct Deduper { leeway: NonZeroUsize, last_record: LastRecord, duplicates: usize, } /// Action to be performed for some record. #[derive(Debug, PartialEq, Eq)] enum DedupAction { /// The record should be allowed and logged normally. Allow, /// The record is the last consecutive duplicate to be allowed. /// /// Any following duplicates will be skipped until a different event is /// received (or the duplicates count overflows). 
AllowLastOfLeeway(usize), /// The record should be allowed, the last `N` records were skipped as /// consecutive duplicates. AllowAfterSkipped(usize), /// The record should be skipped because no more consecutive duplicates /// are allowed. Skip, } impl Deduper { // Constructs a new [`Deduper`] that will skip duplicated entries after // some record has been received for the consecutive times specified by // `leeway`. pub fn with_leeway(leeway: NonZeroUsize) -> Self { Self { leeway, last_record: LastRecord { file: None, line: None, msg: String::new(), }, duplicates: 0, } } /// Returns wether a record should be skipped or allowed. /// /// See [`DedupAction`]. fn dedup(&mut self, record: &Record) -> DedupAction { let new_line = record.line(); let new_file = record.file(); let new_msg = record.args().to_string(); if new_line == self.last_record.line && new_file == self.last_record.file.as_deref() && new_msg == self.last_record.msg { // Update dups count if let Some(updated_dups) = self.duplicates.checked_add(1) { self.duplicates = updated_dups; } else { let skipped = self.duplicates - self.leeway(); self.duplicates = 0; return DedupAction::AllowAfterSkipped(skipped); } match self.duplicates.cmp(&self.leeway()) { Ordering::Less => DedupAction::Allow, Ordering::Equal => DedupAction::AllowLastOfLeeway(self.leeway()), Ordering::Greater => DedupAction::Skip, } } else { // Update last record self.last_record.file = new_file.map(ToOwned::to_owned); self.last_record.line = new_line; self.last_record.msg = new_msg; let dups = self.duplicates; self.duplicates = 0; match dups { n if n > self.leeway() => DedupAction::AllowAfterSkipped(n - self.leeway()), _ => DedupAction::Allow, } } } fn leeway(&self) -> usize { self.leeway.get() } } struct LastRecord { file: Option, line: Option, msg: String, } #[cfg(test)] mod tests { use super::*; #[test] fn test_record_eq() { let leeway = NonZeroUsize::new(1).unwrap(); let msg = format_args!("b"); let mut deduper = Deduper::with_leeway(leeway); let record = Record::builder() .file(Some("a")) .line(Some(1)) .args(msg) .build(); let diff_file = Record::builder() .file(Some("b")) .line(Some(1)) .args(msg) .build(); let diff_line = Record::builder() .file(Some("b")) .line(Some(2)) .args(msg) .build(); let diff_msg = Record::builder() .file(Some("b")) .line(Some(2)) .args(format_args!("diff msg")) .build(); // First one is allowed assert_eq!(deduper.dedup(&record), DedupAction::Allow); // Second one is allowed because it comes from a diff file assert_eq!(deduper.dedup(&diff_file), DedupAction::Allow); // Third one is allowed because it comes from a diff line assert_eq!(deduper.dedup(&diff_line), DedupAction::Allow); // Fourth one is allowed because it has a diff msg assert_eq!(deduper.dedup(&diff_msg), DedupAction::Allow); } #[test] fn test_within_leeway_and_reset() { let leeway = NonZeroUsize::new(2).unwrap(); let mut deduper = Deduper::with_leeway(leeway); let record_a = Record::builder() .file(Some("a")) .line(Some(1)) .args(format_args!("b")) .build(); let record_b = Record::builder() .file(Some("b")) .line(Some(1)) .args(format_args!("b")) .build(); // All should be allowed as they are within leeway and dups are reset assert_eq!(deduper.dedup(&record_a), DedupAction::Allow); assert_eq!(deduper.dedup(&record_a), DedupAction::Allow); assert_eq!(deduper.dedup(&record_b), DedupAction::Allow); assert_eq!(deduper.dedup(&record_b), DedupAction::Allow); assert_eq!(deduper.dedup(&record_a), DedupAction::Allow); assert_eq!(deduper.dedup(&record_a), DedupAction::Allow); } #[test] 
fn test_leeway_warning() { let leeway = NonZeroUsize::new(4).unwrap(); let mut deduper = Deduper::with_leeway(leeway); let dup = Record::builder() .file(Some("a")) .line(Some(1)) .args(format_args!("b")) .build(); // First one should be allowed assert_eq!(deduper.dedup(&dup), DedupAction::Allow); // Silently allow the same log as long as leeway isn't met for _ in 0..(deduper.leeway() - 1) { assert_eq!(deduper.dedup(&dup), DedupAction::Allow); } // Allow last one within the leeway with a warning assert_eq!( deduper.dedup(&dup), DedupAction::AllowLastOfLeeway(deduper.leeway()) ); } #[test] fn test_dups() { let mut deduper = Deduper::with_leeway(NonZeroUsize::new(1).unwrap()); let dup = Record::builder() .file(Some("a")) .line(Some(1)) .args(format_args!("b")) .build(); let new_record = Record::builder() .file(Some("a")) .line(Some(1)) .args(format_args!("c")) .build(); // First one should be allowed assert_eq!(deduper.dedup(&dup), DedupAction::Allow); // Second one should be the last one allowed because of the leeway assert_eq!(deduper.dedup(&dup), DedupAction::AllowLastOfLeeway(1)); // Third one should be skipped assert_eq!(deduper.dedup(&dup), DedupAction::Skip); // A new log would be allowed with the summary of the skipped ones assert_eq!( deduper.dedup(&new_record), DedupAction::AllowAfterSkipped(1) ); } #[test] fn test_overflowed_dups() { let mut deduper = Deduper::with_leeway(NonZeroUsize::new(1).unwrap()); let dup = Record::builder() .file(Some("a")) .line(Some(1)) .args(format_args!("b")) .build(); // Bring dups to the edge of overflow deduper.duplicates = usize::MAX; // One more dup would overflow the usize, so next one is allowed assert_eq!( deduper.dedup(&dup), DedupAction::AllowAfterSkipped(usize::MAX - deduper.leeway()) ); // Dups are reset, next one is allowed as last under leeway assert_eq!( deduper.dedup(&dup), DedupAction::AllowLastOfLeeway(deduper.leeway()) ); assert_eq!(deduper.duplicates, 1); } } flexi_logger-0.29.8/examples/entry_numbers.rs000064400000000000000000000014531046102023000174550ustar 00000000000000use flexi_logger::{DeferredNow, FlexiLoggerError, Logger}; use log::*; use std::sync::atomic::{AtomicU32, Ordering}; // Produces // 1 INFO [entry_numbers] first // 2 WARN [entry_numbers] second // 3 ERROR [entry_numbers] third fn main() -> Result<(), FlexiLoggerError> { Logger::try_with_str("info")?.format(my_format).start()?; info!("first"); warn!("second"); error!("third"); Ok(()) } pub fn my_format( w: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { static LINE: AtomicU32 = AtomicU32::new(1); write!( w, "{:>6} {} [{}] {}", LINE.fetch_add(1, Ordering::Relaxed), record.level(), record.module_path().unwrap_or(""), record.args() ) } flexi_logger-0.29.8/examples/filter.rs000064400000000000000000000014251046102023000160450ustar 00000000000000use flexi_logger::{ filter::{LogLineFilter, LogLineWriter}, DeferredNow, FlexiLoggerError, }; fn main() -> Result<(), FlexiLoggerError> { flexi_logger::Logger::try_with_str("info")? 
.filter(Box::new(BarsOnly)) .start()?; log::info!("barista"); log::info!("foo"); // will be swallowed by the filter log::info!("bar"); log::info!("gaga"); // will be swallowed by the filter Ok(()) } pub struct BarsOnly; impl LogLineFilter for BarsOnly { fn write( &self, now: &mut DeferredNow, record: &log::Record, log_line_writer: &dyn LogLineWriter, ) -> std::io::Result<()> { if record.args().to_string().contains("bar") { log_line_writer.write(now, record)?; } Ok(()) } } flexi_logger-0.29.8/examples/performance.rs000064400000000000000000000023271046102023000170630ustar 00000000000000use std::fmt; use std::time::Instant; struct Struct { data: [u8; 32], } impl fmt::Display for Struct { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:?}", self.data) } } fn main() { // -------------------------------- println!("flexi_logger"); flexi_logger::Logger::try_with_str("off") .unwrap() .format(flexi_logger::detailed_format) .start() .unwrap(); // -------------------------------- // $> Set-Item -Path Env:RUST_LOG -Value "trace" // println!("env_logger"); // env_logger::init(); // $> Set-Item -Path Env:RUST_LOG // -------------------------------- let mut structs = Vec::new(); for i in 0..100 { structs.push(Struct { data: [i as u8; 32], }); } { // With format let start = Instant::now(); for s in &structs { log::info!("{}", format!("{s}")); } eprintln!("with format: {:?}", start.elapsed()); // 2-7ms } { // Plain logger let start = Instant::now(); for s in &structs { log::info!("{}", s); } eprintln!("plain: {:?}", start.elapsed()); // 17-26ms } } flexi_logger-0.29.8/examples/rotate.rs000064400000000000000000000013651046102023000160610ustar 00000000000000use flexi_logger::{ Age, Cleanup, Criterion, Duplicate, FileSpec, FlexiLoggerError, LevelFilter, Logger, Naming, }; use std::{thread::sleep, time::Duration}; fn main() -> Result<(), FlexiLoggerError> { Logger::with(LevelFilter::Info) .rotate( Criterion::Age(Age::Second), Naming::TimestampsCustomFormat { current_infix: None, format: "%Y%m%d_%H%M%S", }, Cleanup::Never, ) .log_to_file(FileSpec::default()) .duplicate_to_stdout(Duplicate::All) .start()?; log::info!("start"); for step in 0..10 { log::info!("step {}", step); sleep(Duration::from_millis(250)); } log::info!("done"); Ok(()) } flexi_logger-0.29.8/examples/version_numbers.rs000064400000000000000000000001571046102023000200010ustar 00000000000000use version_sync::assert_markdown_deps_updated; fn main() { assert_markdown_deps_updated!("README.md"); } flexi_logger-0.29.8/examples/write_writer.rs000064400000000000000000000014611046102023000173060ustar 00000000000000use flexi_logger::writers::LogWriter; use std::{ io::{Error, ErrorKind}, sync::{Arc, Mutex}, }; fn main() {} #[allow(dead_code)] struct MyWriter { file: Arc>, } impl LogWriter for MyWriter { fn write( &self, now: &mut flexi_logger::DeferredNow, record: &flexi_logger::Record, ) -> std::io::Result<()> { let mut file = self .file .lock() .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?; flexi_logger::default_format(&mut *file, now, record) } fn flush(&self) -> std::io::Result<()> { let mut file = self .file .lock() .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?; file.flush() } } flexi_logger-0.29.8/scripts/check.rs000075500000000000000000000034261046102023000155140ustar 00000000000000#!/usr/bin/env rust-script //! ```cargo //! [dependencies] //! yansi = "0.5" //! ``` extern crate yansi; use std::process::Command; macro_rules! 
run_command { ($cmd:expr , $($arg:expr),*) => ( let mut command = command!($cmd, $($arg),*); let mut child = command.spawn().unwrap(); let status = child.wait().unwrap(); if !status.success() { print!("> {}",yansi::Paint::red("qualify terminates due to error")); std::process::exit(-1); } ) } macro_rules! command { ($cmd:expr , $($arg:expr),*) => ( { print!("\n> {}",yansi::Paint::yellow($cmd)); let mut command = Command::new($cmd); $( print!(" {}",yansi::Paint::yellow(&$arg)); command.arg($arg); )* print!("\n"); command } ) } // fn run_script(s: &str) { // let mut path = std::path::PathBuf::from(std::env::var("CARGO_SCRIPT_BASE_PATH").unwrap()); // path.push(s); // let script = path.to_string_lossy().to_owned().to_string(); // run_command!("cargo", "script", script); // } fn main() { // Check in important variants run_command!("cargo", "check"); run_command!("cargo", "check", "--all-features"); run_command!("cargo", "check", "--no-default-features"); run_command!("cargo", "check", "--features= specfile"); run_command!("cargo", "check", "--features= trc"); // Clippy in important variants run_command!("cargo", "clippy", "--all-features", "--", "-D", "warnings"); // doc #[rustfmt::skip] run_command!("cargo", "+nightly", "doc", "--all-features", "--no-deps", "--open"); // say goodbye println!("\n> checks are done :-) Looks like you're ready to do the full qualification?"); } flexi_logger-0.29.8/scripts/cleanup.rs000075500000000000000000000033221046102023000160610ustar 00000000000000#!/usr/bin/env rust-script //! Cleans up all files and folders that were produced by test runs. //! //! ```cargo //! [dependencies] //! glob = "*" //! ``` extern crate glob; fn main() { for pattern in &[ "./*.alerts", "./*.log", "./*.seclog", "./*logspec.toml", "./log_files/**/.DS_Store", "./log_files/**/test_restart_with_no_suffix-*", "./log_files/**/*.alerts", "./log_files/**/*.csv", "./log_files/**/*.err", "./log_files/**/*.gz", "./log_files/**/*.log", "./log_files/**/*.seclog", "./log_files/**/*.toml", "./server/**/*.toml", ] { for globresult in glob::glob(pattern).unwrap() { match globresult { Err(e) => eprintln!("Evaluating pattern {:?} produced error {}", pattern, e), Ok(pathbuf) => { std::fs::remove_file(&pathbuf).unwrap(); } } } } for dir_pattern in ["./log_files/**", "./server/**"] { let dirs: Vec = glob::glob(dir_pattern) .unwrap() .filter_map(|r| match r { Err(e) => { eprintln!("Searching for folders produced error {}", e); None } Ok(_) => Some(r.unwrap()), }) .collect(); for pathbuf in dirs.iter().rev() { std::fs::remove_dir(&pathbuf).expect(&format!("folder not empty? {:?}", pathbuf)); } } std::fs::remove_dir("./log_files/").ok(); std::fs::remove_dir("./server/").ok(); std::fs::remove_file("./link_to_log").ok(); std::fs::remove_file("./link_to_mt_log").ok(); } flexi_logger-0.29.8/scripts/qualify.rs000075500000000000000000000061041046102023000161050ustar 00000000000000#!/usr/bin/env rust-script //! ```cargo //! [dependencies] //! yansi = "0.5" //! ``` extern crate yansi; use std::process::Command; macro_rules! run_command { ($cmd:expr) => { let mut command = command!($cmd); let mut child = command.spawn().unwrap(); let status = child.wait().unwrap(); if !status.success() { println!( "{} in {}", yansi::Paint::red("qualify terminates due to error"), yansi::Paint::yellow($cmd) ); std::process::exit(-1); } }; } macro_rules! 
command { ($cmd:expr) => {{ print!("\n> {}\n", yansi::Paint::yellow($cmd)); let mut chips = $cmd.split(' '); let mut command = Command::new(chips.next().unwrap()); for chip in chips { command.arg(chip); } command }}; } fn run_script(s: &str) { let mut path = std::path::PathBuf::from("./scripts"); path.push(s); let command = format!( "cargo script {}", path.to_string_lossy().to_owned().to_string() ); run_command!(&command); } fn main() { println!("Qualify flexi_logger"); run_command!("date"); // format run_command!("cargo fmt"); // Build in important variants std::fs::remove_file("Cargo.lock").ok(); run_command!("cargo +1.72.0 build --no-default-features"); run_command!("cargo +1.72.0 build --all-features"); std::fs::remove_file("Cargo.lock").ok(); run_command!("cargo build"); run_command!("cargo build --no-default-features"); run_command!("cargo build --all-features"); run_command!("cargo build --release"); run_command!("cargo build --release --all-features"); // Clippy in important variants run_command!("cargo clippy -- -D warnings"); run_command!("cargo clippy --all-features -- -D warnings"); run_command!("cargo +nightly clippy --all-targets --all-features -- -D warnings"); // Run tests in important variants run_command!("cargo +1.72.0 test --all-features"); run_command!("cargo test --release --all-features"); run_command!("cargo test --no-default-features"); run_command!("cargo test --release"); run_command!("cargo test --release --features specfile_without_notification"); // doc run_command!("cargo +nightly test --all-features --doc"); run_command!("cargo +nightly doc --all-features --no-deps --open"); // check version consistency run_command!("cargo run --example version_numbers"); // check git status let mut cmd = command!("git status -s"); let child = cmd.stdout(std::process::Stdio::piped()).spawn().unwrap(); let output = child.wait_with_output().unwrap(); if output.stdout.len() > 0 { print!("> {}", yansi::Paint::red("there are unsubmitted files")); std::process::exit(-1); } // say goodbye println!( "\n\ > all done :-) Looks like you're ready to\n\ - \"git push\"\n\ - check if the github actions were successful, and then\n\ - \"cargo publish\"" ); // cleanup run_script("cleanup"); } flexi_logger-0.29.8/scripts/qualify_fast.rs000075500000000000000000000030561046102023000171250ustar 00000000000000#!/usr/bin/env rust-script //! ```cargo //! [dependencies] //! yansi = "0.5" //! ``` extern crate yansi; use std::process::Command; macro_rules! run_command { ($cmd:expr) => { let mut command = command!($cmd); let mut child = command.spawn().unwrap(); let status = child.wait().unwrap(); if !status.success() { print!("> {}", yansi::Paint::red("qualify terminates due to error")); std::process::exit(-1); } }; } macro_rules! 
command { ($cmd:expr) => {{ print!("\n> {}\n", yansi::Paint::yellow($cmd)); let mut chips = $cmd.split(' '); let mut command = Command::new(chips.next().unwrap()); for chip in chips { command.arg(chip); } command }}; } fn run_script(s: &str) { let mut path = std::path::PathBuf::from("./scripts"); path.push(s); let command = format!( "cargo script {}", path.to_string_lossy().to_owned().to_string() ); run_command!(&command); } fn main() { // Build in important variants run_command!("cargo build --release --all-features"); // Clippy in important variants run_command!("cargo clippy --all-features -- -D warnings"); // Run tests in important variants run_command!("cargo test --release --all-features"); run_script("cleanup"); // doc run_command!("cargo doc --all-features --no-deps --open"); // say goodbye println!( "\n> fast qualification is done :-) Looks like you're ready to do the full qualification?" ); } flexi_logger-0.29.8/src/.markdownlint.json000064400000000000000000000003441046102023000166440ustar 00000000000000{ "MD041": { "level": 2 }, "MD013": { "line_length": 100, "heading_line_length": 300 }, "MD033": { "allowed_elements": [ "span", "br" ] } } flexi_logger-0.29.8/src/code_examples.md000064400000000000000000000457251046102023000163300ustar 00000000000000## Contents - [Start minimally: Initialize, and write logs to stderr](#start-minimally-initialize-and-write-logs-to-stderr) - [Choose the log output channel](#choose-the-log-output-channel) - [Choose the write mode](#choose-the-write-mode) - [Influence the location and name of the log file](#influence-the-location-and-name-of-the-log-file) - [Specify the format for the log lines explicitly](#specify-the-format-for-the-log-lines-explicitly) - [Use a fixed log file, and truncate or append the file on each program start](#use-a-fixed-log-file-and-truncate-or-append-the-file-on-each-program-start) - [Rotate the log file](#rotate-the-log-file) - [Reconfigure the log specification programmatically](#reconfigure-the-log-specification-programmatically) - [Reconfigure the log specification dynamically by editing a spec-file](#reconfigure-the-log-specification-dynamically-by-editing-a-spec-file) - [Reconfigure the file log writer](#reconfigure-the-file-log-writer) - [External file rotators](#external-file-rotators) - [Miscellaneous](#miscellaneous) ## Start minimally: Initialize, and write logs to stderr Initialize by choosing one of three options to specify which log output you want to see, and call `start()` immediately: - Provide the log specification in the environment variable `RUST_LOG`: ```rust # use flexi_logger::{Logger,FlexiLoggerError}; # fn main() -> Result<(), FlexiLoggerError> { Logger::try_with_env()?.start()?; # Ok(())} ``` - Provide the log specification programmatically: ```rust # use flexi_logger::{Logger,FlexiLoggerError}; # fn main() -> Result<(), FlexiLoggerError> { Logger::try_with_str("info")?.start()?; # Ok(())} ``` - Combine both options, with env having precendence over the given parameter value: ```rust # use flexi_logger::{Logger,FlexiLoggerError}; # fn main() -> Result<(), FlexiLoggerError> { Logger::try_with_env_or_str("info")?.start()?; # Ok(())} ``` or, even shorter, use: ```rust flexi_logger::init(); ``` After that, you just use the log-macros from the log crate. Those log lines that match the log specification are then written to the default output channel (stderr). ## Choose the log output channel By default, logs are written to `stderr`. 
With one of - [`Logger::log_to_stdout`](crate::Logger::log_to_stdout), - [`Logger::log_to_file`](crate::Logger::log_to_file), - [`Logger::log_to_writer`](crate::Logger::log_to_writer), - [`Logger::log_to_file_and_writer`](crate::Logger::log_to_file_and_writer), - or [`Logger::do_not_log`](crate::Logger::do_not_log), you can send the logs to other destinations, or write them not at all. When writing to files or to a writer, you sometimes want to see some parts of the log additionally on the terminal; this can be achieved with [`Logger::duplicate_to_stderr`](crate::Logger::duplicate_to_stderr) or [`Logger::duplicate_to_stdout`](crate::Logger::duplicate_to_stdout), which duplicate log messages to the terminal. ```rust # use flexi_logger::{Logger,Duplicate, FileSpec}; # fn main() -> Result<(), Box> { Logger::try_with_str("info")? .log_to_file(FileSpec::default()) // write logs to file .duplicate_to_stderr(Duplicate::Warn) // print warnings and errors also to the console .start()?; # Ok(()) # } ``` ## Choose the write mode By default, every log line is directly written to the output, without buffering. This allows seeing new log lines in real time. With [`Logger::write_mode`](crate::Logger::write_mode) you have some options to change this behavior, e.g. - with [`WriteMode::BufferAndFlush`](crate::WriteMode::BufferAndFlush), or [`WriteMode::BufferAndFlushWith`](crate::WriteMode::BufferAndFlushWith), you can reduce the program's I/O overhead and thus increase overall performance, which can be relevant if logging is used heavily. In addition, to keep a short maximum wait time until a log line is visible in the output channel, an extra thread is created that flushes the buffers regularly. ```rust # use flexi_logger::{WriteMode,FileSpec,Logger,Duplicate}; fn main() -> Result<(), Box> { let _logger = Logger::try_with_str("info")? .log_to_file(FileSpec::default()) .write_mode(WriteMode::BufferAndFlush) .start()?; // ... do all your work ... Ok(()) } ```
- with [`WriteMode::Async`](crate::WriteMode::Async) or [`WriteMode::AsyncWith`](crate::WriteMode::AsyncWith), logs are sent from your application threads through an unbounded channel to an output thread, which does the output (and the rotation and the cleanup, if applicable). Additionally, the output is buffered, and a bounded message pool is used to reduce allocations, and flushing is used to avoid long delays. If duplication is used, the messages to `stdout` or `stderr` are written synchronously. ```rust # use flexi_logger::{WriteMode, Duplicate, FileSpec, Logger}; fn main() -> Result<(), Box> { # #[cfg(feature="async")] let _logger = Logger::try_with_str("info")? .log_to_file(FileSpec::default()) .write_mode(WriteMode::Async) .start()?; // ... do all your work ... Ok(()) } ```
- with [`WriteMode::SupportCapture`](crate::WriteMode::SupportCapture) you allow `cargo test` to capture log output and print it only for failing tests. Note that, with all write modes except [`WriteMode::Direct`](crate::WriteMode::Direct) (which is the default) and [`WriteMode::SupportCapture`](crate::WriteMode::SupportCapture), you should **keep the [`LoggerHandle`](crate::LoggerHandle) alive** up to the very end of your program, because, when its last instance is dropped (in case you use `LoggerHandle::clone()` you can have multiple instances), it will flush all writers to ensure that all buffered log lines are written before the program terminates, and then it calls their shutdown method. ## Influence the location and name of the log file By default, the log files are created in the current directory (where the program was started). With [`FileSpec:directory`](crate::FileSpec::directory) you can specify a concrete folder in which the files should be created. Using [`FileSpec::discriminant`](crate::FileSpec::discriminant) you can add a discriminating infix to the log file name. With [`FileSpec::suffix`](crate::FileSpec::suffix) you can change the suffix that is used for the log files. When writing to files, especially when they are in a distant folder, you may want to let the user know where the log file is. [`Logger::print_message`](crate::Logger::print_message) prints an info to `stdout` to which file the log is written. [`Logger::create_symlink`](crate::Logger::create_symlink) creates (on unix-systems only) a symbolic link at the specified path that points to the log file. ```rust # use flexi_logger::{FileSpec,Logger}; # fn main() -> Result<(), Box> { Logger::try_with_str("info")? .log_to_file( FileSpec::default() .directory("log_files") // create files in folder ./log_files .basename("foo") .discriminant("Sample4711A") // use infix in log file name .suffix("trc") // use suffix .trc instead of .log ) .print_message() // .create_symlink("current_run") // create a symbolic link to the current log file .start()?; # Ok(()) # } ``` This example will print a message like "Log is written to `./log_files/foo_Sample4711A_2020-11-17_19-24-35.trc`" and, on unix, create a symbolic link called `current_run`. ## Specify the format for the log lines explicitly With [`Logger::format`](crate::Logger::format) you set the format for all used output channels of `flexi_logger`. `flexi_logger` provides a couple of format functions, and you can also create and use your own, e.g. by copying and modifying one of the provided format functions (see [formats.rs](https://github.com/emabee/flexi_logger/blob/main/src/formats.rs)). Here's an example that you could create somewhere in your code. It also illustrates the signature the format function must have. ```rust,ignore pub fn my_own_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let level = record.level(); write!( w, "{} [Thread {}] Severity {}, Message: {}", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), thread::current().name().unwrap_or(""), record.level(), &record.args() ) } ``` Depending on the configuration, `flexi_logger` can write logs to multiple channels (stdout, stderr, files, or additional writers) at the same time. 
You can control the format for each output channel individually, using [`Logger::format_for_files`](crate::Logger::format_for_files), [`Logger::format_for_stderr`](crate::Logger::format_for_stderr), [`Logger::format_for_stdout`](crate::Logger::format_for_stdout), or [`Logger::format_for_writer`](crate::Logger::format_for_writer). As argument for these functions you can use one of the provided non-coloring format functions - [`default_format`](crate::default_format) - [`detailed_format`](crate::detailed_format) - [`opt_format`](crate::opt_format) - [`with_thread`](crate::with_thread), or one of their coloring pendants - [`colored_default_format`](crate::colored_default_format) - [`colored_detailed_format`](crate::colored_detailed_format) - [`colored_opt_format`](crate::colored_opt_format) - [`colored_with_thread`](crate::colored_with_thread), or your own method. ### Adaptive Coloring You can use coloring for `stdout` and/or `stderr` _conditionally_, such that colors - are used when the output goes to a tty, - are suppressed when you e.g. pipe the output to some other program. You achieve that by providing one of the variants of [`AdaptiveFormat`](crate::AdaptiveFormat) to the respective format method, e.g. ```rust # use flexi_logger::AdaptiveFormat; # fn main() -> Result<(), Box> { # { flexi_logger::Logger::try_with_str("info")? .adaptive_format_for_stderr(AdaptiveFormat::Detailed); # } # Ok(()) # } ``` ### Defaults `flexi_logger` initializes by default equivalently to this: ```rust # mod example { # use flexi_logger::{Logger,AdaptiveFormat,default_format, FileSpec}; # use log::{debug, error, info, trace, warn}; # fn main() -> Result<(), Box> { # Logger::try_with_str("info")? // Write all error, warn, and info messages # .log_to_file(FileSpec::default().directory(std::env::temp_dir())) // ... .adaptive_format_for_stderr(AdaptiveFormat::Default) .adaptive_format_for_stdout(AdaptiveFormat::Default) .format_for_files(default_format) .format_for_writer(default_format) # .start()?; # error!("This is an error message"); # warn!("This is a warning"); # info!("This is an info message"); # debug!("This is a debug message - you must not see it!"); # trace!("This is a trace message - you must not see it!"); # run() # } # fn run() -> Result<(), Box> {Ok(())} # } ``` ## Use a fixed log file, and truncate or append the file on each program start With [`Logger::log_to_file`](crate::Logger::log_to_file) and without rotation, `flexi_logger` uses by default files with a timestamp in the name, like `foo_2020-11-16_08-37-44.log` (for a program called `foo`), which are quite unique for each program start. With [`FileSpec::suppress_timestamp`](crate::FileSpec::suppress_timestamp) you get a simple fixed filename, like `foo.log`. In that case, a restart of the program will truncate an existing log file. Use additionally [`Logger::append`](crate::Logger::append) to append the logs of each new run to the existing file. ```rust # use flexi_logger::{FileSpec, Logger}; # use log::{debug, error, info, trace, warn}; # fn main() -> Result<(), Box> { Logger::try_with_str("info")? 
// Write all error, warn, and info messages // use a simple filename without a timestamp .log_to_file( FileSpec::default().suppress_timestamp() # .directory(std::env::temp_dir()) ) // do not truncate the log file when the program is restarted .append() .start()?; # error!("This is an error message"); # warn!("This is a warning"); # info!("This is an info message"); # debug!("This is a debug message - you must not see it!"); # trace!("This is a trace message - you must not see it!"); # run() # } # fn run() -> Result<(), Box> {Ok(())} ``` ## Rotate the log file With rotation, the logs are always written to a file with the infix `rCURRENT`, like e.g. `foo_rCURRENT.log`. [`Logger::rotate`](crate::Logger::rotate) takes three enum arguments that define its behavior: - [`Criterion`](crate::Criterion) - with [`Criterion::Age`](crate::Criterion::Age) the rotation happens when the clock switches to a new day, hour, minute, or second - with [`Criterion::Size`](crate::Criterion::Size) the rotation happens when the current log file exceeds the specified limit - with [`Criterion::AgeOrSize`](crate::Criterion::AgeOrSize) the rotation happens when either of the two limits is reached - [`Naming`](crate::Naming)
The current file is then renamed - with [`Naming::Timestamps`](crate::Naming::Timestamps) to something like `foo_r2020-11-16_08-56-52.log` - with [`Naming::Numbers`](crate::Naming::Numbers) to something like `foo_r00000.log` and a fresh `rCURRENT` file is created. - [`Cleanup`](crate::Cleanup) defines if and how you avoid accumulating log files indefinitely: - with [`Cleanup::KeepLogFiles`](crate::Cleanup::KeepLogFiles) you specify the number of log files that should be retained; if there are more, the older ones are getting deleted - with [`Cleanup::KeepCompressedFiles`](crate::Cleanup::KeepCompressedFiles) you specify the number of log files that should be retained, and these are being compressed additionally - with [`Cleanup::KeepLogAndCompressedFiles`](crate::Cleanup::KeepLogAndCompressedFiles) you specify the number of log files that should be retained as is, and an additional number that are being compressed - with [`Cleanup::Never`](crate::Cleanup::Never) no cleanup is done, all files are retained. ```rust # use flexi_logger::{Age, Cleanup, Criterion, FileSpec, Logger, Naming}; # use log::{debug, error, info, trace, warn}; # fn main() -> Result<(), Box> { Logger::try_with_str("info")? // Write all error, warn, and info messages .log_to_file( FileSpec::default() # .directory(std::env::temp_dir()) ) .rotate( // If the program runs long enough, Criterion::Age(Age::Day), // - create a new file every day Naming::Timestamps, // - let the rotated files have a timestamp in their name Cleanup::KeepLogFiles(7), // - keep at most 7 log files ) .start()?; # error!("This is an error message"); # warn!("This is a warning"); # info!("This is an info message"); # debug!("This is a debug message - you must not see it!"); # trace!("This is a trace message - you must not see it!"); # run() # } # fn run() -> Result<(), Box> {Ok(())} ``` ## Reconfigure the log specification programmatically This can be especially handy in debugging situations where you want to see log output only for a short instant. Obtain the [`LoggerHandle`](crate::LoggerHandle) ```rust # use flexi_logger::Logger; let mut logger = Logger::try_with_str("info").unwrap() // ... logger configuration ... .start() .unwrap(); ``` and modify the effective log specification from within your code: ```rust, ignore # use flexi_logger::Logger; # let mut logger = Logger::try_with_str("info").unwrap().start().unwrap(); // ... logger.parse_and_push_temp_spec("info, critical_mod = trace"); // ... critical calls ... logger.pop_temp_spec(); // ... continue with the log spec you had before. ``` ## Reconfigure the log specification dynamically by editing a spec-file If you start `flexi_logger` with a specfile, ```rust # use flexi_logger::Logger; # let logger = Logger::try_with_str("info").unwrap() // ... logger configuration ... # ; # #[cfg(feature = "specfile")] # logger .start_with_specfile("./server/config/logspec.toml") .unwrap(); ``` then you can change the log specification dynamically, _while your program is running_, by editing the specfile. This can be a great help e.g. if you want to get detailed log output for _some_ requests to a long running server. See [`Logger::start_with_specfile`](crate::Logger::start_with_specfile) for more information. ## [Reconfigure the file log writer](#reconfigure-the-file-log-writer) When using `Logger::log_to_file()`, you can change most of the properties of the embedded `FileLogWriter` while the program is running using [`Logger::reset_flw`](crate::LoggerHandle::reset_flw). 
Obtain the [`LoggerHandle`](crate::LoggerHandle) when the program is started ```rust use flexi_logger::{writers::FileLogWriter, Cleanup, Criterion, FileSpec, Naming}; # use std::error::Error; # fn main() -> Result<(), Box> { let logger = flexi_logger::Logger::try_with_str("info")? .log_to_file( FileSpec::default() .basename("phase1") .directory("./log_files") ) .start()?; log::info!("start of phase 1"); # Ok(()) # } ``` and modify the file log writer later: ```rust # use std::error::Error; # use flexi_logger::{writers::FileLogWriter, Cleanup, Criterion, FileSpec, Naming}; # fn main() -> Result<(), Box> { # let logger = flexi_logger::Logger::try_with_str("info")? # .log_to_file(FileSpec::default().basename("phase1").directory("./log_files")) # .start()?; logger.reset_flw( &FileLogWriter::builder( FileSpec::default() .basename("phase2") .directory("./log_files") ) .append() .rotate( Criterion::Size(1024 * 1000 * 1), Naming::Numbers, Cleanup::KeepLogFiles(3), ), )?; log::info!("start of phase 2"); # Ok(()) # } ``` ## External file rotators If the log is written to files, `flexi_logger` decides, based on your configuration, to which file(s) the log is written, and expects that nobody else modifies these files. It offers quite some functionality to rotate, compress, and clean up log files. Alternatively, tools like linux' `logrotate` can be used to rotate, compress or remove log files. But renaming or deleting the current output file e.g. might not stop `flexi_logger` from writing to the now renamed file! See [`LoggerHandle::reopen_outputfile`](../struct.LoggerHandle.html#method.reopen_outputfile) to understand how to cope with external rotators. ## Miscellaneous For the sake of completeness, we refer here to some more configuration methods. See their documentation for more details. [`Logger::set_palette`](crate::Logger::set_palette) [`Logger::cleanup_in_background_thread`](crate::Logger::cleanup_in_background_thread) [`Logger::use_windows_line_ending`](crate::Logger::use_windows_line_ending) [`Logger::add_writer`](crate::Logger::add_writer) flexi_logger-0.29.8/src/code_examples.rs000064400000000000000000000001371046102023000163400ustar 00000000000000//! Examples for the `flexi_logger` initialization. #![doc = include_str!("code_examples.md")] flexi_logger-0.29.8/src/deferred_now.rs000064400000000000000000000174271046102023000162050ustar 00000000000000use chrono::{ format::{DelayedFormat, StrftimeItems}, DateTime, Local, SecondsFormat, Utc, }; #[cfg(feature = "syslog_writer")] use chrono::{Datelike, Timelike}; use std::sync::{Mutex, OnceLock}; /// Deferred timestamp creation. /// /// Is used to ensure that a log record that is sent to multiple outputs /// (in maybe different formats) always uses the same timestamp. #[derive(Debug, Default)] pub struct DeferredNow(Option>); impl<'a> DeferredNow { /// Constructs a new instance, but does not generate the timestamp. #[must_use] pub fn new() -> Self { Self(None) } #[cfg(test)] #[must_use] fn new_from_datetime(dt: DateTime) -> Self { Self(Some(dt)) } /// Retrieve the timestamp for local time zone. /// /// Requires mutability because the first caller will generate the timestamp. pub fn now(&'a mut self) -> &'a DateTime { self.0.get_or_insert_with(Local::now) } /// Retrieve the UTC timestamp. /// /// Requires mutability because the first caller will generate the timestamp. pub fn now_utc_owned(&'a mut self) -> DateTime { (*self.now()).into() } /// Produces a preformatted object suitable for printing. 
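    ///
    /// A minimal usage sketch with the crate's standard timestamp format
    /// (the variable name is only illustrative):
    ///
    /// ```rust
    /// use flexi_logger::{DeferredNow, TS_DASHES_BLANK_COLONS_DOT_BLANK};
    ///
    /// let mut now = DeferredNow::new();
    /// println!("{}", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK));
    /// ```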
/// /// # Panics /// /// Panics if `fmt` has an inappropriate value. pub fn format<'b>(&'a mut self, fmt: &'b str) -> DelayedFormat> { if use_utc() { self.now_utc_owned().format(fmt) } else { self.now().format(fmt) } } /// Prints itself in a format compliant with RFC 3339. /// /// Example: 2021-04-29T13:14:15.678+01:00 /// /// We do not use the Z variant of RFC 3339, because it is often misinterpreted. pub fn format_rfc3339(&mut self) -> String { if use_utc() { self.now_utc_owned() .to_rfc3339_opts(SecondsFormat::Millis, false) } else { self.now().to_rfc3339_opts(SecondsFormat::Millis, false) } } // format_rfc3164: Mmm dd hh:mm:ss, where // mmm = one of "Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec", // dd = "xy" where x = " " or "1" or "2" or "3" // hh = "00" ... "23" // mm, ss= "00" ... "59" #[cfg(feature = "syslog_writer")] pub(crate) fn format_rfc3164(&mut self) -> String { let (date, time) = if use_utc() { let now = self.now_utc_owned(); (now.date_naive(), now.time()) } else { let now = self.now(); (now.date_naive(), now.time()) }; format!( "{mmm} {dd:>2} {hh:02}:{mm:02}:{ss:02}", mmm = match date.month() { 1 => "Jan", 2 => "Feb", 3 => "Mar", 4 => "Apr", 5 => "May", 6 => "Jun", 7 => "Jul", 8 => "Aug", 9 => "Sep", 10 => "Oct", 11 => "Nov", 12 => "Dec", _ => unreachable!(), }, dd = date.day(), hh = time.hour(), mm = time.minute(), ss = time.second() ) } /// Enforce the use of UTC rather than local time. /// /// By default, `flexi_logger` uses or tries to use local time. /// By calling early in your program either `Logger::use_utc()` or directly this method, /// you can override this to always use UTC. /// /// # Panics /// /// Panics if called too late, i.e., if [`DeferredNow::now`] was already called before on /// any instance of `DeferredNow`. 
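    ///
    /// # Example
    ///
    /// A minimal sketch, assuming the call happens before any other logging activity:
    ///
    /// ```rust,no_run
    /// flexi_logger::DeferredNow::force_utc();
    /// let _handle = flexi_logger::Logger::try_with_str("info").unwrap().start().unwrap();
    /// ```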
pub fn force_utc() { let mut cfg_force_utc = cfg_force_utc().lock().unwrap(); match *cfg_force_utc { Some(false) => { panic!("offset is already initialized not to enforce UTC"); } Some(true) => { // is already set, nothing to do } None => *cfg_force_utc = Some(true), } } } fn cfg_force_utc() -> &'static Mutex> { static CFG_FORCE_UTC: OnceLock>> = OnceLock::new(); CFG_FORCE_UTC.get_or_init(|| Mutex::new(None)) } fn use_utc() -> bool { let mut cfg_force_utc = cfg_force_utc().lock().unwrap(); if let Some(true) = *cfg_force_utc { true } else { if cfg_force_utc.is_none() { *cfg_force_utc = Some(false); } false } } #[cfg(test)] pub(crate) fn set_force_utc(b: bool) { let mut cfg_force_utc = cfg_force_utc().lock().unwrap(); *cfg_force_utc = Some(b); } #[cfg(test)] mod test { use crate::DeferredNow; use chrono::{ DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, SecondsFormat, TimeZone, Utc, }; #[test] fn test_timestamp_taken_only_once() { let mut deferred_now = super::DeferredNow::new(); let once = *deferred_now.now(); std::thread::sleep(std::time::Duration::from_millis(30)); let again = *deferred_now.now(); assert_eq!(once, again); println!("Now: {}", deferred_now.format("%Y-%m-%d %H:%M:%S%.6f %:z")); println!("Now: {}", once.format("%Y-%m-%d %H:%M:%S%.6f %:z")); println!("Now: {}", again.format("%Y-%m-%d %H:%M:%S%.6f %:z")); } fn utc_and_offset_timestamps() -> (DateTime, DateTime) { let naive_datetime = NaiveDateTime::new( NaiveDate::from_ymd_opt(2021, 4, 29).unwrap(), NaiveTime::from_hms_milli_opt(13, 14, 15, 678).unwrap(), ); ( Utc.from_local_datetime(&naive_datetime).unwrap(), FixedOffset::east_opt(3600) .unwrap() .from_local_datetime(&naive_datetime) .unwrap(), ) } fn get_deferred_nows() -> (DeferredNow, DeferredNow) { let (ts_utc, ts_plus1) = utc_and_offset_timestamps(); ( DeferredNow::new_from_datetime(ts_utc.into()), DeferredNow::new_from_datetime(ts_plus1.into()), ) } #[test] fn test_chrono_rfc3339() { let (ts_utc, ts_plus1) = utc_and_offset_timestamps(); assert_eq!( ts_utc.to_rfc3339_opts(SecondsFormat::Millis, true), "2021-04-29T13:14:15.678Z", ); assert_eq!( ts_plus1.to_rfc3339_opts(SecondsFormat::Millis, true), "2021-04-29T13:14:15.678+01:00", ); assert_eq!( ts_utc.to_rfc3339_opts(SecondsFormat::Millis, false), "2021-04-29T13:14:15.678+00:00", ); assert_eq!( ts_plus1.to_rfc3339_opts(SecondsFormat::Millis, false), "2021-04-29T13:14:15.678+01:00", ); } #[test] fn test_formats() { #[cfg(feature = "syslog_writer")] { log::info!("test rfc3164"); super::set_force_utc(true); let (mut dn1, mut dn2) = get_deferred_nows(); assert_eq!("Apr 29 13:14:15", &dn1.format_rfc3164()); assert_eq!("Apr 29 12:14:15", &dn2.format_rfc3164()); } log::info!("test rfc3339"); { // with local timestamps, offsets ≠ 0 are printed (except in Greenwich time zone): super::set_force_utc(false); let (mut dn1, mut dn2) = get_deferred_nows(); log::info!("2021-04-29T15:14:15.678+02:00, {}", &dn1.format_rfc3339()); log::info!("2021-04-29T14:14:15.678+02:00, {}", &dn2.format_rfc3339()); // with utc, the timestamps are normalized to offset 0 super::set_force_utc(true); let (mut dn1, mut dn2) = get_deferred_nows(); assert_eq!("2021-04-29T13:14:15.678+00:00", &dn1.format_rfc3339()); assert_eq!("2021-04-29T12:14:15.678+00:00", &dn2.format_rfc3339()); } } } flexi_logger-0.29.8/src/error_info.rs000064400000000000000000000064411046102023000157000ustar 00000000000000//! Error codes of `flexi_logger`. //! //! The following error codes are used to indicate the reason of an error. //! More details on them can be found here. 
//! //! ## `Write` //! //! Writing the log line to the output failed. //! //! Example: //! //! ```text //! [flexi_logger][ERRCODE::Write] writing log line failed, caused by Send //! ``` //! //! Possible reasons depend on the `WriteMode` and the output channel. //! //! With an asynchronous `WriteMode`, the root cause can be that the logger handle that was returned //! from the logger initialization was not assigned to a variable to keep it alive (see also //! [`Logger::start()`](https://docs.rs/flexi_logger/latest/flexi_logger/struct.Logger.html#method.start)). //! It is then dropped immediately, and in its `Drop` impl it cleans up all resources, //! including the asynchronous writer. So the next log output will fail with this error. //! //! ## `Flush` //! //! Explicit or automatic flushing of buffered log lines to the output failed. //! //! Example: //! //! ```text //! [flexi_logger][ERRCODE::Flush] flushing primary writer failed, caused by Send //! ``` //! //! For possible reasons, see [Write](#write). //! //! ## `Format` //! //! The chosen format function had produced an error. //! //! Example: //! //! ```text //! [flexi_logger][ERRCODE::Format] formatting failed, caused by ... //! ``` //! //! If this happens with one of `flexi_logger`s provided format functions, please open an issue. //! //! ## `Palette` //! //! This error is unexpected - please open an issue and describe your setup. //! //! ## `Poison` //! //! Log entries can be written by all threads of your program. Loggers thus must be thread-safe, //! by guarding their mutable parts with `Mutex`es, `RwLocks`, etc. In case that a thread panics //! while owning one of these locks, the lock is subsequently considered "poisoned". //! //! A typical root cause for this is some `panic!` in a `Debug` or `Display` implementation //! of a logged object. //! //! ## `LogFile` //! //! The `FileLogWriter` is not able to rotate the log file. The reason should be printed as well. //! //! ## `LogFileWatcher` //! //! The `FileLogWriter` is not able to watch the log file. The reason should be printed as well. //! //! ## `LogSpecFile` //! //! This error can only occur if you use `Logger::start_with_specfile`, where you specify a //! log-specification-file that you can edit, while the program is running, to influence //! which log lines it should write. //! //! Examples: //! //! ```text //! [flexi_logger][ERRCODE::LogSpecFile] continuing with previous log specification, //! because rereading the log specification file failed, caused by ... //! ``` //! //! The log-specification-file you chose with `Logger::start_with_specfile` cannot be opened, //! read, or successfully parsed. //! //! ```text //! [flexi_logger][ERRCODE::LogSpecFile] error while watching the specfile, caused by ... //! ``` //! //! Watching the log-specification-file failed. //! //! ## `Symlink` //! //! This error can only occur on unix systems, and when you use `Logger::create_symlink`, and //! indicates an issue with creating or replacing the symbolic link to the log file. //! //! ## `WriterSpec` //! //! The code uses in some log macro call the syntax to send the log line to a certain `LogWriter`, //! but this log writer does not exist. //! flexi_logger-0.29.8/src/filter.rs000064400000000000000000000040111046102023000150100ustar 00000000000000//! This module contains two traits which allow adding a stateful filter //! using [`Logger::filter`](crate::Logger::filter). //! //! # Example //! //! ```rust //! use flexi_logger::{ //! filter::{LogLineFilter, LogLineWriter}, //! 
DeferredNow, FlexiLoggerError, //! }; //! //! pub struct BarsOnly; //! impl LogLineFilter for BarsOnly { //! fn write( //! &self, //! now: &mut DeferredNow, //! record: &log::Record, //! log_line_writer: &dyn LogLineWriter, //! ) -> std::io::Result<()> { //! if record.args().to_string().contains("bar") { //! log_line_writer.write(now, record)?; //! } //! Ok(()) //! } //! } //! //! fn main() -> Result<(), FlexiLoggerError> { //! flexi_logger::Logger::try_with_str("info")? //! .filter(Box::new(BarsOnly)) //! .start()?; //! log::info!("barista"); //! log::info!("foo"); // will be swallowed by the filter //! log::info!("bar"); //! log::info!("gaga"); // will be swallowed by the filter //! Ok(()) //! } //! ``` use crate::DeferredNow; use log::Record; /// Trait of the filter object. #[allow(clippy::module_name_repetitions)] pub trait LogLineFilter { /// Each log line that `flexi_logger` would write to the configured output channel is /// sent to this method. /// /// Note that the log line only appears in the configured output channel if the /// filter implementation forwards it to the provided `LogLineWriter`. /// /// # Errors /// /// If writing to the configured output channel fails. fn write( &self, now: &mut DeferredNow, record: &Record, log_line_writer: &dyn LogLineWriter, ) -> std::io::Result<()>; } /// Write out a single log line pub trait LogLineWriter { /// Write out a log line to the configured output channel. /// /// # Errors /// /// If writing to the configured output channel fails. fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()>; } flexi_logger-0.29.8/src/flexi_error.rs000064400000000000000000000070531046102023000160540ustar 00000000000000use crate::log_specification::LogSpecification; // use std::backtrace::Backtrace; use thiserror::Error; /// Describes errors in the initialization of `flexi_logger`. #[non_exhaustive] #[derive(Error, Debug)] pub enum FlexiLoggerError { /// Chosen reset not possible. #[error("Chosen reset not possible")] Reset, /// Method not possible because duplication is not possible. #[error("Method not possible because duplication is not possible")] NoDuplication, /// Method not possible because no file logger is configured. #[error("Method not possible because no file logger is configured")] NoFileLogger, /// Log file cannot be written because the specified path is not a directory. #[error("Log file cannot be written because the specified path is not a directory")] OutputBadDirectory, /// Log file cannot be written because the specified path is a directory. #[error("Log file cannot be written because the specified path is a directory")] OutputBadFile, /// Spawning the cleanup thread failed. /// /// This error can safely be avoided with `Logger::cleanup_in_background_thread(false)`. #[error("Spawning the cleanup thread failed.")] OutputCleanupThread(std::io::Error), /// Log cannot be written, e.g. because the configured output directory is not accessible. #[error( "Log cannot be written, e.g. because the configured output directory is not accessible" )] OutputIo(#[from] std::io::Error), /// Parsing the configured logspec toml-file failed. #[error("Parsing the configured logspec toml-file failed")] #[cfg(feature = "specfile_without_notification")] #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] SpecfileToml(#[from] toml::de::Error), /// Specfile cannot be accessed or created. 
#[error("Specfile cannot be accessed or created")] #[cfg(feature = "specfile_without_notification")] #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] SpecfileIo(std::io::Error), /// Specfile has an unsupported extension. #[error("Specfile has an unsupported extension")] #[cfg(feature = "specfile_without_notification")] #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] SpecfileExtension(&'static str), /// Invalid level filter. #[error("Invalid level filter")] LevelFilter(String), /// Failed to parse log specification. /// /// The String contains a description of the error, the second parameter /// contains the resulting [`LogSpecification`] object #[error("Failed to parse log specification: {0}")] Parse(String, LogSpecification), /// Logger initialization failed. #[error("Logger initialization failed")] Log(#[from] log::SetLoggerError), /// Some synchronization object is poisoned. #[error("Some synchronization object is poisoned")] Poison, /// Palette parsing failed #[error("Palette parsing failed")] Palette(#[from] std::num::ParseIntError), /// Logger is shut down. #[cfg(feature = "async")] #[cfg_attr(docsrs, doc(cfg(feature = "async")))] #[error("Logger is shut down")] Shutdown(#[from] crossbeam_channel::SendError>), /// Tracing initialization failed. #[cfg(feature = "trc")] #[cfg_attr(docsrs, doc(cfg(feature = "trc")))] #[error("Tracing initialization failed")] TracingSetup(#[from] tracing::subscriber::SetGlobalDefaultError), } impl From for FlexiLoggerError { fn from(_other: std::convert::Infallible) -> FlexiLoggerError { unreachable!("lkjl,mnkjiu") } } flexi_logger-0.29.8/src/flexi_logger.rs000064400000000000000000000126711046102023000162040ustar 00000000000000use crate::{ filter::LogLineFilter, primary_writer::PrimaryWriter, util::{eprint_err, eprint_msg, ErrorCode}, writers::LogWriter, DeferredNow, LogSpecification, }; #[cfg(feature = "textfilter")] use regex::Regex; use std::collections::HashMap; use std::sync::{Arc, RwLock}; // Implements log::Log to plug into the log crate. // // Delegates the real logging to the configured PrimaryWriter and optionally to other writers. pub(crate) struct FlexiLogger { log_specification: Arc>, primary_writer: Arc, other_writers: Arc>>, filter: Option>, } impl FlexiLogger { pub fn new( log_specification: Arc>, primary_writer: Arc, other_writers: Arc>>, filter: Option>, ) -> Self { Self { log_specification, primary_writer, other_writers, filter, } } fn primary_enabled(&self, level: log::Level, module: &str) -> bool { self.log_specification .read() .map_err(|e| eprint_err(ErrorCode::Poison, "rwlock on log spec is poisoned", &e)) .unwrap() .enabled(level, module) } } impl log::Log for FlexiLogger { // If other writers are configured and the metadata target addresses them correctly, // - we should determine if the metadata-level is digested by any of the writers // (including the primary writer) // else we fall back to default behavior: // Return true if // - target is filled with module path and level is accepted by log specification // - target is filled with crap and ??? 
fn enabled(&self, metadata: &log::Metadata) -> bool { let target = metadata.target(); let level = metadata.level(); if !self.other_writers.is_empty() && target.starts_with('{') { // at least one other writer is configured _and_ addressed let targets: Vec<&str> = target[1..(target.len() - 1)].split(',').collect(); for t in targets { if t != "_Default" { match self.other_writers.get(t) { None => { eprint_msg(ErrorCode::WriterSpec, &format!("bad writer spec: {t}")); } Some(writer) => { if level < writer.max_log_level() { return true; } } } } } } self.primary_enabled(level, target) } fn log(&self, record: &log::Record) { let target = record.metadata().target(); let mut now = DeferredNow::new(); let special_target_is_used = target.starts_with('{'); if special_target_is_used { let mut use_default = false; let targets: Vec<&str> = target[1..(target.len() - 1)].split(',').collect(); for t in targets { if t == "_Default" { use_default = true; } else { match self.other_writers.get(t) { None => { eprint_msg(ErrorCode::WriterSpec, &format!("bad writer spec: {t}")); } Some(writer) => { writer.write(&mut now, record).unwrap_or_else(|e| { eprint_err( ErrorCode::Write, &format!("writing log line to custom writer \"{t}\" failed"), &e, ); }); } } } } if !use_default { return; } } let effective_target = if special_target_is_used { record.module_path().unwrap_or_default() } else { target }; if !self.primary_enabled(record.level(), effective_target) { return; } #[cfg(feature = "textfilter")] { // closure that we need below let check_text_filter = |text_filter: Option<&Regex>| { text_filter.map_or(true, |filter| filter.is_match(&record.args().to_string())) }; if !check_text_filter( self.log_specification.read().as_ref().unwrap(/* expose this? */).text_filter(), ) { return; } } if let Some(ref filter) = self.filter { filter.write(&mut now, record, &(*self.primary_writer)) } else { self.primary_writer.write(&mut now, record) } .unwrap_or_else(|e| { eprint_err(ErrorCode::Write, "writing log line failed", &e); }); } fn flush(&self) { self.primary_writer.flush().unwrap_or_else(|e| { eprint_err(ErrorCode::Flush, "flushing primary writer failed", &e); }); for writer in self.other_writers.values() { writer.flush().unwrap_or_else(|e| { eprint_err(ErrorCode::Flush, "flushing custom writer failed", &e); }); } } } flexi_logger-0.29.8/src/formats.rs000064400000000000000000000442011046102023000152030ustar 00000000000000use crate::DeferredNow; #[cfg(feature = "kv")] use log::kv::{self, Key, Value, VisitSource}; use log::Record; #[cfg(feature = "colors")] use nu_ansi_term::{Color, Style}; #[cfg(feature = "json")] use serde_derive::Serialize; #[cfg(feature = "kv")] use std::collections::BTreeMap; #[cfg(feature = "colors")] use std::sync::OnceLock; use std::thread; /// Time stamp format that is used by the provided format functions. 
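///
/// A timestamp rendered with this format looks like `2016-01-13 15:25:01.640870 +01:00`.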
pub const TS_DASHES_BLANK_COLONS_DOT_BLANK: &str = "%Y-%m-%d %H:%M:%S%.6f %:z"; // Helpers for printing key-value pairs #[cfg(feature = "kv")] fn write_key_value_pairs( w: &mut dyn std::io::Write, record: &Record<'_>, ) -> Result<(), std::io::Error> { if record.key_values().count() > 0 { write!(w, "{{")?; let mut kv_stream = KvStream(w, false); record.key_values().visit(&mut kv_stream).ok(); write!(w, "}} ")?; } Ok(()) } #[cfg(feature = "kv")] struct KvStream<'a>(&'a mut dyn std::io::Write, bool); #[cfg(feature = "kv")] impl<'kvs, 'a> log::kv::VisitSource<'kvs> for KvStream<'a> where 'kvs: 'a, { fn visit_pair( &mut self, key: log::kv::Key<'kvs>, value: log::kv::Value<'kvs>, ) -> Result<(), log::kv::Error> { if self.1 { write!(self.0, ", ")?; } write!(self.0, "{key}={value:?}")?; self.1 = true; Ok(()) } } /// A logline-formatter that produces log lines like
/// ```INFO [my_prog::some_submodule] Task successfully read from conf.json```. /// /// If the kv-feature is used, the log lines look like /// ```INFO [my_prog::some_submodule] {a=17, b="foo"} Task successfully read from conf.json``` /// /// # Errors /// /// See `std::write` pub fn default_format( w: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { write!( w, "{} [{}] ", record.level(), record.module_path().unwrap_or(""), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", record.args()) } /// A colored version of the logline-formatter `default_format`. /// /// It produces log lines like
/// ERROR [`my_prog::some_submodule`] File not found /// /// See method [`style`](crate::style) if you want to influence coloring. /// /// # Errors /// /// See `std::write` #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] pub fn colored_default_format( w: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let level = record.level(); write!( w, "{} [{}] ", style(level).paint(level.to_string()), record.module_path().unwrap_or(""), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", style(level).paint(record.args().to_string())) } /// A logline-formatter that produces log lines with timestamp and file location, like ///
/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [src/foo/bar:26] Task successfully read from conf.json``` ///
/// /// # Errors /// /// See `std::write` pub fn opt_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { write!( w, "[{}] {} [{}:{}] ", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), record.level(), record.file().unwrap_or(""), record.line().unwrap_or(0), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", &record.args()) } /// A colored version of the logline-formatter `opt_format`. /// /// See method [`style`](crate::style) if you want to influence coloring. /// /// # Errors /// /// See `std::write` #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] pub fn colored_opt_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let level = record.level(); write!( w, "[{}] {} [{}:{}] ", style(level).paint(now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK).to_string()), style(level).paint(level.to_string()), record.file().unwrap_or(""), record.line().unwrap_or(0), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", style(level).paint(record.args().to_string())) } /// A logline-formatter that produces log lines like
/// ```[2016-01-13 15:25:01.640870 +01:00] INFO [foo::bar] src/foo/bar.rs:26: Task successfully read from conf.json``` /// /// I.e. with timestamp, module path and file location. /// /// # Errors /// /// See `std::write` pub fn detailed_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { write!( w, "[{}] {} [{}] {}:{}: ", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), record.level(), record.module_path().unwrap_or(""), record.file().unwrap_or(""), record.line().unwrap_or(0), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", &record.args()) } /// A colored version of the logline-formatter `detailed_format`. /// /// See method [`style`](crate::style) if you want to influence coloring. /// /// # Errors /// /// See `std::write` #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] pub fn colored_detailed_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let level = record.level(); write!( w, "[{}] {} [{}] {}:{}: ", style(level).paint(now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK).to_string()), style(level).paint(record.level().to_string()), record.module_path().unwrap_or(""), record.file().unwrap_or(""), record.line().unwrap_or(0), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", style(level).paint(record.args().to_string())) } /// A logline-formatter that produces log lines like
/// ```[2016-01-13 15:25:01.640870 +01:00] T[taskreader] INFO [src/foo/bar:26] Task successfully read from conf.json``` /// /// I.e. with timestamp, thread name and file location. /// /// # Errors /// /// See `std::write` pub fn with_thread( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { write!( w, "[{}] T[{}] {} [{}:{}] ", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), thread::current().name().unwrap_or(""), record.level(), record.file().unwrap_or(""), record.line().unwrap_or(0), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", &record.args()) } /// A colored version of the logline-formatter `with_thread`. /// /// See method [`style`](crate::style) if you want to influence coloring. /// /// # Errors /// /// See `std::write` #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] pub fn colored_with_thread( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let level = record.level(); write!( w, "[{}] T[{}] {} [{}:{}] ", style(level).paint(now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK).to_string()), style(level).paint(thread::current().name().unwrap_or("")), style(level).paint(level.to_string()), record.file().unwrap_or(""), record.line().unwrap_or(0), )?; #[cfg(feature = "kv")] write_key_value_pairs(w, record)?; write!(w, "{}", style(level).paint(record.args().to_string())) } /// A logline-formatter that produces log lines in json format. /// /// # Errors /// /// See `std::write` #[cfg_attr(docsrs, doc(cfg(feature = "json")))] #[cfg(feature = "json")] pub fn json_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let current_thread = thread::current(); let logline = LogLine { level: record.level().as_str(), timestamp: now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK).to_string(), thread: current_thread.name(), module_path: record.module_path(), file: record.file(), line: record.line(), #[cfg(feature = "kv")] kv: { let key_values = record.key_values(); if key_values.count() > 0 { let mut collect = Collect(BTreeMap::new()); key_values.visit(&mut collect).ok(); Some(collect.0) } else { None } }, text: record.args(), }; write!( w, "{}", serde_json::to_string(&logline) .unwrap_or_else(|e| format!("serde_json::to_string() failed with {e}")) ) } #[cfg(feature = "json")] #[derive(Serialize)] struct LogLine<'a> { level: &'a str, timestamp: String, #[serde(skip_serializing_if = "Option::is_none")] thread: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] module_path: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] file: Option<&'a str>, #[serde(skip_serializing_if = "Option::is_none")] line: Option, #[serde(skip_serializing_if = "Option::is_none")] #[cfg(feature = "kv")] kv: Option, Value<'a>>>, text: &'a std::fmt::Arguments<'a>, } #[cfg(feature = "kv")] struct Collect<'kvs>(BTreeMap, Value<'kvs>>); #[cfg(feature = "kv")] impl<'kvs> VisitSource<'kvs> for Collect<'kvs> { fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), kv::Error> { self.0.insert(key, value); Ok(()) } } #[cfg(feature = "colors")] const DEFAULT_PALETTE: Palette = Palette::default(); #[cfg(feature = "colors")] static PALETTE: OnceLock = OnceLock::new(); #[cfg(feature = "colors")] fn palette() -> &'static Palette { PALETTE.get().unwrap_or(&DEFAULT_PALETTE) } // Overwrites the default PALETTE value either from the environment, if set, // or from the parameter, if filled. 
// Returns an error if parsing failed. #[cfg(feature = "colors")] pub(crate) fn set_palette(input: Option<&str>) -> Result<(), std::num::ParseIntError> { use crate::util::{eprint_msg, ErrorCode}; PALETTE .set(match std::env::var_os("FLEXI_LOGGER_PALETTE") { Some(ref env_osstring) => Palette::from(env_osstring.to_string_lossy().as_ref())?, None => match input { Some(input_string) => Palette::from(input_string)?, None => DEFAULT_PALETTE, }, }) .map_err(|_palette| { eprint_msg( ErrorCode::Palette, "Failed to initialize the palette, as it is already initialized", ); }) .ok(); Ok(()) } /// Helper function that is used in the provided coloring format functions to apply /// colors based on the log level and the effective color palette. /// /// See [`Logger::set_palette`](crate::Logger::set_palette) if you want to /// modify the color palette. #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] #[must_use] pub fn style(level: log::Level) -> Style { match level { log::Level::Error => palette().error, log::Level::Warn => palette().warn, log::Level::Info => palette().info, log::Level::Debug => palette().debug, log::Level::Trace => palette().trace, } } #[cfg(feature = "colors")] const fn default_style() -> Style { Style { foreground: None, background: None, is_bold: false, is_dimmed: false, is_italic: false, is_underline: false, is_blink: false, is_reverse: false, is_hidden: false, is_strikethrough: false, prefix_with_reset: false, } } #[cfg(feature = "colors")] #[derive(Debug)] struct Palette { pub error: Style, pub warn: Style, pub info: Style, pub debug: Style, pub trace: Style, } #[cfg(feature = "colors")] impl Palette { const fn default() -> Palette { Palette { error: default_style().fg(Color::Fixed(196)), warn: default_style().fg(Color::Fixed(208)), info: default_style(), debug: default_style().fg(Color::Fixed(27)), trace: default_style().fg(Color::Fixed(8)), } } fn from(palette_string: &str) -> Result { let mut items = palette_string.split(';'); Ok(Palette { error: parse_style(items.next().unwrap_or("196").trim())?, warn: parse_style(items.next().unwrap_or("208").trim())?, info: parse_style(items.next().unwrap_or("-").trim())?, debug: parse_style(items.next().unwrap_or("27").trim())?, trace: parse_style(items.next().unwrap_or("8").trim())?, }) } } #[cfg(feature = "colors")] fn parse_style(input: &str) -> Result { Ok(if input == "-" { Style::new() } else { match input.strip_prefix('b') { None => Style::new().fg(Color::Fixed(input.parse()?)), Some(s) => Style::new().bold().fg(Color::Fixed(s.parse()?)), } }) } /// Helps to use coloring only if the output goes to a tty. /// /// Is used in /// [`Logger::adaptive_format_for_stderr`](crate::Logger::adaptive_format_for_stderr) and /// [`Logger::adaptive_format_for_stdout`](crate::Logger::adaptive_format_for_stdout), /// which switch off coloring if the output is not going to a tty but is piped into another /// program, because then color control byte sequences are usually not expected. #[derive(Clone, Copy)] pub enum AdaptiveFormat { /// Chooses between [`default_format`](crate::default_format) /// and [`colored_default_format`](crate::colored_default_format). #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] Default, /// Chooses between [`detailed_format`](crate::detailed_format) /// and [`colored_detailed_format`](crate::colored_detailed_format). 
#[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] Detailed, /// Chooses between [`opt_format`](crate::opt_format) /// and [`colored_opt_format`](crate::colored_opt_format). #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] Opt, /// Chooses between [`with_thread`](crate::with_thread) /// and [`colored_with_thread`](crate::colored_with_thread). #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] WithThread, /// Chooses between the first format function (which is supposed to be uncolored) /// and the second (which is supposed to be colored). /// /// Allows providing own format functions, with freely choosable coloring technique, /// _and_ making use of the tty detection. Custom(FormatFunction, FormatFunction), } impl AdaptiveFormat { #[must_use] pub(crate) fn format_function(self, is_tty: bool) -> FormatFunction { if is_tty { match self { #[cfg(feature = "colors")] Self::Default => colored_default_format, #[cfg(feature = "colors")] Self::Detailed => colored_detailed_format, #[cfg(feature = "colors")] Self::Opt => colored_opt_format, #[cfg(feature = "colors")] Self::WithThread => colored_with_thread, Self::Custom(_, colored) => colored, } } else { match self { #[cfg(feature = "colors")] Self::Default => default_format, #[cfg(feature = "colors")] Self::Detailed => detailed_format, #[cfg(feature = "colors")] Self::Opt => opt_format, #[cfg(feature = "colors")] Self::WithThread => with_thread, Self::Custom(uncolored, _) => uncolored, } } } } /// Function type for format functions. /// /// If you want to write the log lines in your own format, /// implement a function with this signature and provide it to one of the methods /// [`Logger::format()`](crate::Logger::format), /// [`Logger::format_for_files()`](crate::Logger::format_for_files), /// [`Logger::format_for_stdout()`](crate::Logger::format_for_stdout), /// or [`Logger::format_for_stderr()`](crate::Logger::format_for_stderr). /// /// Check out the code of the provided [format functions](index.html#functions) /// if you want to start with a template. /// /// ## Parameters /// /// - `write`: the output stream /// /// - `now`: the timestamp that you should use if you want a timestamp to appear in the log line /// /// - `record`: the log line's content and metadata, as provided by the log crate's macros. 
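///
/// ## Example
///
/// A minimal sketch of an own format function (`my_format` is only an illustrative name,
/// not part of the crate):
///
/// ```rust
/// use flexi_logger::{DeferredNow, Record};
///
/// fn my_format(
///     w: &mut dyn std::io::Write,
///     now: &mut DeferredNow,
///     record: &Record,
/// ) -> Result<(), std::io::Error> {
///     write!(
///         w,
///         "[{}] {}: {}",
///         now.format_rfc3339(),
///         record.level(),
///         record.args()
///     )
/// }
/// ```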
/// pub type FormatFunction = fn( write: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error>; #[cfg(test)] mod test { use crate::DeferredNow; #[test] fn test_opt_format() { let mut buf = Vec::::new(); let w = &mut buf; let mut now = DeferredNow::new(); let record = log::Record::builder() .file(Some("a")) .line(Some(1)) .args(format_args!("test message")) .build(); super::opt_format(w, &mut now, &record).unwrap(); // [2016-01-13 15:25:01.640870 +01:00] assert_eq!(buf[0], b'['); assert_eq!(buf[5], b'-'); assert_eq!(buf[8], b'-'); assert_eq!(buf[11], b' '); assert_eq!(buf[14], b':'); assert_eq!(buf[17], b':'); assert_eq!(buf[20], b'.'); assert_eq!(buf[27], b' '); assert_eq!(buf[28], b'+'); assert_eq!(buf[31], b':'); assert_eq!(buf[34], b']'); let s = String::from_utf8(buf[35..].to_vec()).unwrap(); assert_eq!(s.as_str(), " INFO [a:1] test message"); println!("s: {s}"); } } flexi_logger-0.29.8/src/lib.rs000064400000000000000000000075161046102023000143060ustar 00000000000000// only enables the `doc_cfg` feature when the `docsrs` configuration attribute is defined #![cfg_attr(docsrs, feature(doc_cfg))] #![deny(missing_docs)] #![deny(clippy::all)] #![deny(clippy::pedantic)] #![forbid(unsafe_code)] //! A flexible and easy-to-use logger that writes logs to stderr and/or to files //! or other output streams. //! //! To read the log specification from an environment variable and get the log written to `stderr`, //! start `flexi_logger` e.g. like this: //! ```rust //! flexi_logger::Logger::try_with_env().unwrap().start().unwrap(); //! ``` //! //! See //! //! * The builder [`Logger`] for a full description of all configuration options, //! * module [`code_examples`] for various concrete examples of `flexi_logger` initialization //! * the module [`writers`] for the usage of additional log writers, //! * and [the README](https://crates.io/crates/flexi_logger) for how to get started. //! //! There are configuration options to e.g. //! //! * decide whether you want to write your logs to stderr or to a file, //! * configure the path and the filenames of the log files, //! * use file rotation, //! * specify the line format for the log lines, //! * apply a stateful filter before log lines are really written, //! * define additional log output streams, e.g for alert or security messages, //! * support changing the log specification while the program is running, //! //! `flexi_logger` uses a similar syntax as [`env_logger`](http://crates.io/crates/env_logger/) //! for specifying which logs should really be written (but is more graceful with the syntax, //! and can provide error information). //! //! By default, i.e. if feature `colors` is not switched off, the log lines that appear on your //! terminal are coloured. In case the chosen colors don't fit to your terminal's color theme, //! you can adapt the colors to improve readability. //! See the documentation of method [`Logger::set_palette`] //! for a description how this can be done. 
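//!
//! As a minimal sketch, the palette can also be overridden without code changes via the
//! `FLEXI_LOGGER_PALETTE` environment variable (the value shown here mirrors the default palette):
//!
//! ```rust,no_run
//! // entries for error;warn;info;debug;trace: a 256-color number, `b<nr>` for bold, or `-` for default
//! std::env::set_var("FLEXI_LOGGER_PALETTE", "196;208;-;27;8");
//! let _handle = flexi_logger::Logger::try_with_str("info").unwrap().start().unwrap();
//! ```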
mod deferred_now; mod flexi_error; mod flexi_logger; mod formats; mod log_specification; mod logger; mod logger_handle; mod parameters; mod primary_writer; mod threads; #[cfg(feature = "trc")] #[cfg_attr(docsrs, doc(cfg(feature = "trc")))] pub mod trc; mod write_mode; pub mod code_examples; pub mod filter; mod util; pub mod writers; pub mod error_info; pub(crate) use crate::write_mode::EffectiveWriteMode; #[cfg(feature = "async")] pub use crate::write_mode::{DEFAULT_MESSAGE_CAPA, DEFAULT_POOL_CAPA}; pub use crate::{ deferred_now::DeferredNow, flexi_error::FlexiLoggerError, formats::*, log_specification::{LogSpecBuilder, LogSpecification, ModuleFilter}, logger::{Duplicate, ErrorChannel, Logger}, logger_handle::{LogfileSelector, LoggerHandle}, parameters::{Age, Cleanup, Criterion, FileSpec, Naming}, write_mode::{WriteMode, DEFAULT_BUFFER_CAPACITY, DEFAULT_FLUSH_INTERVAL}, }; /// Re-exports from log crate pub use log::{Level, LevelFilter, Record}; pub(crate) const ZERO_DURATION: std::time::Duration = std::time::Duration::from_secs(0); /// Shortest form to get started. /// /// `flexi_logger::init();`. /// /// Equivalent to /// ```rust /// # use flexi_logger::{Logger,LogSpecification}; /// Logger::try_with_env_or_str("info") /// .unwrap_or_else(|_e| Logger::with(LogSpecification::info())) /// .log_to_stderr() /// .start() /// .ok(); /// ``` /// that means, /// /// - you configure the log specification via the environment variable `RUST_LOG`, /// or use the default log specification (`'info'`) /// - logs are directly written to `stderr`, without any buffering, so implicitly dropping the /// `LogHandle` (which is returned from `Logger::start()`) is ok. pub fn init() { Logger::try_with_env_or_str("info") .unwrap_or_else(|_e| Logger::with(LogSpecification::info())) .log_to_stderr() .start() .ok(); } flexi_logger-0.29.8/src/log_specification.rs000064400000000000000000001054501046102023000172150ustar 00000000000000use crate::flexi_error::FlexiLoggerError; use crate::LevelFilter; #[cfg(feature = "textfilter")] use regex::Regex; use std::{collections::HashMap, env}; /// /// Immutable struct that defines which loglines are to be written, /// based on the module, the log level, and the text. /// /// Providing the loglevel specification via `String` /// ([`LogSpecification::parse`] and [`LogSpecification::env`]) /// works essentially like with `env_logger`, /// but we are a bit more tolerant with spaces. Its functionality can be /// described with some Backus-Naur-form: /// /// ```text /// ::= single_log_level_spec[{,single_log_level_spec}][/] /// ::= ||= /// ::= /// ``` /// /// * Examples: /// /// * `"info"`: all logs with info, warn, or error level are written /// * `"crate1"`: all logs of this crate are written, but nothing else /// * `"warn, crate2::mod_a=debug, mod_x::mod_y=trace"`: all crates log warnings and errors, /// `mod_a` additionally debug messages, and `mod_x::mod_y` is fully traced /// /// * If you just specify the module, without `log_level`, all levels will be traced for this /// module. /// * If you just specify a log level, this will be applied as default to all modules without /// explicit log level assigment. /// (You see that for modules named error, warn, info, debug or trace, /// it is necessary to specify their loglevel explicitly). /// * The module names are compared as Strings, with the side effect that a specified module filter /// affects all modules whose name starts with this String.
/// Example: `"foo"` affects e.g. /// /// * `foo` /// * `foo::bar` /// * `foobaz` (!) /// * `foobaz::bar` (!) /// /// The optional text filter is applied for all modules. /// /// Note that external module names are to be specified like in ```"extern crate ..."```, i.e., /// for crates with a dash in their name this means: the dash is to be replaced with /// the underscore (e.g. ```karl_heinz```, not ```karl-heinz```). /// See /// [https://github.com/rust-lang/rfcs/pull/940/files](https://github.com/rust-lang/rfcs/pull/940/files) /// for an explanation of the different naming conventions in Cargo (packages allow hyphen) and /// rustc (“extern crate” does not allow hyphens). #[derive(Clone, Debug, Default)] pub struct LogSpecification { module_filters: Vec, #[cfg(feature = "textfilter")] textfilter: Option>, } /// Defines which loglevel filter to use for the specified module. /// /// A `ModuleFilter`, whose `module_name` is not set, describes the default loglevel filter. #[derive(Clone, Debug, Eq, PartialEq)] pub struct ModuleFilter { /// The module name. pub module_name: Option, /// The level filter. pub level_filter: LevelFilter, } impl LogSpecification { pub(crate) fn update_from(&mut self, other: Self) { self.module_filters = other.module_filters; #[cfg(feature = "textfilter")] { self.textfilter = other.textfilter; } } pub(crate) fn max_level(&self) -> log::LevelFilter { self.module_filters .iter() .map(|d| d.level_filter) .max() .unwrap_or(log::LevelFilter::Off) } /// Returns a `LogSpecification` where all log output is switched off. #[must_use] pub fn off() -> Self { Self::default() } /// Returns a `LogSpecification` where the global tracelevel is set to `LevelFilter::Error`. #[must_use] pub fn error() -> Self { Self::new_with(LevelFilter::Error) } /// Returns a `LogSpecification` where the global tracelevel is set to `LevelFilter::Warn`. #[must_use] pub fn warn() -> Self { Self::new_with(LevelFilter::Warn) } /// Returns a `LogSpecification` where the global tracelevel is set to `LevelFilter::Info`. #[must_use] pub fn info() -> Self { Self::new_with(LevelFilter::Info) } /// Returns a `LogSpecification` where the global tracelevel is set to `LevelFilter::Debug`. #[must_use] pub fn debug() -> Self { Self::new_with(LevelFilter::Debug) } /// Returns a `LogSpecification` where the global tracelevel is set to `LevelFilter::Trace`. #[must_use] pub fn trace() -> Self { Self::new_with(LevelFilter::Trace) } #[must_use] fn new_with(level_filter: LevelFilter) -> Self { Self { module_filters: vec![ModuleFilter { module_name: None, level_filter, }], #[cfg(feature = "textfilter")] textfilter: None, } } /// Returns a log specification from a String. /// /// # Errors /// /// [`FlexiLoggerError::Parse`] if the input is malformed. 
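    ///
    /// # Example
    ///
    /// A minimal sketch (the module names are only illustrative):
    ///
    /// ```rust
    /// use flexi_logger::{Level, LogSpecification};
    ///
    /// let spec = LogSpecification::parse("info, my_module::sub = debug").unwrap();
    /// assert!(spec.enabled(Level::Debug, "my_module::sub"));
    /// assert!(!spec.enabled(Level::Debug, "other_module"));
    /// ```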
pub fn parse>(spec: S) -> Result { let mut parse_errs = String::new(); let mut dirs = Vec::::new(); let spec = spec.as_ref(); let mut parts = spec.split('/'); let mods = parts.next(); #[cfg(feature = "textfilter")] let filter = parts.next(); if parts.next().is_some() { push_err( &format!("invalid log spec '{spec}' (too many '/'s), ignoring it"), &mut parse_errs, ); return parse_err(parse_errs, Self::off()); } if let Some(m) = mods { for s in m.split(',') { let s = s.trim(); if s.is_empty() { continue; } let mut parts = s.split('='); let (log_level, name) = match ( parts.next().map(str::trim), parts.next().map(str::trim), parts.next(), ) { (Some(part_0), None, None) => { if contains_whitespace(part_0, &mut parse_errs) { continue; } // if the single argument is a log-level string or number, // treat that as a global fallback setting match parse_level_filter(part_0.trim()) { Ok(num) => (num, None), Err(_) => (LevelFilter::max(), Some(part_0)), } } (Some(part_0), Some(""), None) => { if contains_whitespace(part_0, &mut parse_errs) { continue; } (LevelFilter::max(), Some(part_0)) } (Some(part_0), Some(part_1), None) => { if contains_whitespace(part_0, &mut parse_errs) { continue; } match parse_level_filter(part_1.trim()) { Ok(num) => (num, Some(part_0.trim())), Err(e) => { push_err(&e.to_string(), &mut parse_errs); continue; } } } _ => { push_err( &format!("invalid part in log spec '{s}', ignoring it"), &mut parse_errs, ); continue; } }; dirs.push(ModuleFilter { module_name: name.map(ToString::to_string), level_filter: log_level, }); } } #[cfg(feature = "textfilter")] let textfilter = filter.and_then(|filter| match Regex::new(filter) { Ok(re) => Some(Box::new(re)), Err(e) => { push_err(&format!("invalid regex filter - {e}"), &mut parse_errs); None } }); let logspec = Self { module_filters: dirs.level_sort(), #[cfg(feature = "textfilter")] textfilter, }; if parse_errs.is_empty() { Ok(logspec) } else { parse_err(parse_errs, logspec) } } /// Returns a log specification based on the value of the environment variable `RUST_LOG`, /// or an empty one. /// /// # Errors /// /// [`FlexiLoggerError::Parse`] if the input is malformed. pub fn env() -> Result { match env::var("RUST_LOG") { Ok(spec) => Self::parse(spec), Err(..) => Ok(Self::off()), } } /// Returns a log specification based on the value of the environment variable `RUST_LOG`, /// if it exists and can be parsed, or on the given String. /// /// # Errors /// /// [`FlexiLoggerError::Parse`] if the given spec is malformed. pub fn env_or_parse>(given_spec: S) -> Result { env::var("RUST_LOG") .map_err(|_e| FlexiLoggerError::Poison /*wrong, but only dummy*/) .and_then(Self::parse) .or_else(|_| Self::parse(given_spec.as_ref())) } /// Creates a [`LogSpecBuilder`], which allows building a log spec programmatically. #[must_use] pub fn builder() -> LogSpecBuilder { LogSpecBuilder::new() } /// Reads a log specification from an appropriate toml document. /// /// This method is only avaible with feature `specfile`. /// /// # Errors /// /// [`FlexiLoggerError::Parse`] if the input is malformed. 
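    ///
    /// # Example
    ///
    /// A sketch of the expected document shape, mirroring what `to_toml` writes
    /// (the module names are only illustrative, and `global_pattern` is optional):
    ///
    /// ```toml
    /// global_level = 'info'
    /// # global_pattern = 'foo'
    ///
    /// [modules]
    /// 'my_module' = 'debug'
    /// 'my_module::sub' = 'trace'
    /// ```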
#[cfg(feature = "specfile_without_notification")] #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] pub fn from_toml>(s: S) -> Result { #[derive(Clone, Debug, serde_derive::Deserialize)] struct LogSpecFileFormat { pub global_level: Option, pub global_pattern: Option, pub modules: Option>, } let s = s.as_ref(); let logspec_ff: LogSpecFileFormat = toml::from_str(s)?; let mut parse_errs = String::new(); let mut module_filters = Vec::::new(); if let Some(s) = logspec_ff.global_level { module_filters.push(ModuleFilter { module_name: None, level_filter: parse_level_filter(s)?, }); } for (k, v) in logspec_ff.modules.unwrap_or_default() { module_filters.push(ModuleFilter { module_name: Some(k), level_filter: parse_level_filter(v)?, }); } #[cfg(feature = "textfilter")] let textfilter = match logspec_ff.global_pattern { None => None, Some(s) => match Regex::new(&s) { Ok(re) => Some(Box::new(re)), Err(e) => { push_err(&format!("invalid regex filter - {e}"), &mut parse_errs); None } }, }; let logspec = Self { module_filters: module_filters.level_sort(), #[cfg(feature = "textfilter")] textfilter, }; if parse_errs.is_empty() { Ok(logspec) } else { parse_err(parse_errs, logspec) } } /// Serializes itself in toml format. /// /// This method is only avaible with feature `specfile`. /// /// # Errors /// /// [`FlexiLoggerError::SpecfileIo`] if writing fails. #[cfg(feature = "specfile_without_notification")] #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] pub fn to_toml(&self, w: &mut dyn std::io::Write) -> Result<(), FlexiLoggerError> { self.to_toml_impl(w).map_err(FlexiLoggerError::SpecfileIo) } #[cfg(feature = "specfile_without_notification")] fn to_toml_impl(&self, w: &mut dyn std::io::Write) -> Result<(), std::io::Error> { w.write_all(b"### Optional: Default log level\n")?; let last = self.module_filters.last(); match last { Some(last_v) if last_v.module_name.is_none() => { w.write_all( format!( "global_level = '{}'\n", last_v.level_filter.to_string().to_lowercase() ) .as_bytes(), )?; } _ => { w.write_all(b"#global_level = 'info'\n")?; } } w.write_all( b"\n### Optional: specify a regular expression to suppress all messages that don't match\n", )?; w.write_all(b"#global_pattern = 'foo'\n")?; w.write_all( b"\n### Specific log levels per module are optionally defined in this section\n", )?; w.write_all(b"[modules]\n")?; if self.module_filters.is_empty() || self.module_filters[0].module_name.is_none() { w.write_all(b"#'mod1' = 'warn'\n")?; w.write_all(b"#'mod2' = 'debug'\n")?; w.write_all(b"#'mod2::mod3' = 'trace'\n")?; } for mf in &self.module_filters { if let Some(ref name) = mf.module_name { w.write_all( format!( "'{}' = '{}'\n", name, mf.level_filter.to_string().to_lowercase() ) .as_bytes(), )?; } } Ok(()) } /// Returns true if messages on the specified level from the writing module should be written. #[must_use] pub fn enabled(&self, level: log::Level, writing_module: &str) -> bool { // Search for the longest match, the vector is assumed to be pre-sorted. for module_filter in &self.module_filters { match module_filter.module_name { Some(ref module_name) => { if writing_module.starts_with(module_name) { return level <= module_filter.level_filter; } } None => return level <= module_filter.level_filter, } } false } /// Provides a reference to the module filters. #[must_use] pub fn module_filters(&self) -> &Vec { &self.module_filters } /// Provides a reference to the text filter. /// /// This method is only avaible if the default feature `textfilter` is not switched off. 
#[cfg(feature = "textfilter")] #[must_use] pub fn text_filter(&self) -> Option<&Regex> { self.textfilter.as_deref() } } impl std::fmt::Display for LogSpecification { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut write_comma = false; // Optional: Default log level if let Some(last) = self.module_filters.last() { if last.module_name.is_none() { write!(f, "{}", last.level_filter.to_string().to_lowercase())?; write_comma = true; } } // TODO: global_pattern is not modelled into String representation, only into yaml file // Optional: specify a regular expression to suppress all messages that don't match // w.write_all(b"#global_pattern = 'foo'\n")?; // Specific log levels per module for mf in &self.module_filters { if let Some(ref name) = mf.module_name { if write_comma { write!(f, ", ")?; } write!(f, "{name} = {}", mf.level_filter.to_string().to_lowercase())?; write_comma = true; } } Ok(()) } } impl std::convert::TryFrom<&str> for LogSpecification { type Error = FlexiLoggerError; fn try_from(value: &str) -> Result { LogSpecification::parse(value) } } impl std::convert::TryFrom<&String> for LogSpecification { type Error = FlexiLoggerError; fn try_from(value: &String) -> Result { LogSpecification::parse(value) } } impl From for LogSpecification { fn from(value: LevelFilter) -> Self { match value { LevelFilter::Error => LogSpecification::error(), LevelFilter::Warn => LogSpecification::warn(), LevelFilter::Info => LogSpecification::info(), LevelFilter::Debug => LogSpecification::debug(), LevelFilter::Trace => LogSpecification::trace(), LevelFilter::Off => LogSpecification::off(), } } } fn push_err(s: &str, parse_errs: &mut String) { if !parse_errs.is_empty() { parse_errs.push_str("; "); } parse_errs.push_str(s); } fn parse_err( errors: String, logspec: LogSpecification, ) -> Result { Err(FlexiLoggerError::Parse(errors, logspec)) } fn parse_level_filter>(s: S) -> Result { match s.as_ref().to_lowercase().as_ref() { "off" => Ok(LevelFilter::Off), "error" => Ok(LevelFilter::Error), "warn" => Ok(LevelFilter::Warn), "info" => Ok(LevelFilter::Info), "debug" => Ok(LevelFilter::Debug), "trace" => Ok(LevelFilter::Trace), _ => Err(FlexiLoggerError::LevelFilter(format!( "unknown level filter: {}", s.as_ref() ))), } } fn contains_whitespace(s: &str, parse_errs: &mut String) -> bool { let result = s.chars().any(char::is_whitespace); if result { push_err( &format!("ignoring invalid part in log spec '{s}' (contains a whitespace)"), parse_errs, ); } result } #[allow(clippy::needless_doctest_main)] /// Builder for [`LogSpecification`]. /// /// # Example /// /// Start with a programmatically built log specification, and use the /// [`LoggerHandle`](crate::LoggerHandle) to apply a modified version of the log specification /// at a later point in time: /// /// ```rust /// use flexi_logger::{Logger, LogSpecification}; /// use log::LevelFilter; /// /// fn main() { /// // Build the initial log specification /// let mut builder = LogSpecification::builder(); /// builder /// .default(LevelFilter::Info) /// .module("karl", LevelFilter::Debug); /// /// // Initialize Logger, keep builder alive /// let mut logger = Logger::with(builder.build()) /// // your logger configuration goes here, as usual /// .start() /// .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); /// /// // ... 
/// /// // Modify builder and update the logger /// builder /// .default(LevelFilter::Error) /// .remove("karl") /// .module("emma", LevelFilter::Trace); /// /// logger.set_new_spec(builder.build()); /// /// // ... /// } /// ``` #[derive(Clone, Debug, Default)] pub struct LogSpecBuilder { module_filters: HashMap, LevelFilter>, } impl LogSpecBuilder { /// Creates a `LogSpecBuilder` with all logging turned off. #[must_use] pub fn new() -> Self { let mut modfilmap = HashMap::new(); modfilmap.insert(None, LevelFilter::Off); Self { module_filters: modfilmap, } } /// Creates a `LogSpecBuilder` from given module filters. #[must_use] pub fn from_module_filters(module_filters: &[ModuleFilter]) -> Self { let mut modfilmap = HashMap::new(); for mf in module_filters { modfilmap.insert(mf.module_name.clone(), mf.level_filter); } Self { module_filters: modfilmap, } } /// Adds a default log level filter, or updates the default log level filter. pub fn default(&mut self, lf: LevelFilter) -> &mut Self { self.module_filters.insert(None, lf); self } /// Adds a log level filter, or updates the log level filter, for a module. pub fn module>(&mut self, module_name: M, lf: LevelFilter) -> &mut Self { self.module_filters .insert(Some(module_name.as_ref().to_owned()), lf); self } /// Adds a log level filter, or updates the log level filter, for a module. pub fn remove>(&mut self, module_name: M) -> &mut Self { self.module_filters .remove(&Some(module_name.as_ref().to_owned())); self } /// Adds log level filters from a `LogSpecification`. pub fn insert_modules_from(&mut self, other: LogSpecification) -> &mut Self { for module_filter in other.module_filters { self.module_filters .insert(module_filter.module_name, module_filter.level_filter); } self } /// Creates a log specification without text filter. #[must_use] pub fn finalize(self) -> LogSpecification { LogSpecification { module_filters: self.module_filters.into_vec_module_filter(), #[cfg(feature = "textfilter")] textfilter: None, } } /// Creates a log specification with text filter. /// /// This method is only avaible if the dafault feature `textfilter` is not switched off. #[cfg(feature = "textfilter")] #[must_use] pub fn finalize_with_textfilter(self, tf: Regex) -> LogSpecification { LogSpecification { module_filters: self.module_filters.into_vec_module_filter(), textfilter: Some(Box::new(tf)), } } /// Creates a log specification without being consumed. #[must_use] pub fn build(&self) -> LogSpecification { LogSpecification { module_filters: self.module_filters.clone().into_vec_module_filter(), #[cfg(feature = "textfilter")] textfilter: None, } } /// Creates a log specification without being consumed, optionally with a text filter. /// /// This method is only avaible if the dafault feature `textfilter` is not switched off. 
#[cfg(feature = "textfilter")] #[cfg_attr(docsrs, doc(cfg(feature = "textfilter")))] #[must_use] pub fn build_with_textfilter(&self, tf: Option) -> LogSpecification { LogSpecification { module_filters: self.module_filters.clone().into_vec_module_filter(), textfilter: tf.map(Box::new), } } } trait IntoVecModuleFilter { fn into_vec_module_filter(self) -> Vec; } impl IntoVecModuleFilter for HashMap, LevelFilter> { fn into_vec_module_filter(self) -> Vec { let mf: Vec = self .into_iter() .map(|(k, v)| ModuleFilter { module_name: k, level_filter: v, }) .collect(); mf.level_sort() } } trait LevelSort { fn level_sort(self) -> Vec; } impl LevelSort for Vec { /// Sort the module filters by length of their name, /// this allows a little more efficient lookup at runtime. fn level_sort(mut self) -> Vec { self.sort_by(|a, b| { let a_len = a.module_name.as_ref().map_or(0, String::len); let b_len = b.module_name.as_ref().map_or(0, String::len); b_len.cmp(&a_len) }); self } } #[cfg(test)] mod tests { use crate::LogSpecification; use log::{Level, LevelFilter}; #[test] fn parse_roundtrip() { let ss = [ "crate1::mod1 = error, crate1::mod2 = trace, crate2 = debug", "debug, crate1::mod2 = trace, crate2 = error", ]; for s in &ss { let spec = LogSpecification::parse(s).unwrap(); assert_eq!(*s, spec.to_string().as_str()); } assert_eq!("", LogSpecification::default().to_string().as_str()); } #[test] fn parse_logging_spec_valid() { let spec = LogSpecification::parse("crate1::mod1 = error, crate1::mod2, crate2 = debug").unwrap(); assert_eq!(spec.module_filters().len(), 3); assert_eq!( spec.module_filters()[0].module_name, Some("crate1::mod1".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); assert_eq!( spec.module_filters()[1].module_name, Some("crate1::mod2".to_string()) ); assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); assert_eq!( spec.module_filters()[2].module_name, Some("crate2".to_string()) ); assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); #[cfg(feature = "textfilter")] assert!(spec.text_filter().is_none()); } #[test] fn parse_logging_spec_invalid_crate() { // test parse_logging_spec with multiple = in specification assert!(LogSpecification::parse("crate1::mod1=warn=info,crate2=debug").is_err()); } #[test] fn parse_logging_spec_wrong_log_level() { assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=warn").is_err()); } #[test] fn parse_logging_spec_empty_log_level() { assert!(LogSpecification::parse("crate1::mod1=wrong, crate2=").is_err()); } #[test] fn parse_logging_spec_global() { let spec = LogSpecification::parse("warn,crate2=debug").unwrap(); assert_eq!(spec.module_filters().len(), 2); assert_eq!(spec.module_filters()[1].module_name, None); assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Warn); assert_eq!( spec.module_filters()[0].module_name, Some("crate2".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Debug); #[cfg(feature = "textfilter")] assert!(spec.text_filter().is_none()); } #[test] #[cfg(feature = "textfilter")] fn parse_logging_spec_valid_filter() { let spec = LogSpecification::parse(" crate1::mod1 = error , crate1::mod2,crate2=debug/abc") .unwrap(); assert_eq!(spec.module_filters().len(), 3); assert_eq!( spec.module_filters()[0].module_name, Some("crate1::mod1".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Error); assert_eq!( spec.module_filters()[1].module_name, Some("crate1::mod2".to_string()) ); 
assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::max()); assert_eq!( spec.module_filters()[2].module_name, Some("crate2".to_string()) ); assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Debug); assert!( spec.text_filter().is_some() && spec.text_filter().as_ref().unwrap().to_string() == "abc" ); } #[test] fn parse_logging_spec_invalid_crate_filter() { assert!(LogSpecification::parse("crate1::mod1=error=warn,crate2=debug/a.c").is_err()); } #[test] #[cfg(feature = "textfilter")] fn parse_logging_spec_empty_with_filter() { let spec = LogSpecification::parse("crate1/a*c").unwrap(); assert_eq!(spec.module_filters().len(), 1); assert_eq!( spec.module_filters()[0].module_name, Some("crate1".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::max()); assert!( spec.text_filter().is_some() && spec.text_filter().as_ref().unwrap().to_string() == "a*c" ); } #[test] fn reuse_logspec_builder() { let mut builder = crate::LogSpecBuilder::new(); builder.default(LevelFilter::Info); builder.module("carlo", LevelFilter::Debug); builder.module("toni", LevelFilter::Warn); let spec1 = builder.build(); assert_eq!( spec1.module_filters()[0].module_name, Some("carlo".to_string()) ); assert_eq!(spec1.module_filters()[0].level_filter, LevelFilter::Debug); assert_eq!( spec1.module_filters()[1].module_name, Some("toni".to_string()) ); assert_eq!(spec1.module_filters()[1].level_filter, LevelFilter::Warn); assert_eq!(spec1.module_filters().len(), 3); assert_eq!(spec1.module_filters()[2].module_name, None); assert_eq!(spec1.module_filters()[2].level_filter, LevelFilter::Info); builder.default(LevelFilter::Error); builder.remove("carlo"); builder.module("greta", LevelFilter::Trace); let spec2 = builder.build(); assert_eq!(spec2.module_filters().len(), 3); assert_eq!(spec2.module_filters()[2].module_name, None); assert_eq!(spec2.module_filters()[2].level_filter, LevelFilter::Error); assert_eq!( spec2.module_filters()[0].module_name, Some("greta".to_string()) ); assert_eq!(spec2.module_filters()[0].level_filter, LevelFilter::Trace); assert_eq!( spec2.module_filters()[1].module_name, Some("toni".to_string()) ); assert_eq!(spec2.module_filters()[1].level_filter, LevelFilter::Warn); } /////////////////////////////////////////////////////// /////////////////////////////////////////////////////// #[test] fn match_full_path() { let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); assert!(spec.enabled(Level::Warn, "crate1::mod1")); assert!(!spec.enabled(Level::Info, "crate1::mod1")); assert!(spec.enabled(Level::Info, "crate2")); assert!(!spec.enabled(Level::Debug, "crate2")); } #[test] fn no_match() { let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); assert!(!spec.enabled(Level::Warn, "crate3")); } #[test] fn match_beginning() { let spec = LogSpecification::parse("crate2=info,crate1::mod1=warn").unwrap(); assert!(spec.enabled(Level::Info, "crate2::mod1")); } #[test] fn match_beginning_longest_match() { let spec = LogSpecification::parse( "abcd = info, abcd::mod1 = error, klmn::mod = debug, klmn = info", ) .unwrap(); assert!(spec.enabled(Level::Error, "abcd::mod1::foo")); assert!(!spec.enabled(Level::Warn, "abcd::mod1::foo")); assert!(spec.enabled(Level::Warn, "abcd::mod2::foo")); assert!(!spec.enabled(Level::Debug, "abcd::mod2::foo")); assert!(!spec.enabled(Level::Debug, "klmn")); assert!(!spec.enabled(Level::Debug, "klmn::foo::bar")); assert!(spec.enabled(Level::Info, "klmn::foo::bar")); } #[test] fn match_default1() { let 
spec = LogSpecification::parse("info,abcd::mod1=warn").unwrap(); assert!(spec.enabled(Level::Warn, "abcd::mod1")); assert!(spec.enabled(Level::Info, "crate2::mod2")); } #[test] fn match_default2() { let spec = LogSpecification::parse("modxyz=error, info, abcd::mod1=warn").unwrap(); assert!(spec.enabled(Level::Warn, "abcd::mod1")); assert!(spec.enabled(Level::Info, "crate2::mod2")); } #[test] fn rocket() { let spec = LogSpecification::parse("info, rocket=off, serenity=off").unwrap(); assert!(spec.enabled(Level::Info, "itsme")); assert!(spec.enabled(Level::Warn, "abcd::mod1")); assert!(!spec.enabled(Level::Debug, "abcd::mod1")); assert!(!spec.enabled(Level::Error, "rocket::rocket")); assert!(!spec.enabled(Level::Warn, "rocket::rocket")); assert!(!spec.enabled(Level::Info, "rocket::rocket")); } #[test] fn add_filters() { let mut builder = crate::LogSpecBuilder::new(); builder.default(LevelFilter::Debug); builder.module("carlo", LevelFilter::Debug); builder.module("toni", LevelFilter::Warn); builder.insert_modules_from( LogSpecification::parse("info, may=error, toni::heart = trace").unwrap(), ); let spec = builder.build(); assert_eq!(spec.module_filters().len(), 5); assert_eq!( spec.module_filters()[0].module_name, Some("toni::heart".to_string()) ); assert_eq!(spec.module_filters()[0].level_filter, LevelFilter::Trace); assert_eq!( spec.module_filters()[1].module_name, Some("carlo".to_string()) ); assert_eq!(spec.module_filters()[1].level_filter, LevelFilter::Debug); assert_eq!( spec.module_filters()[2].module_name, Some("toni".to_string()) ); assert_eq!(spec.module_filters()[2].level_filter, LevelFilter::Warn); assert_eq!( spec.module_filters()[3].module_name, Some("may".to_string()) ); assert_eq!(spec.module_filters()[3].level_filter, LevelFilter::Error); assert_eq!(spec.module_filters()[4].module_name, None); assert_eq!(spec.module_filters()[4].level_filter, LevelFilter::Info); } #[test] fn zero_level() { let spec = LogSpecification::parse("info,crate1::mod1=off").unwrap(); assert!(!spec.enabled(Level::Error, "crate1::mod1")); assert!(spec.enabled(Level::Info, "crate2::mod2")); } } #[cfg(test)] #[cfg(feature = "specfile_without_notification")] mod test_with_specfile { #[cfg(feature = "specfile_without_notification")] use crate::LogSpecification; #[test] fn specfile() { compare_specs("", ""); compare_specs( "[modules]\n\ ", "", ); compare_specs( "global_level = 'info'\n\ \n\ [modules]\n\ ", "info", ); compare_specs( "global_level = 'info'\n\ \n\ [modules]\n\ 'mod1::mod2' = 'debug'\n\ 'mod3' = 'trace'\n\ ", "info, mod1::mod2 = debug, mod3 = trace", ); compare_specs( "global_level = 'info'\n\ global_pattern = 'Foo'\n\ \n\ [modules]\n\ 'mod1::mod2' = 'debug'\n\ 'mod3' = 'trace'\n\ ", "info, mod1::mod2 = debug, mod3 = trace /Foo", ); } #[cfg(feature = "specfile_without_notification")] fn compare_specs(toml: &str, spec_string: &str) { let ls_toml = LogSpecification::from_toml(toml).unwrap(); let ls_spec = LogSpecification::parse(spec_string).unwrap(); assert_eq!(ls_toml.module_filters, ls_spec.module_filters); assert_eq!(ls_toml.textfilter.is_none(), ls_spec.textfilter.is_none()); if ls_toml.textfilter.is_some() && ls_spec.textfilter.is_some() { assert_eq!( ls_toml.textfilter.unwrap().to_string(), ls_spec.textfilter.unwrap().to_string() ); } } } flexi_logger-0.29.8/src/logger.rs000064400000000000000000001116171046102023000150150ustar 00000000000000#[cfg(feature = "colors")] use crate::set_palette; use crate::{ filter::LogLineFilter, flexi_logger::FlexiLogger, formats::default_format, 
primary_writer::PrimaryWriter, threads::start_flusher_thread, util::{set_error_channel, set_panic_on_error_channel_error}, writers::{FileLogWriter, FileLogWriterBuilder, LogWriter}, Cleanup, Criterion, DeferredNow, FileSpec, FlexiLoggerError, FormatFunction, LogSpecification, LoggerHandle, Naming, WriteMode, }; use crate::{formats::AdaptiveFormat, ZERO_DURATION}; use log::LevelFilter; #[cfg(feature = "specfile")] use std::sync::Mutex; use std::{ collections::HashMap, io::IsTerminal, path::PathBuf, sync::{Arc, RwLock}, }; #[cfg(feature = "specfile_without_notification")] use {crate::logger_handle::LogSpecSubscriber, std::io::Read, std::path::Path}; #[cfg(feature = "specfile")] use { crate::util::{eprint_err, ErrorCode}, notify_debouncer_mini::{ new_debouncer, notify::{RecommendedWatcher, RecursiveMode}, DebounceEventResult, Debouncer, }, }; /// The entry-point for using `flexi_logger`. /// /// `Logger` is a builder class that allows you to /// * specify your desired (initial) loglevel-specification /// * either as a String ([`Logger::try_with_str`]) /// * or by providing it in the environment ([`Logger::try_with_env`]), /// * or by combining both options ([`Logger::try_with_env_or_str`]), /// * or by building a [`LogSpecification`] programmatically ([`Logger::with`]), /// * use the desired configuration methods, /// * and finally start the logger with /// /// * [`Logger::start`], or /// * [`Logger::start_with_specfile`]. /// /// # Usage /// /// See [`code_examples`](code_examples/index.html) for a comprehensive list of usage possibilities. pub struct Logger { spec: LogSpecification, log_target: LogTarget, duplicate_err: Duplicate, duplicate_out: Duplicate, format_for_file: FormatFunction, format_for_stderr: FormatFunction, format_for_stdout: FormatFunction, format_for_writer: FormatFunction, #[cfg(feature = "colors")] o_palette: Option, flush_interval: std::time::Duration, flwb: FileLogWriterBuilder, other_writers: HashMap>, filter: Option>, error_channel: ErrorChannel, use_utc: bool, panic_on_error_channel_error: bool, } enum LogTarget { StdErr, StdOut, Multi(bool, Option>), } /// Create a Logger instance and define how to access the (initial) /// loglevel-specification. impl Logger { /// Creates a Logger that you provide with an explicit [`LogSpecification`]. /// /// ## Examples /// /// ```rust /// use log::LevelFilter; /// use flexi_logger::Logger; /// let logger = Logger::with(LevelFilter::Info).start().unwrap(); /// ``` /// /// ```rust /// use flexi_logger::{Logger, LogSpecification}; /// let logger = Logger::with( /// LogSpecification::parse("info, critical_mod = trace").unwrap() /// ).start().unwrap(); /// ``` #[must_use] pub fn with(logspec: impl Into) -> Self { Self::from_spec_and_errs(logspec.into()) } /// Creates a Logger that reads the [`LogSpecification`] from a `String` or `&str`. /// See [`LogSpecification`] for the syntax. /// /// # Errors /// /// `FlexiLoggerError::Parse` if the String uses an erroneous syntax. pub fn try_with_str>(s: S) -> Result { Ok(Self::from_spec_and_errs(LogSpecification::parse( s.as_ref(), )?)) } /// Creates a Logger that reads the [`LogSpecification`] from the environment variable /// `RUST_LOG`. /// /// Note that if `RUST_LOG` is not set, nothing is logged. /// /// # Errors /// /// `FlexiLoggerError::Parse` if the value of `RUST_LOG` is malformed. 
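    ///
    /// # Example
    ///
    /// A minimal sketch; `RUST_LOG` is evaluated at runtime
    /// (the spec value in the comment is only illustrative):
    ///
    /// ```rust
    /// use flexi_logger::Logger;
    ///
    /// // e.g. started with `RUST_LOG=info,my_module=debug ./my_prog`
    /// let _logger = Logger::try_with_env().unwrap().start().unwrap();
    /// ```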
pub fn try_with_env() -> Result { Ok(Self::from_spec_and_errs(LogSpecification::env()?)) } /// Creates a Logger that reads the [`LogSpecification`] from the environment variable /// `RUST_LOG`, or derives it from the given `String`, if `RUST_LOG` is not set. /// /// # Errors /// /// `FlexiLoggerError::Parse` if the chosen value is malformed. pub fn try_with_env_or_str>(s: S) -> Result { Ok(Self::from_spec_and_errs(LogSpecification::env_or_parse(s)?)) } fn from_spec_and_errs(spec: LogSpecification) -> Self { #[cfg(feature = "colors")] #[cfg(windows)] { nu_ansi_term::enable_ansi_support().ok(); } Self { spec, log_target: LogTarget::StdErr, duplicate_err: Duplicate::None, duplicate_out: Duplicate::None, format_for_file: default_format, #[cfg(feature = "colors")] format_for_stdout: AdaptiveFormat::Default .format_function(std::io::stdout().is_terminal()), #[cfg(feature = "colors")] format_for_stderr: AdaptiveFormat::Default .format_function(std::io::stderr().is_terminal()), #[cfg(not(feature = "colors"))] format_for_stdout: default_format, #[cfg(not(feature = "colors"))] format_for_stderr: default_format, format_for_writer: default_format, #[cfg(feature = "colors")] o_palette: None, flush_interval: ZERO_DURATION, flwb: FileLogWriter::builder(FileSpec::default()), other_writers: HashMap::>::new(), filter: None, error_channel: ErrorChannel::default(), use_utc: false, panic_on_error_channel_error: true, } } } /// Simple methods for influencing the behavior of the Logger. impl Logger { /// Log is written to stderr (which is the default). #[must_use] pub fn log_to_stderr(mut self) -> Self { self.log_target = LogTarget::StdErr; self } /// Log is written to stdout. #[must_use] pub fn log_to_stdout(mut self) -> Self { self.log_target = LogTarget::StdOut; self } /// Log is written to a file. /// /// See [`FileSpec`] for details about the filename pattern. /// /// You can duplicate to stdout and stderr, and you can add additional writers. #[must_use] pub fn log_to_file(mut self, file_spec: FileSpec) -> Self { self.log_target = LogTarget::Multi(true, None); self.flwb = self.flwb.file_spec(file_spec); self } /// Log is written to the provided writer. /// /// You can duplicate to stdout and stderr, and you can add additional writers. #[must_use] pub fn log_to_writer(mut self, w: Box) -> Self { self.log_target = LogTarget::Multi(false, Some(w)); self } /// Log is written to a file, as with [`Logger::log_to_file`], _and_ to an alternative /// [`LogWriter`] implementation. /// /// And you can duplicate to stdout and stderr, and you can add additional writers. #[must_use] pub fn log_to_file_and_writer(mut self, file_spec: FileSpec, w: Box) -> Self { self.log_target = LogTarget::Multi(true, Some(w)); self.flwb = self.flwb.file_spec(file_spec); self } /// Log is processed, including duplication, but not written to any destination. /// /// This can be useful e.g. for running application tests with all log-levels active and still /// avoiding tons of log files etc. /// Such tests ensure that the log calls which are normally not active /// will not cause undesired side-effects when activated /// (note that the log macros may prevent arguments of inactive log-calls from being evaluated). /// /// Or, if you want to get logs both to stdout and stderr, but nowhere else, /// then use this option and combine it with /// [`Logger::duplicate_to_stdout`] and [`Logger::duplicate_to_stderr`]. 
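    ///
    /// # Example
    ///
    /// A minimal sketch of the stdout-plus-stderr scenario described above:
    ///
    /// ```rust
    /// use flexi_logger::{Duplicate, Logger};
    ///
    /// let _logger = Logger::try_with_str("info").unwrap()
    ///     .do_not_log()
    ///     .duplicate_to_stdout(Duplicate::All)
    ///     .duplicate_to_stderr(Duplicate::Warn)
    ///     .start()
    ///     .unwrap();
    /// ```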
#[must_use] pub fn do_not_log(mut self) -> Self { self.log_target = LogTarget::Multi(false, None); self } /// Makes the logger print an info message to stdout with the name of the logfile /// when a logfile is opened for writing. #[must_use] pub fn print_message(mut self) -> Self { self.flwb = self.flwb.print_message(); self } /// Makes the logger write messages with the specified minimum severity additionally to stderr. /// /// Does not work with [`Logger::log_to_stdout`] or [`Logger::log_to_stderr`]. #[must_use] pub fn duplicate_to_stderr(mut self, dup: Duplicate) -> Self { self.duplicate_err = dup; self } /// Makes the logger write messages with the specified minimum severity additionally to stdout. /// /// Does not work with [`Logger::log_to_stdout`] or [`Logger::log_to_stderr`]. #[must_use] pub fn duplicate_to_stdout(mut self, dup: Duplicate) -> Self { self.duplicate_out = dup; self } /// Makes the logger use the provided format function for all messages /// that are written to files, stderr, stdout, or to an additional writer. /// /// You can either choose one of the provided log-line formatters, /// or you create and use your own format function with the signature
/// ```rust /// fn my_format( /// write: &mut dyn std::io::Write, /// now: &mut flexi_logger::DeferredNow, /// record: &log::Record, /// ) -> std::io::Result<()> /// # {unimplemented!("")} /// ``` /// /// By default, [`default_format`] is used for output to files and to custom writers, /// and [`AdaptiveFormat::Default`] is used for output to `stderr` and `stdout`. /// If the feature `colors` is switched off, [`default_format`] is used for all outputs. #[must_use] pub fn format(mut self, format: FormatFunction) -> Self { self.format_for_file = format; self.format_for_stderr = format; self.format_for_stdout = format; self.format_for_writer = format; self } /// Makes the logger use the provided format function for messages /// that are written to files. /// /// Regarding the default, see [`Logger::format`]. #[must_use] pub fn format_for_files(mut self, format: FormatFunction) -> Self { self.format_for_file = format; self } /// Makes the logger use the provided format function for messages /// that are written to stderr. /// /// Regarding the default, see [`Logger::format`]. #[must_use] pub fn format_for_stderr(mut self, format_function: FormatFunction) -> Self { self.format_for_stderr = format_function; self } /// Makes the logger use the specified format for messages that are written to `stderr`. /// Coloring is used if `stderr` is a tty. /// /// Regarding the default, see [`Logger::format`]. #[must_use] pub fn adaptive_format_for_stderr(mut self, adaptive_format: AdaptiveFormat) -> Self { self.format_for_stderr = adaptive_format.format_function(std::io::stderr().is_terminal()); self } /// Makes the logger use the provided format function to format messages /// that are written to stdout. /// /// Regarding the default, see [`Logger::format`]. #[must_use] pub fn format_for_stdout(mut self, format_function: FormatFunction) -> Self { self.format_for_stdout = format_function; self } /// Makes the logger use the specified format for messages that are written to `stdout`. /// Coloring is used if `stdout` is a tty. /// /// Regarding the default, see [`Logger::format`]. #[must_use] pub fn adaptive_format_for_stdout(mut self, adaptive_format: AdaptiveFormat) -> Self { self.format_for_stdout = adaptive_format.format_function(std::io::stdout().is_terminal()); self } /// Allows specifying a format function for an additional writer. /// Note that it is up to the implementation of the additional writer /// whether it evaluates this setting or not. /// /// Regarding the default, see [`Logger::format`]. #[must_use] pub fn format_for_writer(mut self, format: FormatFunction) -> Self { self.format_for_writer = format; self } /// Sets the color palette for function [`style`](crate::style), which is used in the /// provided coloring format functions. /// /// The palette given here overrides the default palette. /// /// The palette is specified in form of a String that contains a semicolon-separated list /// of numbers (0..=255) and/or dashes (´-´). /// The first five values denote the fixed color that is /// used for coloring `error`, `warn`, `info`, `debug`, and `trace` messages. /// /// The String `"196;208;-;7;8"` describes the default palette, where color 196 is /// used for error messages, and so on. The `-` means that no coloring is done, /// i.e., with `"-;-;-;-;-"` all coloring is switched off. /// /// Prefixing a number with 'b' makes the output being written in bold. /// The String `"b1;3;2;4;6"` e.g. describes the palette used by `env_logger`. 
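    ///
    /// For example, a minimal sketch that applies the `env_logger`-like palette
    /// (assuming the default feature `colors` is active):
    ///
    /// ```rust
    /// use flexi_logger::Logger;
    ///
    /// let _logger = Logger::try_with_str("info").unwrap()
    ///     .set_palette("b1;3;2;4;6".to_string())
    ///     .start()
    ///     .unwrap();
    /// ```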
/// /// The palette can further be overridden at runtime by setting the environment variable /// `FLEXI_LOGGER_PALETTE` to a palette String. This allows adapting the used text colors to /// differently colored terminal backgrounds. /// /// For your convenience, if you want to specify your own palette, /// you can produce a colored list with all 255 colors with `cargo run --example colors`. #[cfg_attr(docsrs, doc(cfg(feature = "colors")))] #[cfg(feature = "colors")] #[must_use] pub fn set_palette(mut self, palette: String) -> Self { self.o_palette = Some(palette); self } /// Prevent indefinite growth of the log file by applying file rotation /// and a clean-up strategy for older log files. /// /// By default, the log file is fixed while your program is running and will grow indefinitely. /// With this option being used, when the log file reaches the specified criterion, /// the file will be closed and a new file will be opened. /// /// Note that also the filename pattern changes: /// /// - by default, no timestamp is added to the filename if rotation is used /// - the logs are always written to a file with infix `_rCURRENT` /// - when the rotation criterion is fulfilled, it is closed and renamed to a file /// with another infix (see `Naming`), /// and then the logging continues again to the (fresh) file with infix `_rCURRENT`. /// /// Example: /// /// After some logging with your program `my_prog` and rotation with `Naming::Numbers`, /// you will find files like /// /// ```text /// my_prog_r00000.log /// my_prog_r00001.log /// my_prog_r00002.log /// my_prog_rCURRENT.log /// ``` /// /// ## Parameters /// /// `criterion` defines *when* the log file should be rotated, based on its size or age. /// See [`Criterion`] for details. /// /// `naming` defines the naming convention for the rotated log files. /// See [`Naming`] for details. /// /// `cleanup` defines the strategy for dealing with older files. /// See [`Cleanup`] for details. #[must_use] pub fn rotate(mut self, criterion: Criterion, naming: Naming, cleanup: Cleanup) -> Self { self.flwb = self.flwb.rotate(criterion, naming, cleanup); self } /// When [`Logger::rotate`] is used with some [`Cleanup`] variant other than [`Cleanup::Never`], /// then this method can be used to define /// if the cleanup activities (finding files, deleting files, evtl compressing files) are /// delegated to a background thread (which is the default, /// to minimize the blocking impact to your application caused by IO operations), /// or whether they are done synchronously in the current log-call. /// /// If you call this method with `use_background_thread = false`, /// the cleanup is done synchronously. #[must_use] pub fn cleanup_in_background_thread(mut self, use_background_thread: bool) -> Self { self.flwb = self .flwb .cleanup_in_background_thread(use_background_thread); self } /// Apply the provided filter before really writing log lines. /// /// See the documentation of module [`filter`](crate::filter) for a usage example. #[must_use] pub fn filter(mut self, filter: Box) -> Self { self.filter = Some(filter); self } /// Makes the logger append to the specified output file, if it exists already; /// by default, the file would be truncated. /// /// This option only has an effect if logs are written to files, but /// it will hardly make an effect if [`FileSpec::suppress_timestamp`] is not used. #[must_use] pub fn append(mut self) -> Self { self.flwb = self.flwb.append(); self } /// Makes the logger use UTC timestamps rather than local timestamps. 
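    ///
    /// # Example
    ///
    /// A minimal sketch showing where this call fits into the builder chain:
    ///
    /// ```rust
    /// use flexi_logger::{FileSpec, Logger};
    ///
    /// let _logger = Logger::try_with_str("info").unwrap()
    ///     .log_to_file(FileSpec::default())
    ///     .use_utc()
    ///     .start()
    ///     .unwrap();
    /// ```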
#[must_use] pub fn use_utc(mut self) -> Self { self.use_utc = true; self } /// The specified path will be used on unix systems to create a symbolic link /// to the current log file. /// /// This option has no effect on filesystems where symlinks are not supported, /// and it only has an effect if logs are written to files. /// /// ### Example /// /// You can use the symbolic link to follow the log output with `tail`, /// even if the log files are rotated. /// /// Assuming you use `create_symlink("link_to_log_file")`, then use: /// /// ```text /// tail --follow=name --max-unchanged-stats=1 --retry link_to_log_file /// ``` /// #[must_use] pub fn create_symlink>(mut self, symlink: P) -> Self { self.flwb = self.flwb.create_symlink(symlink); self } /// Registers a [`LogWriter`] implementation under the given target name. /// /// The target name must not start with an underscore. /// See module [`writers`](crate::writers) for more details. #[must_use] pub fn add_writer>( mut self, target_name: S, writer: Box, ) -> Self { self.other_writers.insert(target_name.into(), writer); self } /// Sets the write mode for the logger. /// /// See [`WriteMode`] for more (important!) details. #[must_use] pub fn write_mode(mut self, write_mode: WriteMode) -> Self { self.flwb = self.flwb.write_mode(write_mode.without_flushing()); self.flush_interval = write_mode.get_flush_interval(); self } /// Use Windows line endings, rather than just `\n`. #[must_use] pub fn use_windows_line_ending(mut self) -> Self { self.flwb = self.flwb.use_windows_line_ending(); self } /// Define the output channel for `flexi_logger`'s own error messages. /// /// These are only written if `flexi_logger` cannot do what it is supposed to do. /// Under normal circumstances no single message should appear. /// /// By default these error messages are printed to `stderr`. #[must_use] pub fn error_channel(mut self, error_channel: ErrorChannel) -> Self { self.error_channel = error_channel; self } /// Decides what `flexi_logger` should do if the error output channel cannot be written to. /// /// By default, it will panic if error messages cannot be written to the chosen /// error output channel. /// Calling this method with `false` will let `flexi_logger` ignore the issue and suppress /// the error messages. #[must_use] pub fn panic_if_error_channel_is_broken(mut self, panic: bool) -> Self { self.panic_on_error_channel_error = panic; self } } /// Enum for defining the output channel for `flexi_logger`'s own error messages. /// /// These are only written if `flexi_logger` cannot do what it is supposed to do, /// so under normal circumstances no single message shuld appear. /// /// By default these error messages are printed to `stderr`. #[derive(Debug, Default)] pub enum ErrorChannel { /// Write `flexi_logger`'s own error messages to `stderr`. #[default] StdErr, /// Write `flexi_logger`'s own error messages to `stdout`. StdOut, /// Write `flexi_logger`'s own error messages to the specified file. File(PathBuf), /// Don't write `flexi_logger`'s own error messages. DevNull, } /// Alternative set of methods to control the behavior of the Logger. /// Use these methods when you want to control the settings flexibly, /// e.g. with commandline arguments via `docopts` or `clap`. impl Logger { /// With true, makes the logger print an info message to stdout, each time /// when a new file is used for log-output. 
#[must_use] pub fn o_print_message(mut self, print_message: bool) -> Self { self.flwb = self.flwb.o_print_message(print_message); self } /// By default, and with None, the log file will grow indefinitely. /// If a `rotate_config` is set, when the log file reaches or exceeds the specified size, /// the file will be closed and a new file will be opened. /// Also the filename pattern changes: instead of the timestamp, a serial number /// is included into the filename. /// /// The size is given in bytes, e.g. `o_rotate_over_size(Some(1_000))` will rotate /// files once they reach a size of 1 kB. /// /// The cleanup strategy allows delimiting the used space on disk. #[must_use] pub fn o_rotate(mut self, rotate_config: Option<(Criterion, Naming, Cleanup)>) -> Self { self.flwb = self.flwb.o_rotate(rotate_config); self } /// If append is set to true, makes the logger append to the specified output file, if it exists. /// By default, or with false, the file would be truncated. /// /// This option only has an effect if logs are written to files, /// and it will hardly make an effect if `suppress_timestamp()` is not used. #[must_use] pub fn o_append(mut self, append: bool) -> Self { self.flwb = self.flwb.o_append(append); self } /// If a String is specified, it will be used on unix systems to create in the current folder /// a symbolic link with this name to the current log file. /// /// This option only has an effect on unix systems and if logs are written to files. #[must_use] pub fn o_create_symlink>(mut self, symlink: Option

    ) -> Self {
        self.flwb = self.flwb.o_create_symlink(symlink);
        self
    }
}

/// Finally, start logging, optionally with a spec-file.
impl Logger {
    /// Consumes the Logger object and initializes `flexi_logger`.
    ///
    /// **Keep the [`LoggerHandle`] alive up to the very end of your program!**
    /// Dropping the [`LoggerHandle`] flushes and shuts down [`FileLogWriter`]s
    /// and other [`LogWriter`]s, and then may prevent further logging!
    /// This should happen immediately before the program terminates, but not earlier.
    ///
    /// Dropping the [`LoggerHandle`] is uncritical
    /// only with [`Logger::log_to_stdout`] or [`Logger::log_to_stderr`].
    ///
    /// The [`LoggerHandle`] also allows updating the log specification programmatically,
    /// e.g. to intensify logging for (buggy) parts of a (test) program, etc.
    ///
    /// # Example
    ///
    /// ```rust
    /// use flexi_logger::{Logger, WriteMode, FileSpec};
    /// fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let _logger = Logger::try_with_str("info")?
    ///         .log_to_file(FileSpec::default())
    ///         .write_mode(WriteMode::BufferAndFlush)
    ///         .start()?;
    ///
    ///     // ... do all your work and join back all threads whose logs you want to see ...
    ///
    ///     Ok(())
    /// }
    /// ```
    ///
    /// # Errors
    ///
    /// Several variants of [`FlexiLoggerError`] can occur.
    pub fn start(self) -> Result<LoggerHandle, FlexiLoggerError> {
        let (boxed_logger, handle) = self.build()?;
        log::set_boxed_logger(boxed_logger)?;
        Ok(handle)
    }

    /// Builds a boxed logger and a `LoggerHandle` for it,
    /// but does not initialize the global logger.
    ///
    /// The returned boxed logger implements the [`Log`](log::Log) trait
    /// and can be installed manually or nested within another logger.
    ///
    /// **Keep the [`LoggerHandle`] alive up to the very end of your program!**
    /// See [`Logger::start`] for more details.
    ///
    /// # Errors
    ///
    /// Several variants of [`FlexiLoggerError`] can occur.
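    ///
    /// # Example
    ///
    /// A minimal sketch of installing the boxed logger manually, similar to what
    /// [`Logger::start`] does internally:
    ///
    /// ```rust
    /// use flexi_logger::Logger;
    ///
    /// let (boxed_logger, logger_handle) = Logger::try_with_str("info").unwrap()
    ///     .build()
    ///     .unwrap();
    /// // install it as the global logger, as `start()` would do:
    /// log::set_boxed_logger(boxed_logger).unwrap();
    /// // keep `logger_handle` alive until the very end of the program
    /// ```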
pub fn build(mut self) -> Result<(Box, LoggerHandle), FlexiLoggerError> { #[cfg(feature = "colors")] set_palette(self.o_palette.as_deref())?; if self.use_utc { self.flwb = self.flwb.use_utc(); } set_panic_on_error_channel_error(self.panic_on_error_channel_error); let a_primary_writer = Arc::new(match self.log_target { LogTarget::StdOut => { if let WriteMode::SupportCapture = self.flwb.get_write_mode() { PrimaryWriter::test(true, self.format_for_stdout) } else { PrimaryWriter::stdout(self.format_for_stdout, self.flwb.get_write_mode()) } } LogTarget::StdErr => { if let WriteMode::SupportCapture = self.flwb.get_write_mode() { PrimaryWriter::test(false, self.format_for_stderr) } else { PrimaryWriter::stderr(self.format_for_stderr, self.flwb.get_write_mode()) } } LogTarget::Multi(use_file, mut o_writer) => PrimaryWriter::multi( self.duplicate_err, self.duplicate_out, WriteMode::SupportCapture == *self.flwb.get_write_mode(), self.format_for_stderr, self.format_for_stdout, if use_file { Some(Box::new( self.flwb.format(self.format_for_file).try_build()?, )) } else { None }, { if let Some(ref mut writer) = o_writer { writer.format(self.format_for_writer); } o_writer }, ), }); let a_other_writers = Arc::new(self.other_writers); if self.flush_interval != ZERO_DURATION { start_flusher_thread( Arc::clone(&a_primary_writer), Arc::clone(&a_other_writers), self.flush_interval, )?; } let max_level = self.spec.max_level(); let a_l_spec = Arc::new(RwLock::new(self.spec)); set_error_channel(self.error_channel); // initialize the lazy_statics in DeferredNow before threads are spawned if self.use_utc { DeferredNow::force_utc(); } let mut now = DeferredNow::new(); now.now(); let flexi_logger = FlexiLogger::new( Arc::clone(&a_l_spec), Arc::clone(&a_primary_writer), Arc::clone(&a_other_writers), self.filter, ); let handle = LoggerHandle::new(a_l_spec, a_primary_writer, a_other_writers); handle.reconfigure(max_level); Ok((Box::new(flexi_logger), handle)) } /// Consumes the Logger object and initializes `flexi_logger` in a way that /// subsequently the log specification can be updated, /// while the program is running, by editing a file. /// /// Uses the spec that was given to the factory method ([`Logger::with`] etc) /// as initial spec and then tries to read the logspec from a file. /// /// If the file does not exist, `flexi_logger` creates the file and fills it /// with the initial spec (and in the respective file format, of course). /// /// **Keep the returned [`LoggerHandle`] alive up to the very end of your program!** /// See [`Logger::start`] for more details. /// /// # Feature dependency /// /// The implementation of this configuration method uses some additional crates /// that you might not want to depend on with your program if you don't use this functionality. /// For that reason the method is only available if you activate the /// `specfile` feature. See the usage section on /// [crates.io](https://crates.io/crates/flexi_logger) for details. 
/// /// # Usage /// /// A logger initialization like /// /// ```rust,no_run /// use flexi_logger::Logger; /// Logger::try_with_str("info") /// .unwrap() /// // more logger configuration /// .start_with_specfile("logspecification.toml"); /// ``` /// /// will create the file `logspecification.toml` (if it does not yet exist) with this content: /// /// ```toml /// ### Optional: Default log level /// global_level = 'info' /// ### Optional: specify a regular expression to suppress all messages that don't match /// #global_pattern = 'foo' /// /// ### Specific log levels per module are optionally defined in this section /// [modules] /// #'mod1' = 'warn' /// #'mod2' = 'debug' /// #'mod2::mod3' = 'trace' /// ``` /// /// You can subsequently edit and modify the file according to your needs, /// while the program is running, and it will immediately take your changes into account. /// /// Currently only toml-files are supported, the file suffix thus must be `.toml`. /// /// The initial spec remains valid if the file cannot be read. /// /// If you update the specfile subsequently while the program is running, `flexi_logger` /// re-reads it automatically and adapts its behavior according to the new content. /// If the file cannot be read anymore, e.g. because the format is not correct, the /// previous logspec remains active. /// If the file is corrected subsequently, the log spec update will work again. /// /// # Errors /// /// Several variants of [`FlexiLoggerError`] can occur. #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] #[cfg(feature = "specfile_without_notification")] pub fn start_with_specfile>( self, specfile: P, ) -> Result { let (boxed_logger, handle) = self.build_with_specfile(specfile)?; log::set_boxed_logger(boxed_logger)?; Ok(handle) } /// Builds a boxed logger and a `LoggerHandle` for it, /// but does not initialize the global logger. /// /// See also [`Logger::start`] and [`Logger::start_with_specfile`]. /// for the properties of the returned logger. /// /// # Errors /// /// Several variants of [`FlexiLoggerError`] can occur. 
#[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] #[cfg(feature = "specfile_without_notification")] pub fn build_with_specfile>( self, specfile: P, ) -> Result<(Box, LoggerHandle), FlexiLoggerError> { let (boxed_log, mut handle) = self.build()?; let specfile = specfile.as_ref(); synchronize_subscriber_with_specfile(&mut handle.writers_handle, specfile)?; #[cfg(feature = "specfile")] { handle.oam_specfile_watcher = Some(Arc::new(Mutex::new(create_specfile_watcher( specfile, handle.writers_handle.clone(), )?))); } Ok((boxed_log, handle)) } } // Reread the specfile when it was updated #[cfg(feature = "specfile")] pub(crate) fn create_specfile_watcher( specfile: &Path, mut subscriber: S, ) -> Result, FlexiLoggerError> { let specfile = specfile .canonicalize() .map_err(FlexiLoggerError::SpecfileIo)?; let clone = specfile.clone(); let parent = clone.parent().unwrap(/*cannot fail*/); let mut debouncer = new_debouncer( std::time::Duration::from_millis(1000), move |res: DebounceEventResult| match res { Ok(events) => events.iter().for_each(|e| { if e.path .canonicalize() .map(|x| x == specfile) .unwrap_or(false) { log_spec_string_from_file(&specfile) .map_err(FlexiLoggerError::SpecfileIo) .and_then(LogSpecification::from_toml) .and_then(|spec| subscriber.set_new_spec(spec)) .map_err(|e| { eprint_err( ErrorCode::LogSpecFile, "continuing with previous log specification, because \ rereading the log specification file failed", &e, ); }) .ok(); } }), Err(e) => eprint_err( ErrorCode::LogSpecFile, "error while watching the specfile", &e, ), }, ) .unwrap(); debouncer .watcher() .watch(parent, RecursiveMode::NonRecursive) .unwrap(); Ok(debouncer) } // If the specfile exists, read the file and update the subscriber's logspec from it; // otherwise try to create the file, with the current spec as content, under the specified name. #[cfg(feature = "specfile_without_notification")] pub(crate) fn synchronize_subscriber_with_specfile( subscriber: &mut S, specfile: &Path, ) -> Result<(), FlexiLoggerError> { if specfile .extension() .unwrap_or_else(|| std::ffi::OsStr::new("")) .to_str() .unwrap_or("") != "toml" { return Err(FlexiLoggerError::SpecfileExtension( "only spec files with extension toml are supported", )); } if Path::is_file(specfile) { let s = log_spec_string_from_file(specfile).map_err(FlexiLoggerError::SpecfileIo)?; subscriber.set_new_spec(LogSpecification::from_toml(s)?)?; } else { if let Some(specfolder) = specfile.parent() { std::fs::DirBuilder::new() .recursive(true) .create(specfolder) .map_err(FlexiLoggerError::SpecfileIo)?; } let mut file = std::fs::OpenOptions::new() .write(true) .create_new(true) .open(specfile) .map_err(FlexiLoggerError::SpecfileIo)?; subscriber.initial_spec()?.to_toml(&mut file)?; } Ok(()) } #[cfg(feature = "specfile_without_notification")] pub(crate) fn log_spec_string_from_file>( specfile: P, ) -> Result { let mut buf = String::new(); let mut file = std::fs::File::open(specfile)?; file.read_to_string(&mut buf)?; Ok(buf) } /// Used to control which messages are to be duplicated to stderr, when `log_to_file()` is used. #[derive(Debug, Clone, Copy)] pub enum Duplicate { /// No messages are duplicated. None = 0, /// Only error messages are duplicated. Error = 1, /// Error and warn messages are duplicated. Warn = 2, /// Error, warn, and info messages are duplicated. Info = 3, /// Error, warn, info, and debug messages are duplicated. Debug = 4, /// All messages are duplicated. Trace = 5, /// All messages are duplicated. 
All = 6, } impl From for Duplicate { fn from(val: u8) -> Self { match val { 0 => Duplicate::None, 1 => Duplicate::Error, 2 => Duplicate::Warn, 3 => Duplicate::Info, 4 => Duplicate::Debug, 5 => Duplicate::Trace, 6 => Duplicate::All, _ => unreachable!(), } } } impl From for Duplicate { fn from(level: LevelFilter) -> Self { match level { LevelFilter::Off => Duplicate::None, LevelFilter::Error => Duplicate::Error, LevelFilter::Warn => Duplicate::Warn, LevelFilter::Info => Duplicate::Info, LevelFilter::Debug => Duplicate::Debug, LevelFilter::Trace => Duplicate::Trace, } } } impl From for LevelFilter { fn from(level: Duplicate) -> Self { match level { Duplicate::None => LevelFilter::Off, Duplicate::Error => LevelFilter::Error, Duplicate::Warn => LevelFilter::Warn, Duplicate::Info => LevelFilter::Info, Duplicate::Debug => LevelFilter::Debug, Duplicate::Trace | Duplicate::All => LevelFilter::Trace, } } } flexi_logger-0.29.8/src/logger_handle.rs000064400000000000000000000422761046102023000163340ustar 00000000000000use crate::{ primary_writer::PrimaryWriter, util::{eprint_err, ErrorCode}, writers::{FileLogWriterBuilder, FileLogWriterConfig, LogWriter}, Duplicate, FlexiLoggerError, LogSpecification, }; #[cfg(feature = "specfile")] use notify_debouncer_mini::{notify::RecommendedWatcher, Debouncer}; #[cfg(feature = "specfile")] use std::sync::Mutex; use std::{ collections::HashMap, path::PathBuf, sync::{Arc, RwLock}, }; /// Allows reconfiguring the logger while the program is running, and /// **shuts down the logger when it is dropped**. /// /// A `LoggerHandle` is returned from `Logger::start()` and from `Logger::start_with_specfile()`. /// /// Keep it alive until the very end of your program, because it shuts down the logger when /// its dropped! /// (This is only relevant if you use one of /// `Logger::log_to_file`, `Logger::log_to_writer`, or `Logger::log_to_file_and_writer`, or /// a buffering or asynchronous [`WriteMode`](crate::WriteMode)). /// /// `LoggerHandle` offers methods to modify the log specification programmatically, /// to flush the logger explicitly, and to reconfigure the used `FileLogWriter` -- /// if one is used. /// /// # Examples /// /// In more trivial configurations, dropping the `LoggerHandle` has no effect and then /// you can safely ignore the return value of `Logger::start()`: /// /// ```rust /// use flexi_logger::Logger; /// use std::error::Error; /// fn main() -> Result<(), Box> { /// Logger::try_with_str("info")?.start()?; /// // do work /// Ok(()) /// } /// ``` /// /// When logging to a file or another writer, /// and/or if you use a buffering or asynchronous [`WriteMode`](crate::WriteMode), /// keep the `LoggerHandle` alive until the program ends: /// /// ```rust /// use flexi_logger::{FileSpec, Logger}; /// use std::error::Error; /// fn main() -> Result<(), Box> { /// let _logger = Logger::try_with_str("info")? /// .log_to_file(FileSpec::default()) /// .start()?; /// // do work /// Ok(()) /// } /// ``` /// /// You can use the logger handle to permanently exchange the log specification programmatically, /// anywhere in your code: /// /// ```rust /// # use flexi_logger::Logger; /// # use std::error::Error; /// # fn main() -> Result<(), Box> { /// let logger = Logger::try_with_str("info")?.start()?; /// // ... /// logger.parse_new_spec("warn"); /// // ... 
/// # Ok(()) /// # } /// ``` /// /// However, when debugging, you often want to modify the log spec only temporarily, for /// one or few method calls only; this is easier done with the following method, because /// it allows switching back to the previous spec: /// /// ```rust /// # use flexi_logger::Logger; /// # use std::error::Error; /// # fn main() -> Result<(), Box> { /// let mut logger = Logger::try_with_str("info")?.start()?; /// logger.parse_and_push_temp_spec("trace"); /// // ... /// // critical calls /// // ... /// logger.pop_temp_spec(); /// // Continue with the log spec you had before. /// // ... /// # Ok(()) /// # } /// ``` #[derive(Clone)] pub struct LoggerHandle where Self: Send + Sync, // Note: we demand Send and Sync explicitly because we want to be able to move a // `LoggerHandle` between threads. // At least with notify_debouncer_mini version 0.4.1 this would not be given if we omitted // the Mutex (which we don't need otherwise): we'd then get // `std::sync::mpsc::Sender` cannot be shared \ // between threads safely { pub(crate) writers_handle: WritersHandle, #[cfg(feature = "specfile")] pub(crate) oam_specfile_watcher: Option>>>, } impl LoggerHandle { pub(crate) fn new( spec: Arc>, primary_writer: Arc, other_writers: Arc>>, ) -> Self { Self { writers_handle: WritersHandle { spec, spec_stack: Vec::default(), primary_writer, other_writers, }, #[cfg(feature = "specfile")] oam_specfile_watcher: None, } } // pub(crate) fn reconfigure(&self, max_level: log::LevelFilter) { self.writers_handle.reconfigure(max_level); } /// Replaces the active `LogSpecification`. pub fn set_new_spec(&self, new_spec: LogSpecification) { self.writers_handle .set_new_spec(new_spec) .map_err(|e| eprint_err(ErrorCode::Poison, "rwlock on log spec is poisoned", &e)) .ok(); } /// Tries to replace the active `LogSpecification` with the result from parsing the given String. /// /// # Errors /// /// [`FlexiLoggerError::Parse`] if the input is malformed. pub fn parse_new_spec(&self, spec: &str) -> Result<(), FlexiLoggerError> { self.set_new_spec(LogSpecification::parse(spec)?); Ok(()) } /// Replaces the active `LogSpecification` and pushes the previous one to a stack. #[allow(clippy::missing_panics_doc)] pub fn push_temp_spec(&mut self, new_spec: LogSpecification) { self.writers_handle .spec_stack .push(self.writers_handle.spec.read().unwrap(/* catch and expose error? */).clone()); self.set_new_spec(new_spec); } /// Tries to replace the active `LogSpecification` with the result from parsing the given String /// and pushes the previous one to a Stack. /// /// # Errors /// /// [`FlexiLoggerError::Parse`] if the input is malformed. pub fn parse_and_push_temp_spec>( &mut self, new_spec: S, ) -> Result<(), FlexiLoggerError> { self.writers_handle.spec_stack.push( self.writers_handle .spec .read() .map_err(|_| FlexiLoggerError::Poison)? .clone(), ); self.set_new_spec(LogSpecification::parse(new_spec)?); Ok(()) } /// Reverts to the previous `LogSpecification`, if any. pub fn pop_temp_spec(&mut self) { if let Some(previous_spec) = self.writers_handle.spec_stack.pop() { self.set_new_spec(previous_spec); } } /// Flush all writers. pub fn flush(&self) { self.writers_handle.primary_writer.flush().ok(); for writer in self.writers_handle.other_writers.values() { writer.flush().ok(); } } /// Replaces parts of the configuration of the file log writer. 
/// /// Note that neither the write mode nor the format function can be reset and /// that the provided `FileLogWriterBuilder` must have the same values for these as the /// currently used `FileLogWriter`. /// /// # Example /// /// See [`code_examples`](code_examples/index.html#reconfigure-the-file-log-writer). /// /// # Errors /// /// `FlexiLoggerError::NoFileLogger` if no file log writer is configured. /// /// `FlexiLoggerError::Reset` if a reset was tried with a different write mode. /// /// `FlexiLoggerError::Io` if the specified path doesn't work. /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. pub fn reset_flw(&self, flwb: &FileLogWriterBuilder) -> Result<(), FlexiLoggerError> { if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer { mw.reset_file_log_writer(flwb) } else { Err(FlexiLoggerError::NoFileLogger) } } /// Returns the current configuration of the file log writer. /// /// # Errors /// /// `FlexiLoggerError::NoFileLogger` if no file log writer is configured. /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. pub fn flw_config(&self) -> Result { if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer { mw.flw_config() } else { Err(FlexiLoggerError::NoFileLogger) } } /// Makes the logger re-open the current log file. /// /// If the log is written to a file, `flexi_logger` expects that nobody else modifies the file, /// and offers capabilities to rotate, compress, and clean up log files. /// /// However, if you use tools like linux' `logrotate` /// to rename or delete the current output file, you need to inform `flexi_logger` about /// such actions by calling this method. Otherwise `flexi_logger` will not stop /// writing to the renamed or even deleted file! /// /// In more complex configurations, i.e. when more than one output stream is written to, /// all of them will be attempted to be re-opened; only the first error will be reported. /// /// # Example /// /// `logrotate` e.g. can be configured to send a `SIGHUP` signal to your program. You need to /// handle `SIGHUP` in your program explicitly, /// e.g. using a crate like [`ctrlc`](https://docs.rs/ctrlc/latest/ctrlc/), /// and call this function from the registered signal handler. /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. /// /// Other variants of `FlexiLoggerError`, depending on the used writers. pub fn reopen_output(&self) -> Result<(), FlexiLoggerError> { let mut result = if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer { mw.reopen_output() } else { Ok(()) }; for blw in self.writers_handle.other_writers.values() { let result2 = blw.reopen_output(); if result.is_ok() && result2.is_err() { result = result2; } } result } /// Trigger an extra log file rotation. /// /// Does nothing if rotation is not configured. /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. /// /// IO errors. pub fn trigger_rotation(&self) -> Result<(), FlexiLoggerError> { let mut result = if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer { mw.trigger_rotation() } else { Ok(()) }; for blw in self.writers_handle.other_writers.values() { let result2 = blw.rotate(); if result.is_ok() && result2.is_err() { result = result2; } } result } /// Shutdown all participating writers. 
/// /// This method is supposed to be called at the very end of your program, if /// /// - you use some [`Cleanup`](crate::Cleanup) strategy with compression: /// then you want to ensure that a termination of your program /// does not interrput the cleanup-thread when it is compressing a log file, /// which could leave unexpected files in the filesystem /// - you use your own writer(s), and they need to clean up resources /// /// See also [`writers::LogWriter::shutdown`](crate::writers::LogWriter::shutdown). pub fn shutdown(&self) { self.writers_handle.primary_writer.shutdown(); for writer in self.writers_handle.other_writers.values() { writer.shutdown(); } } /// Returns the list of existing log files according to the current `FileSpec`. /// /// Depending on the given selector, the list may include the CURRENT log file /// and the compressed files, if they exist. /// The list is empty if the logger is not configured for writing to files. /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. pub fn existing_log_files( &self, selector: &LogfileSelector, ) -> Result, FlexiLoggerError> { let mut log_files = self .writers_handle .primary_writer .existing_log_files(selector)?; log_files.sort(); Ok(log_files) } /// Allows re-configuring duplication to stderr. /// /// # Errors /// /// `FlexiLoggerError::NoDuplication` /// if `FlexiLogger` was initialized without duplication support pub fn adapt_duplication_to_stderr(&mut self, dup: Duplicate) -> Result<(), FlexiLoggerError> { if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer { mw.adapt_duplication_to_stderr(dup); Ok(()) } else { Err(FlexiLoggerError::NoFileLogger) } } /// Allows re-configuring duplication to stdout. /// /// # Errors /// /// `FlexiLoggerError::NoDuplication` /// if `FlexiLogger` was initialized without duplication support pub fn adapt_duplication_to_stdout(&mut self, dup: Duplicate) -> Result<(), FlexiLoggerError> { if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer { mw.adapt_duplication_to_stdout(dup); Ok(()) } else { Err(FlexiLoggerError::NoFileLogger) } } // Allows checking the logs written so far to the writer #[doc(hidden)] pub fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { self.writers_handle.primary_writer.validate_logs(expected); } // Allows checking the logs written so far to the writer #[doc(hidden)] pub fn validate_additional_logs( &self, target: &str, expected: &[(&'static str, &'static str, &'static str)], ) { self.writers_handle .other_writers .get(target) .unwrap(/*fail fast*/) .validate_logs(expected); } } /// Used in [`LoggerHandle::existing_log_files`]. /// /// Example: /// /// ```rust /// # use flexi_logger::{LogfileSelector,Logger}; /// # let logger_handle = Logger::try_with_env().unwrap().start().unwrap(); /// let all_log_files = logger_handle.existing_log_files( /// &LogfileSelector::default() /// .with_r_current() /// .with_compressed_files() /// ); /// ``` pub struct LogfileSelector { pub(crate) with_plain_files: bool, pub(crate) with_r_current: bool, pub(crate) with_compressed_files: bool, pub(crate) with_configured_current: Option, } impl Default for LogfileSelector { /// Selects plain log files without the `rCURRENT` file. fn default() -> Self { Self { with_plain_files: true, with_r_current: false, with_compressed_files: false, with_configured_current: None, } } } impl LogfileSelector { /// Selects no file at all. 
#[must_use] pub fn none() -> Self { Self { with_plain_files: false, with_r_current: false, with_compressed_files: false, with_configured_current: None, } } /// Selects additionally the `rCURRENT` file. #[must_use] pub fn with_r_current(mut self) -> Self { self.with_r_current = true; self } /// Selects additionally a custom "current" file. #[must_use] pub fn with_custom_current(mut self, s: &str) -> Self { self.with_configured_current = Some(s.to_string()); self } /// Selects additionally the compressed log files. #[must_use] pub fn with_compressed_files(mut self) -> Self { self.with_compressed_files = true; self } } #[derive(Clone)] pub(crate) struct WritersHandle { spec: Arc>, spec_stack: Vec, primary_writer: Arc, other_writers: Arc>>, } impl WritersHandle { fn set_new_spec(&self, new_spec: LogSpecification) -> Result<(), FlexiLoggerError> { let max_level = new_spec.max_level(); self.spec .write() .map_err(|_| FlexiLoggerError::Poison)? .update_from(new_spec); self.reconfigure(max_level); Ok(()) } pub(crate) fn reconfigure(&self, mut max_level: log::LevelFilter) { for w in self.other_writers.as_ref().values() { max_level = std::cmp::max(max_level, w.max_log_level()); } log::set_max_level(max_level); } } impl Drop for WritersHandle { fn drop(&mut self) { self.primary_writer.shutdown(); for writer in self.other_writers.values() { writer.shutdown(); } } } /// Trait that allows to register for changes to the log specification. #[cfg(feature = "specfile_without_notification")] #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] pub trait LogSpecSubscriber: 'static + Send { /// Apply a new `LogSpecification`. /// /// # Errors fn set_new_spec(&mut self, new_spec: LogSpecification) -> Result<(), FlexiLoggerError>; /// Provide the current log spec. /// /// # Errors fn initial_spec(&self) -> Result; } #[cfg(feature = "specfile_without_notification")] impl LogSpecSubscriber for WritersHandle { fn set_new_spec(&mut self, new_spec: LogSpecification) -> Result<(), FlexiLoggerError> { WritersHandle::set_new_spec(self, new_spec) } fn initial_spec(&self) -> Result { Ok((*self.spec.read().map_err(|_e| FlexiLoggerError::Poison)?).clone()) } } flexi_logger-0.29.8/src/parameters/age.rs000064400000000000000000000012761046102023000164340ustar 00000000000000/// The age after which a log file rotation will be triggered, /// when [`Criterion::Age`](crate::Criterion::Age) is chosen. #[derive(Copy, Clone, Debug)] pub enum Age { /// Rotate the log file when the local clock has started a new day since the /// current file had been created. Day, /// Rotate the log file when the local clock has started a new hour since the /// current file had been created. Hour, /// Rotate the log file when the local clock has started a new minute since the /// current file had been created. Minute, /// Rotate the log file when the local clock has started a new second since the /// current file had been created. Second, } flexi_logger-0.29.8/src/parameters/cleanup.rs000064400000000000000000000033471046102023000173300ustar 00000000000000/// Defines the strategy for handling older log files. /// /// Is used in [`Logger::rotate`](crate::Logger::rotate). /// /// Note that if you use a strategy other than `Cleanup::Never`, then the cleanup work is /// by default done in an extra thread, to minimize the impact on the program. /// /// See [`LoggerHandle::shutdown`](crate::LoggerHandle::shutdown) /// to avoid interrupting a currently active cleanup when your program terminates. 
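///
/// A minimal configuration sketch that keeps the seven most recent rotated log files
/// (rotation itself is configured via [`Logger::rotate`](crate::Logger::rotate)):
///
/// ```rust
/// # use flexi_logger::{Age, Cleanup, Criterion, FileSpec, Logger, Naming};
/// let _logger = Logger::try_with_str("info").unwrap()
///     .log_to_file(FileSpec::default())
///     .rotate(
///         Criterion::Age(Age::Day),
///         Naming::Timestamps,
///         Cleanup::KeepLogFiles(7),
///     );
/// ```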
/// /// See /// [`Logger::cleanup_in_background_thread`](crate::Logger::cleanup_in_background_thread) /// if you want to control whether this extra thread is created and used. #[derive(Copy, Clone, Debug)] pub enum Cleanup { /// Older log files are not touched - they remain for ever. Never, /// The specified number of rotated log files are kept. /// Older files are deleted, if necessary. KeepLogFiles(usize), /// The specified number of rotated log files are compressed and kept. /// Older files are deleted, if necessary. #[cfg_attr(docsrs, doc(cfg(feature = "compress")))] #[cfg(feature = "compress")] KeepCompressedFiles(usize), /// Allows keeping some files as text files and some as compressed files. /// /// ## Example /// /// `KeepLogAndCompressedFiles(5,30)` ensures that the youngest five log files are /// kept as text files, the next 30 are kept as compressed files with additional suffix `.gz`, /// and older files are removed. #[cfg_attr(docsrs, doc(cfg(feature = "compress")))] #[cfg(feature = "compress")] KeepLogAndCompressedFiles(usize, usize), } impl Cleanup { // Returns true if some cleanup is to be done. #[must_use] pub(crate) fn do_cleanup(&self) -> bool { !matches!(self, Self::Never) } } flexi_logger-0.29.8/src/parameters/criterion.rs000064400000000000000000000052201046102023000176670ustar 00000000000000use super::age::Age; /// Criterion when to rotate the log file. /// /// Used in [`Logger::rotate`](crate::Logger::rotate). #[derive(Copy, Clone, Debug)] pub enum Criterion { /// Rotate the log file when it exceeds the specified size in bytes. Size(u64), /// Rotate the log file when it has become older than the specified age. /// /// ## Minor limitation /// /// ### TL,DR /// the combination of `Logger::append()` /// with `Criterion::Age` works OK, but not perfectly correct on Windows or unix /// when the program is restarted. /// /// ### Details /// Applying the age criterion works fine while your program is running. /// Ideally, we should also apply it to the rCURRENT file when the program is restarted /// and you chose the `Logger::append()` option. /// /// Unfortunately, this does not work on Windows, and it does not work on unix, /// for different reasons. /// /// To minimize the impact on age-based file-rotation, /// `flexi_logger` uses on Windows, and on all other platforms where the creation date /// of a file is not available (like on Unix), the last modification date /// (or, if this is also not available, the current time stamp) /// as the created_at-info of an rCURRENT file that already exists when the program is started, /// and the current timestamp when file rotation happens during further execution. /// Consequently, a left-over rCURRENT file from a previous program run will look newer /// than it is, and will be used longer than it should be. /// /// #### Issue on Linux /// /// Linux does not maintain a created-at property for files, only a last-changed-at property. /// /// #### Issue on Windows /// /// For compatibility with DOS (sic!), Windows magically transfers the created_at-info /// of a file that is deleted (or renamed) to its successor, /// when the recreation happens within some seconds [\[1\]](#ref-1). /// /// If the file property were used by `flexi_logger`, /// the rCURRENT file would always appear to be as old as the /// first one that ever was created - rotation by time would fail completely. 
/// /// \[1\] [https://superuser.com/questions/966490/windows-7-what-is-date-created-file-property-referring-to](https://superuser.com/questions/966490/windows-7-what-is-date-created-file-property-referring-to). /// Age(Age), /// Rotate the file when it has either become older than the specified age, or when it has /// exceeded the specified size in bytes. /// /// See documentation for Age and Size. AgeOrSize(Age, u64), } flexi_logger-0.29.8/src/parameters/file_spec.rs000064400000000000000000000612561046102023000176350ustar 00000000000000use crate::writers::file_log_writer::InfixFilter; use crate::{DeferredNow, FlexiLoggerError}; use std::{ ffi::{OsStr, OsString}, ops::Add, path::{Path, PathBuf}, }; /// Builder object for specifying the name and path of the log output file. /// /// The filename is built from several partially components, using this pattern: /// /// ``` = [][_][][_][][_][][.]``` /// /// - `[]`: This is by default the program's name, but can be set to a different value /// or suppressed at all. /// /// - `[_]`: Consecutive name parts are separated by an underscore. /// No underscore is used at the beginning of the filename and directly before the suffix. /// /// - `[]`: some optional name part that allows further differentiations. /// /// - `[]`: denotes the point in time when the program was started, if used. /// /// - `[infix]`: used with rotation to differentiate consecutive files. /// /// Without rotation, the default filename pattern uses the program name as basename, /// no discriminant, the timestamp of the program start /// (printed in the format "YYYY-MM-DD_hh-mm-ss"), /// and the suffix `.log`, e.g. /// /// ```myprog_2015-07-08_10-44-11.log```. /// /// This ensures that with every program start a new trace file is written that can easily /// be associated with a concrete program run. /// /// When the timestamp is suppressed with [`FileSpec::suppress_timestamp`], /// you get a fixed output file name. /// It is then worth considering whether a new program start should discard /// the content of an already existing outputfile or if it should append its new content to it /// (see [`Logger::append`](crate::Logger::append)). /// /// With rotation, the timestamp is by default suppressed and instead the infix is used. /// The infix starts always with "r". /// For more details how its precise content can be influenced, see [`Naming`](crate::Naming). /// #[derive(Debug, Clone, Eq, PartialEq)] pub struct FileSpec { pub(crate) directory: PathBuf, pub(crate) basename: String, pub(crate) o_discriminant: Option, timestamp_cfg: TimestampCfg, o_suffix: Option, pub(crate) use_utc: bool, } impl Default for FileSpec { /// Describes a file in the current folder, /// using, as its filestem, the program name followed by the current timestamp, /// and the suffix ".log". #[must_use] fn default() -> Self { FileSpec { directory: PathBuf::from("."), basename: Self::default_basename(), o_discriminant: None, timestamp_cfg: TimestampCfg::Default, o_suffix: Some(String::from("log")), use_utc: false, } } } impl FileSpec { fn default_basename() -> String { let arg0 = std::env::args().next().unwrap_or_else(|| "rs".to_owned()); Path::new(&arg0).file_stem().map(OsStr::to_string_lossy).unwrap(/*cannot fail*/).to_string() } /// The provided path should describe a log file. /// If it exists, it must be a file, not a folder. /// If necessary, parent folders will be created. 
/// /// ```rust /// # use flexi_logger::FileSpec; /// assert_eq!( /// FileSpec::default() /// .directory("/a/b/c") /// .basename("foo") /// .suppress_timestamp() /// .suffix("bar"), /// FileSpec::try_from("/a/b/c/foo.bar").unwrap() /// ); /// ``` /// # Errors /// /// [`FlexiLoggerError::OutputBadFile`] if the given path exists and is a folder. /// /// # Panics /// /// Panics if the basename of the given path has no filename pub fn try_from>(p: P) -> Result { let p: PathBuf = p.into(); if p.is_dir() { Err(FlexiLoggerError::OutputBadFile) } else { Ok(FileSpec { directory: p.parent().unwrap(/*cannot fail*/).to_path_buf(), basename: p.file_stem().unwrap(/*ok*/).to_string_lossy().to_string(), o_discriminant: None, o_suffix: p.extension().map(|s| s.to_string_lossy().to_string()), timestamp_cfg: TimestampCfg::No, use_utc: false, }) } } /// Makes the logger not include a basename into the names of the log files /// /// Equivalent to `basename("")`. #[must_use] pub fn suppress_basename(self) -> Self { self.basename("") } /// The specified String is used as the basename of the log file name, /// instead of the program name. Using a file separator within the argument is discouraged. #[must_use] pub fn basename>(mut self, basename: S) -> Self { self.basename = basename.into(); self } /// The specified String is used as the basename of the log file, /// instead of the program name, which is used when `None` is given. #[must_use] pub fn o_basename>(mut self, o_basename: Option) -> Self { self.basename = o_basename.map_or_else(Self::default_basename, Into::into); self } /// Specifies a folder for the log files. /// /// If the specified folder does not exist, it will be created. /// By default, the log files are created in the folder where the program was started. #[must_use] pub fn directory>(mut self, directory: P) -> Self { self.directory = directory.into(); self } /// Specifies a folder for the log files. /// /// If the specified folder does not exist, it will be created. /// With None, the log files are created in the folder where the program was started. #[must_use] pub fn o_directory>(mut self, directory: Option
<P>
) -> Self { self.directory = directory.map_or_else(|| PathBuf::from("."), Into::into); self } /// The specified String is added to the log file name. #[must_use] pub fn discriminant>(self, discriminant: S) -> Self { self.o_discriminant(Some(discriminant)) } /// The specified String is added to the log file name. #[must_use] pub fn o_discriminant>(mut self, o_discriminant: Option) -> Self { self.o_discriminant = o_discriminant.map(Into::into); self } /// Specifies a suffix for the log files. /// /// Equivalent to `o_suffix(Some(suffix))`. #[must_use] pub fn suffix>(self, suffix: S) -> Self { self.o_suffix(Some(suffix)) } /// Specifies a suffix for the log files, or supresses the use of a suffix completely. /// /// The default suffix is "log". #[must_use] pub fn o_suffix>(mut self, o_suffix: Option) -> Self { self.o_suffix = o_suffix.map(Into::into); self } /// Makes the logger not include the start time into the names of the log files /// /// Equivalent to `use_timestamp(false)`. #[must_use] pub fn suppress_timestamp(self) -> Self { self.use_timestamp(false) } /// Defines if the start time should be included into the names of the log files. /// /// The _default_ behavior depends on the usage: /// - without rotation, a timestamp is by default included into the name /// - with rotation, the timestamp is by default suppressed #[must_use] pub fn use_timestamp(mut self, use_timestamp: bool) -> Self { self.timestamp_cfg = if use_timestamp { TimestampCfg::Yes } else { TimestampCfg::No }; self } #[doc(hidden)] #[must_use] pub fn used_directory(&self) -> PathBuf { self.directory.clone() } pub(crate) fn has_basename(&self) -> bool { !self.basename.is_empty() } pub(crate) fn has_discriminant(&self) -> bool { self.o_discriminant.is_some() } pub(crate) fn uses_timestamp(&self) -> bool { matches!(self.timestamp_cfg, TimestampCfg::Yes) } // If no decision was done yet, decide now whether to include a timestamp // into the names of the log files. pub(crate) fn if_default_use_timestamp(&mut self, use_timestamp: bool) { if let TimestampCfg::Default = self.timestamp_cfg { self.timestamp_cfg = if use_timestamp { TimestampCfg::Yes } else { TimestampCfg::No }; } } pub(crate) fn get_directory(&self) -> PathBuf { self.directory.clone() } pub(crate) fn get_suffix(&self) -> Option { self.o_suffix.clone() } // basename + o_discriminant + o_timestamp pub(crate) fn fixed_name_part(&self) -> String { let mut fixed_name_part = self.basename.clone(); fixed_name_part.reserve(50); if let Some(discriminant) = &self.o_discriminant { append_underscore_if_not_empty(&mut fixed_name_part); fixed_name_part.push_str(discriminant); } if let Some(timestamp) = &self.timestamp_cfg.get_timestamp() { append_underscore_if_not_empty(&mut fixed_name_part); fixed_name_part.push_str(timestamp); } fixed_name_part } /// Derives a `PathBuf` from the spec and the given infix. 
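///
/// A small sketch (the basename "app" and the infix "r00000" are just illustrative values):
///
/// ```rust
/// # use flexi_logger::FileSpec;
/// let path = FileSpec::default()
///     .basename("app")
///     .suppress_timestamp()
///     .as_pathbuf(Some("r00000"));
/// assert_eq!(path.file_name().unwrap().to_string_lossy(), "app_r00000.log");
/// ```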
#[must_use] pub fn as_pathbuf(&self, o_infix: Option<&str>) -> PathBuf { let mut filename = self.fixed_name_part(); if let Some(infix) = o_infix { if !infix.is_empty() { append_underscore_if_not_empty(&mut filename); filename.push_str(infix); } }; if let Some(suffix) = &self.o_suffix { filename.push('.'); filename.push_str(suffix); } let mut p_path = self.directory.clone(); p_path.push(filename); p_path } // handles collisions by appending ".restart-" to the infix, if necessary pub(crate) fn collision_free_infix_for_rotated_file(&self, infix: &str) -> String { let uncompressed_files = self.list_of_files( &InfixFilter::Equls(infix.to_string()), self.o_suffix.as_deref(), ); let compressed_files = self.list_of_files(&InfixFilter::Equls(infix.to_string()), Some("gz")); let mut restart_siblings = uncompressed_files .into_iter() .chain(compressed_files) .filter(|pb| { // ignore .gz suffix let mut pb2 = PathBuf::from(pb); if pb2.extension() == Some(OsString::from("gz").as_ref()) { pb2.set_extension(""); }; // suffix must match the given suffix, if one is given match self.o_suffix { Some(ref sfx) => pb2.extension() == Some(OsString::from(sfx).as_ref()), None => true, } }) .filter(|pb| { pb.file_name() .unwrap() .to_string_lossy() .contains(".restart-") }) .collect::>(); let new_path = self.as_pathbuf(Some(infix)); let new_path_with_gz = { let mut new_path_with_gz = new_path.clone(); new_path_with_gz .set_extension([self.o_suffix.as_deref().unwrap_or(""), ".gz"].concat()); new_path_with_gz }; // if collision would occur (new_path or compressed new_path exists already), // find highest restart and add 1, else continue without restart if new_path.exists() || new_path_with_gz.exists() || !restart_siblings.is_empty() { let next_number = if restart_siblings.is_empty() { 0 } else { restart_siblings.sort_unstable(); let new_path = restart_siblings.pop().unwrap(/*ok*/); let file_stem_string = if self.o_suffix.is_some() { new_path .file_stem().unwrap(/*ok*/) .to_string_lossy().to_string() } else { new_path.to_string_lossy().to_string() }; let index = file_stem_string.find(".restart-").unwrap(/*ok*/); file_stem_string[(index + 9)..(index + 13)].parse::().unwrap(/*ok*/) + 1 }; infix.to_string().add(&format!(".restart-{next_number:04}")) } else { infix.to_string() } } pub(crate) fn list_of_files( &self, infix_filter: &InfixFilter, o_suffix: Option<&str>, ) -> Vec { self.filter_files(&self.read_dir_related_files(), infix_filter, o_suffix) } // returns an ordered list of all files in the right directory that start with the fixed_name_part pub(crate) fn read_dir_related_files(&self) -> Vec { let fixed_name_part = self.fixed_name_part(); let mut log_files = std::fs::read_dir(&self.directory) .unwrap(/*ignore errors from reading the directory*/) .flatten(/*ignore errors from reading entries in the directory*/) .filter(|entry| entry.path().is_file()) .map(|de| de.path()) .filter(|path| { // fixed name part must match if let Some(fln) = path.file_name() { fln.to_string_lossy(/*good enough*/).starts_with(&fixed_name_part) } else { false } }) .collect::>(); log_files.sort_unstable(); log_files.reverse(); log_files } pub(crate) fn filter_files( &self, files: &[PathBuf], infix_filter: &InfixFilter, o_suffix: Option<&str>, ) -> Vec { let fixed_name_part = self.fixed_name_part(); files .iter() .filter(|path| { // if suffix is specified, it must match if let Some(suffix) = o_suffix { path.extension().is_some_and(|ext| { let s = ext.to_string_lossy(); s == suffix }) } else { true } }) .filter(|path| { // infix filter must 
pass let stem = path.file_stem().unwrap(/* CANNOT FAIL*/).to_string_lossy(); let infix_start = if fixed_name_part.is_empty() { 0 } else { fixed_name_part.len() + 1 // underscore at the end }; if stem.len() <= infix_start { return false; } let maybe_infix = &stem[infix_start..]; let end = maybe_infix.find('.').unwrap_or(maybe_infix.len()); infix_filter.filter_infix(&maybe_infix[..end]) }) .map(PathBuf::clone) .collect::>() } #[cfg(test)] pub(crate) fn get_timestamp(&self) -> Option { self.timestamp_cfg.get_timestamp() } } fn append_underscore_if_not_empty(filename: &mut String) { if !filename.is_empty() { filename.push('_'); } } const TS_USCORE_DASHES_USCORE_DASHES: &str = "%Y-%m-%d_%H-%M-%S"; #[derive(Debug, Clone, Eq, PartialEq)] enum TimestampCfg { Default, Yes, No, } impl TimestampCfg { fn get_timestamp(&self) -> Option { match self { Self::Default | Self::Yes => Some( DeferredNow::new() .format(TS_USCORE_DASHES_USCORE_DASHES) .to_string(), ), Self::No => None, } } } #[cfg(test)] mod test { use super::{FileSpec, TimestampCfg}; use crate::writers::file_log_writer::InfixFilter; use std::{ fs::File, path::{Path, PathBuf}, }; #[test] fn test_timstamp_cfg() { let ts = TimestampCfg::Yes; let s = ts.get_timestamp().unwrap(/* OK */); let bytes = s.into_bytes(); assert_eq!(bytes[4], b'-'); assert_eq!(bytes[7], b'-'); assert_eq!(bytes[10], b'_'); assert_eq!(bytes[13], b'-'); assert_eq!(bytes[16], b'-'); } #[test] fn test_default() { let path = FileSpec::default().as_pathbuf(None); assert_file_spec(&path, &PathBuf::from("."), true, "log"); } // todo: does not support suppress_timestamp & suppress_basename & use discriminant fn assert_file_spec(path: &Path, folder: &Path, with_timestamp: bool, suffix: &str) { // check folder assert_eq!( path.parent().unwrap(), // .canonicalize().unwrap() folder // .canonicalize().unwrap() ); // check file stem // - should start with progname let progname = PathBuf::from(std::env::args().next().unwrap()) .file_stem() .unwrap() .to_string_lossy() .clone() .to_string(); let stem = path .file_stem() .unwrap() .to_string_lossy() .clone() .to_string(); assert!( stem.starts_with(&progname), "stem: {stem:?}, progname: {progname:?}", ); if with_timestamp { // followed by _ and timestamp assert_eq!(stem.as_bytes()[progname.len()], b'_'); let s_ts = &stem[progname.len() + 1..]; assert!( chrono::NaiveDateTime::parse_from_str(s_ts, "%Y-%m-%d_%H-%M-%S").is_ok(), "s_ts: \"{s_ts}\"", ); } else { assert_eq!( stem.len(), progname.len(), "stem: {stem:?}, progname: {progname:?}", ); } // check suffix assert_eq!(path.extension().unwrap(), suffix); } #[test] fn test_if_default_use_timestamp() { // default() + if_default_use_timestamp(false) => false { let mut fs = FileSpec::default(); fs.if_default_use_timestamp(false); let path = fs.as_pathbuf(None); assert_file_spec(&path, &PathBuf::from("."), false, "log"); } // default() + use_timestamp(true) + if_default_use_timestamp(false) => true { let mut fs = FileSpec::default().use_timestamp(true); fs.if_default_use_timestamp(false); let path = fs.as_pathbuf(None); assert_file_spec(&path, &PathBuf::from("."), true, "log"); } // default() + use_timestamp(false) + if_default_use_timestamp(true) + => true { let mut fs = FileSpec::default(); fs.if_default_use_timestamp(false); let path = fs.use_timestamp(true).as_pathbuf(None); assert_file_spec(&path, &PathBuf::from("."), true, "log"); } // default() + if_default_use_timestamp(false) + use_timestamp(true) => true { let mut fs = FileSpec::default(); fs.if_default_use_timestamp(false); let path = 
fs.use_timestamp(true).as_pathbuf(None); assert_file_spec(&path, &PathBuf::from("."), true, "log"); } } #[test] fn test_from_url() { let path = FileSpec::try_from("/a/b/c/d_foo_bar.trc") .unwrap() .as_pathbuf(None); // check folder assert_eq!(path.parent().unwrap(), PathBuf::from("/a/b/c")); // check filestem // - should start with progname let stem = path .file_stem() .unwrap() .to_string_lossy() .clone() .to_string(); assert_eq!(stem, "d_foo_bar"); // check suffix assert_eq!(path.extension().unwrap(), "trc"); } #[test] fn test_basename() { { let path = FileSpec::try_from("/a/b/c/d_foo_bar.trc") .unwrap() .o_basename(Some("boo_far")) .as_pathbuf(None); // check folder assert_eq!(path.parent().unwrap(), PathBuf::from("/a/b/c")); // check filestem // - should start with progname let stem = path .file_stem() .unwrap() .to_string_lossy() .clone() .to_string(); assert_eq!(stem, "boo_far"); // check suffix assert_eq!(path.extension().unwrap(), "trc"); } { let path = FileSpec::try_from("/a/b/c/d_foo_bar.trc") .unwrap() .o_basename(Option::::None) .as_pathbuf(None); assert_file_spec(&path, &PathBuf::from("/a/b/c"), false, "trc"); } } #[test] fn test_directory_and_suffix() { { let path = FileSpec::try_from("/a/b/c/d_foo_bar.trc") .unwrap() .directory("/x/y/z") .o_suffix(Some("txt")) .o_basename(Option::::None) .as_pathbuf(None); assert_file_spec(&path, &PathBuf::from("/x/y/z"), false, "txt"); } } #[test] fn test_discriminant() { let path = FileSpec::try_from("/a/b/c/d_foo_bar.trc") .unwrap() .directory("/x/y/z") .o_suffix(Some("txt")) .o_discriminant(Some("1234")) .as_pathbuf(None); assert_eq!( path.file_name().unwrap().to_str().unwrap(), "d_foo_bar_1234.txt" ); } #[test] fn test_suppress_basename() { let path = FileSpec::try_from("/a/b/c/d_foo_bar.trc") .unwrap() .suppress_basename() .o_suffix(Some("txt")) .o_discriminant(Some("1234")) .as_pathbuf(None); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "1234.txt"); } #[test] fn test_empty_base_name() { let path = FileSpec::default() .suppress_basename() .suppress_timestamp() .o_discriminant(Option::::None) .as_pathbuf(None); assert_eq!(path.file_name().unwrap(), ".log"); } #[test] fn test_empty_name() { let path = FileSpec::default() .suppress_basename() .suppress_timestamp() .o_suffix(Option::::None) .as_pathbuf(None); assert!(path.file_name().is_none()); } #[test] fn issue_178() { let path = FileSpec::default() .basename("BASENAME") .suppress_timestamp() .as_pathbuf(Some("")); assert_eq!(path.file_name().unwrap().to_string_lossy(), "BASENAME.log"); let path = FileSpec::default() .basename("BASENAME") .discriminant("1") .suppress_timestamp() .as_pathbuf(Some("")); assert_eq!( path.file_name().unwrap().to_string_lossy(), "BASENAME_1.log" ); } #[test] fn test_list_of_files() { let dir = temp_dir::TempDir::new().unwrap(); let pd = dir.path(); let filespec: FileSpec = FileSpec::default() .directory(pd) .basename("Base") .discriminant("Discr") .use_timestamp(true); println!("Filespec: {}", filespec.as_pathbuf(Some("Infix")).display()); let mut fn1 = String::new(); fn1.push_str("Base_Discr_"); fn1.push_str(&filespec.get_timestamp().unwrap()); fn1.push_str("_Infix"); fn1.push_str(".log"); assert_eq!( filespec .as_pathbuf(Some("Infix")) .file_name() .unwrap() .to_string_lossy(), fn1 ); // create typical set of files, and noise create_file(pd, "test1.txt"); create_file(pd, &build_filename(&filespec, "Infix1")); create_file(pd, &build_filename(&filespec, "Infix2")); println!("\nFolder content:"); for entry in std::fs::read_dir(pd).unwrap() { 
println!(" {}", entry.unwrap().path().display()); } println!("\nRelevant subset:"); for pb in filespec.list_of_files(&InfixFilter::StartsWth("Infix".to_string()), Some("log")) { println!(" {}", pb.display()); } } fn build_filename(file_spec: &FileSpec, infix: &str) -> String { let mut fn1 = String::new(); fn1.push_str("Base_Discr_"); fn1.push_str(&file_spec.get_timestamp().unwrap()); fn1.push('_'); fn1.push_str(infix); fn1.push_str(".log"); fn1 } fn create_file(dir: &Path, filename: &str) { File::create(dir.join(filename)).unwrap(); } } flexi_logger-0.29.8/src/parameters/naming.rs000064400000000000000000000112211046102023000171400ustar 00000000000000/// The naming convention for rotated log files. /// /// Common rule for all variants is that the names of the current output file /// and the rotated log files only differ in the infix. /// /// See [`Logger::log_to_file`](crate::Logger::log_to_file) /// for a description of how the filename is built, including the infix. /// /// See the variants for how the infix is used by them. /// /// Used in [`Logger::rotate`](crate::Logger::rotate). #[derive(Copy, Clone, Debug)] pub enum Naming { /// Logs are written to a file with infix `rCURRENT`. /// /// File rotation renames this file to a name with a timestamp-infix /// like `"r2023-01-27_14-41-08"`, logging continues with a fresh file with infix `rCURRENT`. /// /// If multiple rotations happen within the same second, extended infixes are used like /// `"r2023-01-27_14-41-08.restart-0001"`. /// /// Same as /// ```rust /// # use flexi_logger::Naming; /// # let dummy = /// Naming::TimestampsCustomFormat { /// current_infix: Some("rCURRENT"), /// format: "r%Y-%m-%d_%H-%M-%S", /// } /// # ; /// ``` Timestamps, /// Logs are written to a file with a timestamp-infix, like `"r2023-01-27_14-41-08"`. /// /// File rotation switches over to the next file. /// /// If multiple rotations happen within the same second, extended infixes are used like /// `"r2023-01-27_14-41-08.restart-0001"`. /// /// Same as /// ```rust /// # use flexi_logger::Naming; /// # let dummy = /// Naming::TimestampsCustomFormat { /// current_infix: None, /// format: "r%Y-%m-%d_%H-%M-%S", /// } /// # ; /// ``` TimestampsDirect, /// Defines the infixes for the file to which the logs are written, and for the rotated files. TimestampsCustomFormat { /// Controls if a special infix is used for the file to which the logs are currently /// written. /// /// If `Some(infix)` is given, then it is taken as static infix for the file /// to which the logs are written. /// File rotation renames this file to a file with a timestamp infix. /// If this file already exists, an extended infix is used like /// `"2024-06-09.restart-0001"`. /// /// If `None` is given, then the logs will be directly written to a file with timestamp infix. /// File rotation only switches over to a new file with a fresh timestamp infix. /// If this file already exists, e.g. because rotation is triggered more frequently /// than the timestamp varies (according to the pattern), then an extended infix is used like /// `"2024-06-09.restart-0001"`. current_infix: Option<&'static str>, /// The format of the timestamp infix. /// /// See for a list of /// supported specifiers. 
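/// (The specifiers are those of `chrono`'s `strftime` implementation, see
/// <https://docs.rs/chrono/latest/chrono/format/strftime/index.html>.)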
/// /// **Make sure to use a format** /// /// - that is compatible to your file system(s) (e.g., don't use slashes), /// - that can be used by /// [chrono::NaiveDateTime](https://docs.rs/chrono/latest/chrono/naive/struct.NaiveDateTime.html#method.parse_from_str) /// or [chrono::NaiveDate](https://docs.rs/chrono/latest/chrono/naive/struct.NaiveDate.html#method.parse_from_str) /// /// Further, if you choose `current_infix` = `None` or `Some("")`, make sure to rotate only /// by [age](crate::Criterion::Age), and choose an age that is not smaller than what /// is expressed in the infix (e.g., don't rotate by minute if the infix only shows days). /// /// Examples: /// /// `"%Y-%m-%d"` produces timestamp infixes like `"2024-06-09"`. /// /// `"%Y-%m-%d_%H-%M-%S"` produces timestamp infixes like `"2024-06-09_13-24-35"`. format: &'static str, }, /// Logs are written to a file with infix `rCURRENT`. /// /// File rotation renames this file to a name with a number-infix /// like `"r00000"`, `"r00001"`, etc., /// logging continues with a fresh file with infix `rCURRENT`. Numbers, /// Logs are written to a file with a number-infix, /// like `"r00000"`, `"r00001"`, etc. /// /// File rotation switches over to the next file. NumbersDirect, } impl Naming { pub(crate) fn writes_direct(self) -> bool { matches!( self, Naming::NumbersDirect | Naming::TimestampsDirect | Naming::TimestampsCustomFormat { current_infix: None | Some(""), format: _ } ) } } flexi_logger-0.29.8/src/parameters.rs000064400000000000000000000003001046102023000156630ustar 00000000000000mod age; mod cleanup; mod criterion; mod file_spec; mod naming; pub use age::Age; pub use cleanup::Cleanup; pub use criterion::Criterion; pub use file_spec::FileSpec; pub use naming::Naming; flexi_logger-0.29.8/src/primary_writer/multi_writer.rs000064400000000000000000000173701046102023000213440ustar 00000000000000use crate::{ logger::Duplicate, util::{eprint_err, write_buffered, ErrorCode}, writers::{FileLogWriter, FileLogWriterBuilder, FileLogWriterConfig, LogWriter}, LogfileSelector, {DeferredNow, FlexiLoggerError, FormatFunction}, }; use log::Record; use std::{ io::Write, path::PathBuf, sync::atomic::{AtomicU8, Ordering}, }; // The `MultiWriter` writes logs to a FileLogWriter and/or another Writer, // and can duplicate messages to stderr or stdout. 
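// The duplication levels are kept in `AtomicU8` fields so that they can be changed at runtime
// via `adapt_duplication_to_stderr`/`adapt_duplication_to_stdout` without taking a lock.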
pub(crate) struct MultiWriter { duplicate_stderr: AtomicU8, duplicate_stdout: AtomicU8, support_capture: bool, format_for_stderr: FormatFunction, format_for_stdout: FormatFunction, o_file_writer: Option>, o_other_writer: Option>, } impl MultiWriter { pub(crate) fn new( duplicate_stderr: Duplicate, duplicate_stdout: Duplicate, support_capture: bool, format_for_stderr: FormatFunction, format_for_stdout: FormatFunction, o_file_writer: Option>, o_other_writer: Option>, ) -> Self { MultiWriter { duplicate_stderr: AtomicU8::new(duplicate_stderr as u8), duplicate_stdout: AtomicU8::new(duplicate_stdout as u8), support_capture, format_for_stderr, format_for_stdout, o_file_writer, o_other_writer, } } pub(crate) fn reset_file_log_writer( &self, flwb: &FileLogWriterBuilder, ) -> Result<(), FlexiLoggerError> { self.o_file_writer .as_ref() .map_or(Err(FlexiLoggerError::NoFileLogger), |flw| flw.reset(flwb)) } pub(crate) fn flw_config(&self) -> Result { self.o_file_writer .as_ref() .map_or(Err(FlexiLoggerError::NoFileLogger), |flw| flw.config()) } pub(crate) fn reopen_output(&self) -> Result<(), FlexiLoggerError> { match (&self.o_file_writer, &self.o_other_writer) { (None, None) => Ok(()), (Some(ref w), None) => w.reopen_outputfile(), (None, Some(w)) => w.reopen_output(), (Some(w1), Some(w2)) => { let r1 = w1.reopen_outputfile(); let r2 = w2.reopen_output(); match (r1, r2) { (Ok(()), Ok(())) => Ok(()), (Err(e), _) | (Ok(()), Err(e)) => Err(e), } } } } pub(crate) fn trigger_rotation(&self) -> Result<(), FlexiLoggerError> { match (&self.o_file_writer, &self.o_other_writer) { (None, None) => Ok(()), (Some(ref w), None) => w.rotate(), (None, Some(w)) => w.rotate(), (Some(w1), Some(w2)) => { let r1 = w1.rotate(); let r2 = w2.rotate(); match (r1, r2) { (Ok(()), Ok(())) => Ok(()), (Err(e), _) | (Ok(()), Err(e)) => Err(e), } } } } pub(crate) fn existing_log_files( &self, selector: &LogfileSelector, ) -> Result, FlexiLoggerError> { if let Some(fw) = self.o_file_writer.as_ref() { fw.existing_log_files(selector) } else { Ok(Vec::new()) } } pub(crate) fn adapt_duplication_to_stderr(&self, dup: Duplicate) { self.duplicate_stderr.store(dup as u8, Ordering::Relaxed); } pub(crate) fn adapt_duplication_to_stdout(&self, dup: Duplicate) { self.duplicate_stdout.store(dup as u8, Ordering::Relaxed); } fn duplication_to_stderr(&self) -> Duplicate { Duplicate::from(self.duplicate_stderr.load(Ordering::Relaxed)) } fn duplication_to_stdout(&self) -> Duplicate { Duplicate::from(self.duplicate_stdout.load(Ordering::Relaxed)) } } impl LogWriter for MultiWriter { fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { if let Some(ref writer) = self.o_file_writer { (*writer).validate_logs(expected); } if let Some(ref writer) = self.o_other_writer { (*writer).validate_logs(expected); } } fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { if match self.duplication_to_stderr() { Duplicate::Error => record.level() == log::Level::Error, Duplicate::Warn => record.level() <= log::Level::Warn, Duplicate::Info => record.level() <= log::Level::Info, Duplicate::Debug => record.level() <= log::Level::Debug, Duplicate::Trace | Duplicate::All => true, Duplicate::None => false, } { if self.support_capture { let mut tmp_buf = Vec::::with_capacity(200); (self.format_for_stderr)(&mut tmp_buf, now, record) .unwrap_or_else(|e| eprint_err(ErrorCode::Format, "formatting failed", &e)); eprintln!("{}", String::from_utf8_lossy(&tmp_buf)); } else { write_buffered( self.format_for_stderr, now, record, &mut 
std::io::stderr(), #[cfg(test)] None, )?; } } if match self.duplication_to_stdout() { Duplicate::Error => record.level() == log::Level::Error, Duplicate::Warn => record.level() <= log::Level::Warn, Duplicate::Info => record.level() <= log::Level::Info, Duplicate::Debug => record.level() <= log::Level::Debug, Duplicate::Trace | Duplicate::All => true, Duplicate::None => false, } { if self.support_capture { let mut tmp_buf = Vec::::with_capacity(200); (self.format_for_stdout)(&mut tmp_buf, now, record) .unwrap_or_else(|e| eprint_err(ErrorCode::Format, "formatting failed", &e)); println!("{}", String::from_utf8_lossy(&tmp_buf)); } else { write_buffered( self.format_for_stdout, now, record, &mut std::io::stdout(), #[cfg(test)] None, )?; } } if let Some(ref writer) = self.o_file_writer { writer.write(now, record)?; } if let Some(ref writer) = self.o_other_writer { writer.write(now, record)?; } Ok(()) } /// Provides the maximum log level that is to be written. fn max_log_level(&self) -> log::LevelFilter { *self .o_file_writer .as_ref() .map(|w| w.max_log_level()) .iter() .chain( self.o_other_writer .as_ref() .map(|w| w.max_log_level()) .iter(), ) .max() .unwrap(/*ok*/) } fn flush(&self) -> std::io::Result<()> { if let Some(ref writer) = self.o_file_writer { writer.flush()?; } if let Some(ref writer) = self.o_other_writer { writer.flush()?; } if !matches!(self.duplication_to_stderr(), Duplicate::None) { std::io::stderr().flush()?; } if !matches!(self.duplication_to_stdout(), Duplicate::None) { std::io::stdout().flush()?; } Ok(()) } fn shutdown(&self) { if let Some(ref writer) = self.o_file_writer { writer.shutdown(); } if let Some(ref writer) = self.o_other_writer { writer.shutdown(); } } } flexi_logger-0.29.8/src/primary_writer/std_stream.rs000064400000000000000000000026561046102023000207640ustar 00000000000000use std::io::{Error as IoError, Stderr, StderrLock, Stdout, StdoutLock, Write}; // Abstraction over stdout and stderr pub(crate) enum StdStream { Out(Stdout), Err(Stderr), } impl<'a> StdStream { pub(crate) fn deref_mut(&mut self) -> &mut dyn Write { match self { StdStream::Out(ref mut s) => s as &mut dyn Write, StdStream::Err(ref mut s) => s as &mut dyn Write, } } pub(crate) fn lock(&'a self) -> StdstreamLock<'a> { match self { StdStream::Out(ref s) => StdstreamLock::Out(s.lock()), StdStream::Err(ref s) => StdstreamLock::Err(s.lock()), } } } impl Write for StdStream { fn write(&mut self, buffer: &[u8]) -> std::result::Result { self.deref_mut().write(buffer) } fn flush(&mut self) -> std::result::Result<(), IoError> { self.deref_mut().flush() } } pub(crate) enum StdstreamLock<'a> { Out(StdoutLock<'a>), Err(StderrLock<'a>), } impl Write for StdstreamLock<'_> { fn write(&mut self, buffer: &[u8]) -> std::result::Result { match self { StdstreamLock::Out(l) => l.write(buffer), StdstreamLock::Err(l) => l.write(buffer), } } fn flush(&mut self) -> std::result::Result<(), IoError> { match self { StdstreamLock::Out(l) => l.flush(), StdstreamLock::Err(l) => l.flush(), } } } flexi_logger-0.29.8/src/primary_writer/std_writer.rs000064400000000000000000000205201046102023000207730ustar 00000000000000#[cfg(feature = "async")] use { crate::{ util::{eprint_err, ErrorCode, ASYNC_FLUSH, ASYNC_SHUTDOWN}, ZERO_DURATION, }, crossbeam_channel::{SendError, Sender}, crossbeam_queue::ArrayQueue, }; use { super::std_stream::StdStream, crate::{ util::{io_err, write_buffered}, writers::LogWriter, DeferredNow, EffectiveWriteMode, FormatFunction, WriteMode, }, log::Record, std::io::{BufWriter, Write}, }; #[cfg(test)] use 
std::io::Cursor; #[cfg(any(feature = "async", test))] use std::sync::Arc; use std::sync::Mutex; #[cfg(feature = "async")] use std::thread::JoinHandle; // `StdWriter` writes logs to stdout or stderr. pub(crate) struct StdWriter { format: FormatFunction, writer: InnerStdWriter, #[cfg(test)] validation_buffer: Arc>>>, } enum InnerStdWriter { Unbuffered(StdStream), Buffered(Mutex>), #[cfg(feature = "async")] Async(AsyncHandle), } #[cfg(feature = "async")] #[derive(Debug)] struct AsyncHandle { sender: Sender>, mo_thread_handle: Mutex>>, a_pool: Arc>>, msg_capa: usize, } #[cfg(feature = "async")] impl AsyncHandle { fn new( stdstream: StdStream, pool_capa: usize, msg_capa: usize, #[cfg(test)] validation_buffer: &Arc>>>, ) -> Self { let (sender, receiver) = crossbeam_channel::unbounded::>(); let a_pool = Arc::new(ArrayQueue::new(pool_capa)); let mo_thread_handle = crate::threads::start_async_stdwriter( stdstream, receiver, Arc::clone(&a_pool), msg_capa, #[cfg(test)] Arc::clone(validation_buffer), ); AsyncHandle { sender, mo_thread_handle, a_pool, msg_capa, } } fn pop_buffer(&self) -> Vec { self.a_pool .pop() .unwrap_or_else(|| Vec::with_capacity(self.msg_capa)) } fn send(&self, buffer: Vec) -> Result<(), SendError>> { self.sender.send(buffer) } } impl StdWriter { pub(crate) fn new( stdstream: StdStream, format: FormatFunction, write_mode: &WriteMode, ) -> Self { #[cfg(test)] let validation_buffer = Arc::new(Mutex::new(Cursor::new(Vec::::new()))); let writer = match write_mode.effective_write_mode() { EffectiveWriteMode::Direct => InnerStdWriter::Unbuffered(stdstream), EffectiveWriteMode::BufferDontFlushWith(capacity) => { InnerStdWriter::Buffered(Mutex::new(BufWriter::with_capacity(capacity, stdstream))) } EffectiveWriteMode::BufferAndFlushWith(_) => { unreachable!("Sync InnerStdWriter with own flushing is not implemented") } #[cfg(feature = "async")] EffectiveWriteMode::AsyncWith { pool_capa, message_capa, flush_interval, } => { assert_eq!( flush_interval, ZERO_DURATION, "Async InnerStdWriter with own flushing is not implemented" ); InnerStdWriter::Async(AsyncHandle::new( stdstream, pool_capa, message_capa, #[cfg(test)] &validation_buffer, )) } }; Self { format, writer, #[cfg(test)] validation_buffer, } } } impl LogWriter for StdWriter { #[inline] fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { match &self.writer { InnerStdWriter::Unbuffered(stdstream) => { let mut w = stdstream.lock(); write_buffered( self.format, now, record, &mut w, #[cfg(test)] Some(&self.validation_buffer), ) } InnerStdWriter::Buffered(m_w) => { let mut w = m_w.lock().map_err(|_e| io_err("Poison"))?; write_buffered( self.format, now, record, &mut *w, #[cfg(test)] Some(&self.validation_buffer), ) } #[cfg(feature = "async")] InnerStdWriter::Async(handle) => { let mut buffer = handle.pop_buffer(); (self.format)(&mut buffer, now, record) .unwrap_or_else(|e| eprint_err(ErrorCode::Format, "formatting failed", &e)); buffer .write_all(b"\n") .unwrap_or_else(|e| eprint_err(ErrorCode::Write, "writing failed", &e)); handle.send(buffer).map_err(|_e| io_err("Send"))?; Ok(()) } } } #[inline] fn flush(&self) -> std::io::Result<()> { match &self.writer { InnerStdWriter::Unbuffered(stdstream) => { let mut w = stdstream.lock(); w.flush() } InnerStdWriter::Buffered(m_w) => { let mut w = m_w.lock().map_err(|_e| io_err("Poison"))?; w.flush() } #[cfg(feature = "async")] InnerStdWriter::Async(handle) => { let mut buffer = handle.pop_buffer(); buffer.extend(ASYNC_FLUSH); handle.send(buffer).ok(); Ok(()) } } } fn 
shutdown(&self) { #[cfg(feature = "async")] if let InnerStdWriter::Async(handle) = &self.writer { let mut buffer = handle.pop_buffer(); buffer.extend(ASYNC_SHUTDOWN); handle.send(buffer).ok(); if let Ok(ref mut o_th) = handle.mo_thread_handle.lock() { o_th.take().and_then(|th| th.join().ok()); } } } #[cfg(not(test))] fn validate_logs(&self, _expected: &[(&'static str, &'static str, &'static str)]) {} #[cfg(test)] fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { { use std::io::BufRead; let write_cursor = self.validation_buffer.lock().unwrap(); let mut reader = std::io::BufReader::new(Cursor::new(write_cursor.get_ref())); let mut buf = String::new(); for tuple in expected { buf.clear(); reader.read_line(&mut buf).unwrap(); assert!(buf.contains(tuple.0), "Did not find tuple.0 = {}", tuple.0); assert!(buf.contains(tuple.1), "Did not find tuple.1 = {}", tuple.1); assert!(buf.contains(tuple.2), "Did not find tuple.2 = {}", tuple.2); } buf.clear(); reader.read_line(&mut buf).unwrap(); assert!(buf.is_empty(), "Found more log lines than expected: {buf} ",); } } } #[cfg(test)] mod test { use super::{StdStream, StdWriter}; use crate::{opt_format, writers::LogWriter, DeferredNow, WriteMode}; use log::Level::{Error, Info, Warn}; #[test] fn test_with_validation() { let writer = StdWriter::new( StdStream::Err(std::io::stderr()), opt_format, &WriteMode::Direct, ); let mut rb = log::Record::builder(); rb.target("myApp") .file(Some("std_writer.rs")) .line(Some(222)) .module_path(Some("std_writer::test::test_with_validation")); rb.level(Error) .args(format_args!("This is an error message")); writer.write(&mut DeferredNow::new(), &rb.build()).unwrap(); rb.level(Warn).args(format_args!("This is a warning")); writer.write(&mut DeferredNow::new(), &rb.build()).unwrap(); rb.level(Info).args(format_args!("This is an info message")); writer.write(&mut DeferredNow::new(), &rb.build()).unwrap(); writer.validate_logs(&[ ("ERROR", "std_writer.rs:222", "error"), ("WARN", "std_writer.rs:222", "warning"), ("INFO", "std_writer.rs:222", "info"), ]); } } flexi_logger-0.29.8/src/primary_writer/test_writer.rs000064400000000000000000000120721046102023000211630ustar 00000000000000use { crate::{writers::LogWriter, DeferredNow, FormatFunction}, log::Record, }; #[cfg(test)] use std::io::Cursor; use std::cell::RefCell; #[cfg(test)] use std::{ io::Write, sync::{Arc, Mutex}, }; use crate::util::{eprint_err, ErrorCode}; // `TestWriter` writes logs using println! pub(crate) struct TestWriter { format: FormatFunction, stdout: bool, #[cfg(test)] validation_buffer: Arc>>>, } impl TestWriter { pub(crate) fn new(stdout: bool, format: FormatFunction) -> Self { #[cfg(test)] let validation_buffer = Arc::new(Mutex::new(Cursor::new(Vec::::new()))); Self { format, stdout, #[cfg(test)] validation_buffer, } } } impl LogWriter for TestWriter { #[inline] fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { Ok(mut buffer) => { (self.format)(&mut *buffer, now, record) .unwrap_or_else(|e| eprint_err(ErrorCode::Format, "formatting failed", &e)); if self.stdout { println!("{}", String::from_utf8_lossy(&buffer)); } else { eprintln!("{}", String::from_utf8_lossy(&buffer)); } #[cfg(test)] { let mut cursor = self.validation_buffer.lock().unwrap(); cursor.write_all(&buffer).ok(); cursor.write_all(b"\n").ok(); } buffer.clear(); } Err(_e) => { // We arrive here in the rare cases of recursive logging // (e.g. 
log calls in Debug or Display implementations) // we print the inner calls, in chronological order, before finally the // outer most message is printed let mut tmp_buf = Vec::::with_capacity(200); (self.format)(&mut tmp_buf, now, record) .unwrap_or_else(|e| eprint_err(ErrorCode::Format, "formatting failed", &e)); if self.stdout { println!("{}", String::from_utf8_lossy(&tmp_buf)); } else { eprintln!("{}", String::from_utf8_lossy(&tmp_buf)); } #[cfg(test)] { let mut cursor = self.validation_buffer.lock().unwrap(); cursor.write_all(&tmp_buf).ok(); cursor.write_all(b"\n").ok(); } } }); Ok(()) } #[inline] fn flush(&self) -> std::io::Result<()> { Ok(()) } fn shutdown(&self) {} #[cfg(not(test))] fn validate_logs(&self, _expected: &[(&'static str, &'static str, &'static str)]) {} #[cfg(test)] fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { { use std::io::BufRead; let write_cursor = self.validation_buffer.lock().unwrap(); let mut reader = std::io::BufReader::new(Cursor::new(write_cursor.get_ref())); let mut buf = String::new(); for tuple in expected { buf.clear(); reader.read_line(&mut buf).unwrap(); assert!(buf.contains(tuple.0), "Did not find tuple.0 = {}", tuple.0); assert!(buf.contains(tuple.1), "Did not find tuple.1 = {}", tuple.1); assert!(buf.contains(tuple.2), "Did not find tuple.2 = {}", tuple.2); } buf.clear(); reader.read_line(&mut buf).unwrap(); assert!(buf.is_empty(), "Found more log lines than expected: {buf} ",); } } } // Thread-local buffer pub(crate) fn buffer_with(f: F) where F: FnOnce(&RefCell>), { thread_local! { static BUFFER: RefCell> = RefCell::new(Vec::with_capacity(200)); } BUFFER.with(f); } #[cfg(test)] mod test { use super::TestWriter; use crate::{opt_format, writers::LogWriter, DeferredNow}; use log::Level::{Error, Info, Warn}; #[test] fn test_with_validation() { let writer = TestWriter::new(true, opt_format); let mut rb = log::Record::builder(); rb.target("myApp") .file(Some("std_writer.rs")) .line(Some(222)) .module_path(Some("std_writer::test::test_with_validation")); rb.level(Error) .args(format_args!("This is an error message")); writer.write(&mut DeferredNow::new(), &rb.build()).unwrap(); rb.level(Warn).args(format_args!("This is a warning")); writer.write(&mut DeferredNow::new(), &rb.build()).unwrap(); rb.level(Info).args(format_args!("This is an info message")); writer.write(&mut DeferredNow::new(), &rb.build()).unwrap(); writer.validate_logs(&[ ("ERROR", "std_writer.rs:222", "error"), ("WARN", "std_writer.rs:222", "warning"), ("INFO", "std_writer.rs:222", "info"), ]); } } flexi_logger-0.29.8/src/primary_writer.rs000064400000000000000000000072741046102023000166200ustar 00000000000000mod multi_writer; pub(crate) mod std_stream; mod std_writer; mod test_writer; use self::{ multi_writer::MultiWriter, std_stream::StdStream, std_writer::StdWriter, test_writer::TestWriter, }; use crate::{ filter::LogLineWriter, logger::Duplicate, writers::{FileLogWriter, LogWriter}, DeferredNow, FlexiLoggerError, FormatFunction, LogfileSelector, WriteMode, }; use log::Record; use std::path::PathBuf; // Primary writer // // all normal logging goes here pub(crate) enum PrimaryWriter { // Writes to stdout or to stderr Std(StdWriter), // Writes to a file or to nowhere, with optional "duplication" to stderr or stdout Multi(MultiWriter), // Writes using println! 
to stdout, to enable capturing in tests Test(TestWriter), } impl PrimaryWriter { pub fn multi( duplicate_stderr: Duplicate, duplicate_stdout: Duplicate, support_capture: bool, format_for_stderr: FormatFunction, format_for_stdout: FormatFunction, o_file_writer: Option>, o_other_writer: Option>, ) -> Self { Self::Multi(MultiWriter::new( duplicate_stderr, duplicate_stdout, support_capture, format_for_stderr, format_for_stdout, o_file_writer, o_other_writer, )) } pub fn stderr(format: FormatFunction, write_mode: &WriteMode) -> Self { Self::Std(StdWriter::new( StdStream::Err(std::io::stderr()), format, write_mode, )) } pub fn stdout(format: FormatFunction, write_mode: &WriteMode) -> Self { Self::Std(StdWriter::new( StdStream::Out(std::io::stdout()), format, write_mode, )) } pub fn test(stdout: bool, format: FormatFunction) -> Self { Self::Test(TestWriter::new(stdout, format)) } // Write out a log line. pub fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { match *self { Self::Std(ref w) => w.write(now, record), Self::Multi(ref w) => w.write(now, record), Self::Test(ref w) => w.write(now, record), } } // Flush any buffered records. pub fn flush(&self) -> std::io::Result<()> { match *self { Self::Std(ref w) => w.flush(), Self::Multi(ref w) => w.flush(), Self::Test(ref w) => w.flush(), } } pub fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { self.shutdown(); match self { Self::Std(writer) => { writer.validate_logs(expected); } Self::Multi(writer) => { writer.validate_logs(expected); } Self::Test(writer) => { writer.validate_logs(expected); } } } pub fn shutdown(&self) { self.flush().ok(); match self { Self::Std(writer) => { writer.shutdown(); } Self::Multi(writer) => { writer.shutdown(); } Self::Test(writer) => { writer.shutdown(); } } } pub fn existing_log_files( &self, selector: &LogfileSelector, ) -> Result, FlexiLoggerError> { match self { Self::Multi(multi_writer) => multi_writer.existing_log_files(selector), _ => Ok(Vec::new()), } } } impl LogLineWriter for PrimaryWriter { fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { self.write(now, record) } } flexi_logger-0.29.8/src/threads.rs000064400000000000000000000073671046102023000151760ustar 00000000000000use { crate::{primary_writer::PrimaryWriter, writers::LogWriter, FlexiLoggerError}, std::{ collections::HashMap, sync::{ mpsc::{channel, Receiver, Sender}, Arc, }, thread::Builder as ThreadBuilder, }, }; #[cfg(feature = "async")] use { crate::{ primary_writer::std_stream::StdStream, util::{eprint_err, ErrorCode, ASYNC_FLUSH, ASYNC_SHUTDOWN}, }, crossbeam_channel::Receiver as CrossbeamReceiver, crossbeam_queue::ArrayQueue, std::{sync::Mutex, thread::JoinHandle}, }; #[cfg(feature = "async")] #[cfg(test)] use std::io::Write; #[cfg(feature = "async")] const ASYNC_STD_WRITER: &str = "flexi_logger-async_std_writer"; const FLUSHER: &str = "flexi_logger-flusher"; // Used in Logger pub(crate) fn start_flusher_thread( primary_writer: Arc, other_writers: Arc>>, flush_interval: std::time::Duration, ) -> Result<(), FlexiLoggerError> { let builder = ThreadBuilder::new().name(FLUSHER.to_string()); #[cfg(not(feature = "dont_minimize_extra_stacks"))] let builder = builder.stack_size(1024); builder.spawn(move || { let (_sender, receiver): (Sender<()>, Receiver<()>) = channel(); loop { receiver.recv_timeout(flush_interval).ok(); primary_writer.flush().ok(); for w in other_writers.values() { w.flush().ok(); } } })?; Ok(()) } #[cfg(feature = "async")] pub(crate) fn 
start_async_stdwriter( mut std_stream: StdStream, receiver: CrossbeamReceiver>, t_pool: Arc>>, msg_capa: usize, #[cfg(test)] t_validation_buffer: Arc>>>, ) -> Mutex>> { Mutex::new(Some( ThreadBuilder::new() .name( ASYNC_STD_WRITER.to_string() ) .spawn(move || { loop { match receiver.recv() { Err(_) => break, Ok(mut message) => { match message.as_ref() { ASYNC_FLUSH => { std_stream .deref_mut() .flush() .unwrap_or_else( |e| eprint_err(ErrorCode::Flush, "flushing failed", &e) ); } ASYNC_SHUTDOWN => { break; } _ => { std_stream .deref_mut() .write_all(&message) .unwrap_or_else( |e| eprint_err(ErrorCode::Write,"writing failed", &e) ); #[cfg(test)] if let Ok(mut guard) = t_validation_buffer.lock() { (*guard).write_all(&message).ok(); } } } if message.capacity() <= msg_capa { message.clear(); t_pool.push(message).ok(); } } } } }) .unwrap(/* yes, let's panic if the thread can't be spawned */), )) } flexi_logger-0.29.8/src/trc.rs000064400000000000000000000157431046102023000143310ustar 00000000000000//! ## Use `flexi_logger` functionality with [`tracing`](https://docs.rs/tracing/latest/tracing/). //! //! [`tracing`](https://docs.rs/tracing/latest/tracing/) is an alternative to //! [`log`](https://docs.rs/log/latest/log/). //! It has a similar base architecture, but is optimized for supporting async apps, //! which adds complexity due to the need to manage contexts. //! [`tracing-subscriber`](https://docs.rs/tracing/latest/tracing-subscriber/) //! facilitates contributing "backends", and is used in the example below to plug //! `flexi_logger`-functionality into `tracing`. //! //! **The content of this module is an attempt to support such an integration. //! Feedback is highly appreciated.** //! //! ### Example //! //! The following code example uses two features of `flexi_logger`: //! //! * a fully configurable `FileLogWriter` as trace writer //! * and `flexi_logger`'s specfile handling to adapt `tracing` dynamically, //! while your program is running. //! //! Precondition: add these entries to your `Cargo.toml`: //! ```toml //! flexi_logger = {version = "0.23", features = ["trc"]} //! tracing = "0.1" //! ``` //! //! In this example, the interaction with `tracing` components is completely hidden, //! for convenience. //! If you want to influence `tracing` further, what might often be the case, //! you need to copy the code of method `setup_tracing` into your program and modify it. //! //! Unfortunately, especially due to the use of closures in `tracing-subscriber`'s API, //! it is not easy to provide a convenient _and_ flexible API for plugging `flexi_logger` //! functionality into `tracing`. //! //! ```rust,ignore //! # #[cfg(feature = "specfile_without_notification")] //! # { //! # use std::error::Error; //! use flexi_logger::{ //! writers::FileLogWriter, //! Age, Cleanup, Criterion, FileSpec, LogSpecification, Naming, WriteMode, //! }; //! //! # fn main() -> Result<(), Box> { //! //! // Drop the keep-alive-handles only in the shutdown of your program //! let _keep_alive_handles = flexi_logger::trc::setup_tracing( //! LogSpecification::info(), //! Some(&PathBuf::from("trcspecfile.toml")), //! FileLogWriter::builder(FileSpec::default()) //! .rotate( //! Criterion::Age(Age::Day), //! Naming::Timestamps, //! Cleanup::KeepLogFiles(7), //! ) //! .write_mode(WriteMode::Async), //!)?; //! //! tracing::debug!("now we start doing what we really wanted to do...") //! # Ok(())}} //! ``` //! 
pub use crate::logger_handle::LogSpecSubscriber; use crate::{ logger::{create_specfile_watcher, synchronize_subscriber_with_specfile}, writers::{FileLogWriterBuilder, FileLogWriterHandle}, }; use crate::{FlexiLoggerError, LogSpecification}; use notify_debouncer_mini::{notify::RecommendedWatcher, Debouncer}; use std::path::{Path, PathBuf}; use tracing_subscriber::{EnvFilter, FmtSubscriber}; /// Allows registering a `LogSpecSubscriber` to a specfile. /// /// Every update to the specfile will be noticed (via crate `notify`), /// the file will be re-read, and the `LogSpecSubscriber` will be updated. /// /// # Errors /// /// Several variants of [`FlexiLoggerError`] can occur. #[cfg(feature = "specfile_without_notification")] #[cfg_attr(docsrs, doc(cfg(feature = "specfile")))] pub fn subscribe_to_specfile>( specfile: P, reloader: Box, initial_logspec: LogSpecification, ) -> Result>, FlexiLoggerError> { let specfile = specfile.as_ref(); let mut subscriber = TraceLogSpecSubscriber::new(reloader, initial_logspec); synchronize_subscriber_with_specfile(&mut subscriber, specfile)?; if cfg!(feature = "specfile") { Ok(Some(create_specfile_watcher(specfile, subscriber)?)) } else { Ok(None) } } /// Helper struct that can be registered in /// [`subscribe_to_specfile`](fn.subscribe_to_specfile.html) to get /// informed about updates to the specfile, /// and can be registered in `tracing` to forward such updates. #[cfg(feature = "specfile_without_notification")] struct TraceLogSpecSubscriber { initial_logspec: LogSpecification, update: Box<(dyn Fn(LogSpecification) + Send + Sync)>, } impl TraceLogSpecSubscriber { /// Factory method. /// /// # Parameters /// `initial_logspec`: used to initialize the logspec file if it does not yet exist /// /// update: Closure that implements the update of the log specification to some consumer #[must_use] pub fn new( update: Box<(dyn Fn(LogSpecification) + Send + Sync)>, initial_logspec: LogSpecification, ) -> Self { Self { initial_logspec, update, } } } #[cfg(feature = "specfile_without_notification")] impl LogSpecSubscriber for TraceLogSpecSubscriber { fn set_new_spec(&mut self, logspec: LogSpecification) -> Result<(), FlexiLoggerError> { (self.update)(logspec); Ok(()) } fn initial_spec(&self) -> Result { Ok(self.initial_logspec.clone()) } } /// Rereads the specfile if it was updated and forwards the update to `tracing`'s filter. pub struct SpecFileNotifier { _watcher: Option>, } /// Set up tracing to write into the specified `FileLogWriter`, /// and to use the (optionally) specified specfile. /// /// The returned handles must be kept alive and should be dropped at the very end of the program. /// /// # Panics /// /// # Errors /// /// Various variants of `FlexiLoggerError` can occur. 
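///
/// See the [module documentation](self) for a complete example.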
pub fn setup_tracing( initial_logspec: LogSpecification, o_specfile: Option<&PathBuf>, flwb: FileLogWriterBuilder, ) -> Result<(FileLogWriterHandle, SpecFileNotifier), FlexiLoggerError> { let (file_writer, fw_handle) = flwb.try_build_with_handle()?; // Set up subscriber that makes use of the file writer, with some hardcoded initial log spec let subscriber_builder = FmtSubscriber::builder() .with_writer(move || file_writer.clone()) .with_env_filter(LogSpecAsFilter(initial_logspec.clone())) .with_filter_reloading(); // Set up specfile watching let spec_file_notifier = SpecFileNotifier { _watcher: match o_specfile { Some(specfile) => { let reload_handle = Box::new(subscriber_builder.reload_handle()); subscribe_to_specfile( specfile, Box::new(move |logspec| { { reload_handle.reload(LogSpecAsFilter(logspec)) }.unwrap(/* OK */); }), initial_logspec, )? } None => None, }, }; // Get ready to trace tracing::subscriber::set_global_default(subscriber_builder.finish())?; Ok((fw_handle, spec_file_notifier)) } struct LogSpecAsFilter(pub LogSpecification); impl From for EnvFilter { fn from(wrapped_logspec: LogSpecAsFilter) -> Self { Self::new(wrapped_logspec.0.to_string()) } } flexi_logger-0.29.8/src/util.rs000064400000000000000000000151571046102023000145150ustar 00000000000000use crate::logger::ErrorChannel; use crate::{DeferredNow, FormatFunction}; use log::Record; use std::{ cell::RefCell, io::Write, path::Path, sync::{OnceLock, RwLock}, }; #[cfg(test)] use std::io::Cursor; #[cfg(test)] use std::sync::{Arc, Mutex}; #[cfg(feature = "async")] pub(crate) const ASYNC_FLUSH: &[u8] = b"F"; #[cfg(feature = "async")] pub(crate) const ASYNC_SHUTDOWN: &[u8] = b"S"; #[derive(Copy, Clone, Debug)] pub(crate) enum ErrorCode { Write, Flush, Format, LogFile, #[cfg(feature = "specfile")] LogSpecFile, #[cfg(feature = "colors")] Palette, Poison, #[cfg(target_family = "unix")] Symlink, WriterSpec, } impl ErrorCode { fn as_index(self) -> &'static str { match self { Self::Write => "write", Self::Flush => "flush", Self::Format => "format", Self::LogFile => "logfile", #[cfg(feature = "specfile")] Self::LogSpecFile => "logspecfile", #[cfg(feature = "colors")] Self::Palette => "palette", Self::Poison => "poison", #[cfg(target_family = "unix")] Self::Symlink => "symlink", Self::WriterSpec => "writerspec", } } } pub(crate) fn eprint_err(error_code: ErrorCode, msg: &str, err: &dyn std::error::Error) { try_writing_to_error_channel(&format!( "[flexi_logger][ERRCODE::{code:?}] {msg}, caused by {err:?}\n \ See https://docs.rs/flexi_logger/latest/flexi_logger/error_info/index.html#{code_lc}", msg = msg, err = err, code = error_code, code_lc = error_code.as_index(), )); } pub(crate) fn eprint_msg(error_code: ErrorCode, msg: &str) { try_writing_to_error_channel(&format!( "[flexi_logger][ERRCODE::{code:?}] {msg}\n \ See https://docs.rs/flexi_logger/latest/flexi_logger/error_info/index.html#{code_lc}", msg = msg, code = error_code, code_lc = error_code.as_index(), )); } fn error_channel() -> &'static RwLock { static ERROR_CHANNEL: OnceLock> = OnceLock::new(); ERROR_CHANNEL.get_or_init(|| RwLock::new(ErrorChannel::default())) } static PANIC_ON_ERROR_ERROR: OnceLock = OnceLock::new(); pub(crate) fn set_panic_on_error_channel_error(b: bool) { PANIC_ON_ERROR_ERROR.get_or_init(|| b); } fn panic_on_error_error() -> bool { *PANIC_ON_ERROR_ERROR.get().unwrap_or(&false) } fn handle_error_error(result: &Result<(), std::io::Error>) { if result.is_err() { assert!( !panic_on_error_error(), "flexi_logger panics because it ran into an error and cannot 
inform about it \ through its configured error output channel \ because the error output channel itself is broken. \n\ You can avoid this panic by using 'Logger::panic_if_error_channel_is_broken(false)' \ (see https://docs.rs/flexi_logger/latest/flexi_logger/struct.Logger.html#method.panic_if_error_channel_is_broken)." ); } } pub(crate) fn set_error_channel(channel: ErrorChannel) { match error_channel().write() { Ok(mut guard) => { *guard = channel; } Err(e) => { eprint_err(ErrorCode::Poison, "Error channel cannot be set", &e); } } } fn try_writing_to_error_channel(s: &str) { match &*(error_channel().read().unwrap()) { ErrorChannel::StdErr => { handle_error_error(&writeln!(std::io::stderr(), "{s}")); } ErrorChannel::StdOut => { handle_error_error(&writeln!(std::io::stdout(), "{s}")); } ErrorChannel::File(path) => try_writing_to_file(s, path).unwrap_or_else(|e| { handle_error_error(&writeln!(std::io::stderr(), "{s}")); handle_error_error(&writeln!( std::io::stderr(), "Can't open error output file, caused by: {e}" )); }), ErrorChannel::DevNull => {} } } fn try_writing_to_file(s: &str, path: &Path) -> Result<(), std::io::Error> { let mut file = std::fs::OpenOptions::new() .create(true) .append(true) .open(path)?; writeln!(file, "{s}")?; file.flush() } pub(crate) fn io_err(s: &'static str) -> std::io::Error { std::io::Error::new(std::io::ErrorKind::Other, s) } // Thread-local buffer pub(crate) fn buffer_with(f: F) where F: FnOnce(&RefCell>), { thread_local! { static BUFFER: RefCell> = RefCell::new(Vec::with_capacity(200)); } BUFFER.with(f); } // Use the thread-local buffer for formatting before writing into the given writer pub(crate) fn write_buffered( format_function: FormatFunction, now: &mut DeferredNow, record: &Record, w: &mut dyn Write, #[cfg(test)] o_validation_buffer: Option<&Arc>>>>, ) -> Result<(), std::io::Error> { let mut result: Result<(), std::io::Error> = Ok(()); buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { Ok(mut buffer) => { (format_function)(&mut *buffer, now, record) .unwrap_or_else(|e| eprint_err(ErrorCode::Format, "formatting failed", &e)); buffer .write_all(b"\n") .unwrap_or_else(|e| eprint_err(ErrorCode::Write, "writing failed", &e)); result = w.write_all(&buffer).map_err(|e| { eprint_err(ErrorCode::Write, "writing failed", &e); e }); #[cfg(test)] if let Some(valbuf) = o_validation_buffer { valbuf.lock().unwrap().write_all(&buffer).ok(); } buffer.clear(); } Err(_e) => { // We arrive here in the rare cases of recursive logging // (e.g. log calls in Debug or Display implementations) // we print the inner calls, in chronological order, before finally the // outer most message is printed let mut tmp_buf = Vec::::with_capacity(200); (format_function)(&mut tmp_buf, now, record) .unwrap_or_else(|e| eprint_err(ErrorCode::Format, "formatting failed", &e)); tmp_buf .write_all(b"\n") .unwrap_or_else(|e| eprint_err(ErrorCode::Write, "writing failed", &e)); result = w.write_all(&tmp_buf).map_err(|e| { eprint_err(ErrorCode::Write, "writing failed", &e); e }); #[cfg(test)] if let Some(valbuf) = o_validation_buffer { valbuf.lock().unwrap().write_all(&tmp_buf).ok(); } } }); result } flexi_logger-0.29.8/src/write_mode.rs000064400000000000000000000213311046102023000156650ustar 00000000000000use crate::ZERO_DURATION; use std::time::Duration; /// Default buffer capacity (8k), when buffering is used. pub const DEFAULT_BUFFER_CAPACITY: usize = 8 * 1024; /// Default flush interval (1s), when flushing is used. 
pub const DEFAULT_FLUSH_INTERVAL: Duration = Duration::from_secs(1); /// Default size of the message pool; /// a higher value could further reduce allocations during log file rotation and cleanup. #[cfg(feature = "async")] #[cfg_attr(docsrs, doc(cfg(feature = "async")))] pub const DEFAULT_POOL_CAPA: usize = 50; /// Default capacity for the message buffers; /// a higher value reduces allocations when longer log lines are used. #[cfg(feature = "async")] #[cfg_attr(docsrs, doc(cfg(feature = "async")))] pub const DEFAULT_MESSAGE_CAPA: usize = 200; /// Describes whether the log output should be written synchronously or asynchronously, /// and if and how I/O should be buffered and flushed. /// /// Is used in [`Logger::write_mode`](struct.Logger.html#method.write_mode). /// /// Buffering reduces the program's I/O overhead, and thus increases overall performance, /// which can become relevant if logging is used heavily. /// On the other hand, if logging is used with low frequency, /// buffering can defer the appearance of log lines significantly, /// so regular flushing is usually advisable with buffering. /// /// **Note** that for all options except `Direct` you should keep the /// [`LoggerHandle`](struct.LoggerHandle.html) alive /// up to the very end of your program to ensure that all buffered log lines are flushed out /// (which happens automatically when the [`LoggerHandle`](struct.LoggerHandle.html) is dropped) /// before the program terminates. /// [See here for an example](code_examples/index.html#choose-the-write-mode). /// /// **Note** further that flushing uses an extra thread (with minimal stack). /// /// The console is a slow output device (at least on Windows). /// With `WriteMode::Async` it can happen that in phases with vast log output /// the log lines appear significantly later than they were written. /// Also, a final printing phase is possible at the end of the program when the logger handle /// is dropped (and all output is flushed automatically). /// /// `WriteMode::Direct` (i.e. without buffering) is the slowest option with all output devices, /// showing that buffered I/O pays off. /// /// Using `log_to_stdout()` and then redirecting the output to a file can make things faster, /// likely because the operating system's adds buffering, /// but is still significantly slower than writing to files directly. /// #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum WriteMode { /// Do not buffer (default). /// /// Every log line is directly written to the output, without buffering. /// This allows seeing new log lines in real time, and does not need additional threads. Direct, /// Do not buffer and support `cargo test`'s capture. /// /// Much like `Direct`, just a bit slower, and allows /// `cargo test` to capture log output and print it only for failing tests. SupportCapture, /// Same as `BufferAndFlushWith` with default capacity ([`DEFAULT_BUFFER_CAPACITY`]) /// and default interval ([`DEFAULT_FLUSH_INTERVAL`]). BufferAndFlush, /// Buffer and flush with given buffer capacity and flush interval. BufferAndFlushWith( /// Buffer capacity. usize, /// Flush interval. Duration, ), /// Same as `BufferDontFlushWith` with default capacity ([`DEFAULT_BUFFER_CAPACITY`]). BufferDontFlush, /// Buffer with given buffer capacity, but don't flush. /// /// This might be handy if you want to minimize I/O effort and don't want to create /// the extra thread for flushing and don't care if log lines appear with delay. BufferDontFlushWith( /// Buffer capacity. 
usize, ), /// Same as `AsyncWith`, using default values for all parameters. #[cfg_attr(docsrs, doc(cfg(feature = "async")))] #[cfg(feature = "async")] Async, /// Log lines are sent through an unbounded channel to an output thread, which /// does the I/O, and, if `log_to_file()` is chosen, also the rotation and the cleanup. /// /// Uses buffered output to reduce overhead, and a bounded message pool to reduce allocations. /// The log output is flushed regularly with the given interval. /// /// See [here](code_examples/index.html#choose-the-write-mode) for an example. #[cfg_attr(docsrs, doc(cfg(feature = "async")))] #[cfg(feature = "async")] AsyncWith { /// Capacity of the pool for the message buffers. pool_capa: usize, /// Capacity of an individual message buffer. message_capa: usize, /// The interval for flushing the output. /// /// With `Duration::ZERO` flushing is suppressed. flush_interval: Duration, }, } impl WriteMode { pub(crate) fn effective_write_mode(&self) -> EffectiveWriteMode { match *self { Self::Direct | Self::SupportCapture => EffectiveWriteMode::Direct, Self::BufferDontFlush => { EffectiveWriteMode::BufferDontFlushWith(DEFAULT_BUFFER_CAPACITY) } Self::BufferDontFlushWith(duration) => { EffectiveWriteMode::BufferDontFlushWith(duration) } Self::BufferAndFlush => EffectiveWriteMode::BufferAndFlushWith(DEFAULT_BUFFER_CAPACITY), Self::BufferAndFlushWith(bufsize, _duration) => { EffectiveWriteMode::BufferAndFlushWith(bufsize) } #[cfg(feature = "async")] Self::Async => EffectiveWriteMode::AsyncWith { pool_capa: DEFAULT_POOL_CAPA, message_capa: DEFAULT_MESSAGE_CAPA, flush_interval: DEFAULT_FLUSH_INTERVAL, }, #[cfg(feature = "async")] Self::AsyncWith { pool_capa, message_capa, flush_interval, } => EffectiveWriteMode::AsyncWith { pool_capa, message_capa, flush_interval, }, } } pub(crate) fn without_flushing(&self) -> WriteMode { match self { Self::Direct | Self::SupportCapture | Self::BufferDontFlush | Self::BufferDontFlushWith(_) => *self, Self::BufferAndFlush => Self::BufferDontFlush, Self::BufferAndFlushWith(bufsize, _) => Self::BufferDontFlushWith(*bufsize), #[cfg(feature = "async")] Self::Async => Self::AsyncWith { pool_capa: DEFAULT_POOL_CAPA, message_capa: DEFAULT_MESSAGE_CAPA, flush_interval: ZERO_DURATION, }, #[cfg(feature = "async")] Self::AsyncWith { pool_capa, message_capa, flush_interval: _, } => Self::AsyncWith { pool_capa: *pool_capa, message_capa: *message_capa, flush_interval: ZERO_DURATION, }, } } pub(crate) fn buffersize(&self) -> Option { match self.effective_write_mode() { EffectiveWriteMode::Direct => None, EffectiveWriteMode::BufferAndFlushWith(bufsize) | EffectiveWriteMode::BufferDontFlushWith(bufsize) => Some(bufsize), #[cfg(feature = "async")] EffectiveWriteMode::AsyncWith { pool_capa: _, message_capa: _, flush_interval: _, } => None, } } pub(crate) fn get_flush_interval(&self) -> Duration { match self { Self::Direct | Self::SupportCapture | Self::BufferDontFlush | Self::BufferDontFlushWith(_) => ZERO_DURATION, Self::BufferAndFlush => DEFAULT_FLUSH_INTERVAL, #[cfg(feature = "async")] Self::Async => DEFAULT_FLUSH_INTERVAL, Self::BufferAndFlushWith(_, flush_interval) => *flush_interval, #[cfg(feature = "async")] Self::AsyncWith { pool_capa: _, message_capa: _, flush_interval, } => *flush_interval, } } } pub(crate) enum EffectiveWriteMode { Direct, BufferAndFlushWith(usize), #[cfg_attr(docsrs, doc(cfg(feature = "async")))] #[cfg(feature = "async")] AsyncWith { /// Capacity of the pool for the message buffers. 
pool_capa: usize, /// Capacity of an individual message buffer. message_capa: usize, /// The interval for flushing the output. /// /// With `Duration::ZERO` flushing is suppressed. flush_interval: Duration, }, BufferDontFlushWith(usize), } flexi_logger-0.29.8/src/writers/file_log_writer/builder.rs000064400000000000000000000300611046102023000220300ustar 00000000000000use crate::flexi_error::FlexiLoggerError; use crate::formats::default_format; use crate::{Cleanup, Criterion, FileSpec, FormatFunction, Naming, WriteMode}; use std::path::{Path, PathBuf}; use std::sync::Arc; use super::{FileLogWriter, FileLogWriterConfig, LogWriter, RotationConfig, State}; /// Builder for [`FileLogWriter`]. #[allow(clippy::struct_excessive_bools, clippy::module_name_repetitions)] pub struct FileLogWriterBuilder { cfg_print_message: bool, cfg_append: bool, cfg_write_mode: WriteMode, file_spec: FileSpec, cfg_o_create_symlink: Option, cfg_line_ending: &'static [u8], format: FormatFunction, o_rotation_config: Option, max_log_level: log::LevelFilter, cleanup_in_background_thread: bool, use_utc: bool, } /// Methods for influencing the behavior of the [`FileLogWriter`]. impl FileLogWriterBuilder { pub(crate) fn new(file_spec: FileSpec) -> Self { Self { o_rotation_config: None, cfg_print_message: false, file_spec, cfg_append: false, cfg_write_mode: WriteMode::Direct, cfg_o_create_symlink: None, cfg_line_ending: super::UNIX_LINE_ENDING, format: default_format, max_log_level: log::LevelFilter::Trace, cleanup_in_background_thread: true, use_utc: false, } } /// Makes the [`FileLogWriter`] print an info message to stdout /// when a new file is used for log-output. #[must_use] pub fn print_message(mut self) -> Self { self.cfg_print_message = true; self } /// Makes the [`FileLogWriter`] use the provided format function for the log entries, /// rather than [`default_format`]. #[must_use] pub fn format(mut self, format: FormatFunction) -> Self { self.format = format; self } /// Influences how the cleanup activities /// (finding files, deleting files, optionally compressing files) are done /// when rotation is used with some [`Cleanup`] variant. /// /// With the synchronous [write modes](crate::WriteMode), /// the cleanup activities are done by default in a dedicated background thread, to /// minimize the blocking impact on your application. /// You can avoid this extra thread by calling this method with /// `use_background_thread = false`; the cleanup is then done synchronously /// by the thread that is currently logging and - by chance - causing a file rotation. /// /// With [`WriteMode::AsyncWith`](crate::WriteMode::AsyncWith), /// the cleanup activities are always done by the same background thread /// that also does the file I/O, this method then has no effect. #[must_use] pub fn cleanup_in_background_thread(mut self, use_background_thread: bool) -> Self { self.cleanup_in_background_thread = use_background_thread; self } /// Use rotation to prevent indefinite growth of log files. /// /// By default, the log file is fixed while your program is running and will grow indefinitely. /// With this option being used, when the log file reaches the specified criterion, /// the file will be closed and a new file will be opened. 
/// /// Note that also the filename pattern changes: /// /// - by default, no timestamp is added to the filename /// - the logs are always written to a file with infix `_rCURRENT` /// - when the rotation criterion is fulfilled, it is closed and renamed to a file /// with another infix (see `Naming`), /// and then the logging continues again to the (fresh) file with infix `_rCURRENT`. /// /// Example: /// /// After some logging with your program `my_prog` and rotation with `Naming::Numbers`, /// you will find files like /// /// ```text /// my_prog_r00000.log /// my_prog_r00001.log /// my_prog_r00002.log /// my_prog_rCURRENT.log /// ``` /// /// The cleanup parameter allows defining the strategy for dealing with older files. /// See [`Cleanup`] for details. #[must_use] pub fn rotate(mut self, criterion: Criterion, naming: Naming, cleanup: Cleanup) -> Self { self.o_rotation_config = Some(RotationConfig { criterion, naming, cleanup, }); self.file_spec.if_default_use_timestamp(false); self } /// Set the file spec. #[must_use] pub(crate) fn file_spec(mut self, mut file_spec: FileSpec) -> Self { if self.o_rotation_config.is_some() { file_spec.if_default_use_timestamp(false); } self.file_spec = file_spec; self } /// Makes the logger append to the given file, if it exists; by default, the file would be /// truncated. #[must_use] pub fn append(mut self) -> Self { self.cfg_append = true; self } /// Set the maximum log level. /// /// The default is `log::LevelFilter::Trace`, i.e., all log levels are written. #[must_use] pub fn max_level(mut self, max_log_level: log::LevelFilter) -> Self { self.max_log_level = max_log_level; self } /// Enforces the use of UTC, rather than local time. #[must_use] pub fn use_utc(mut self) -> Self { self.file_spec.use_utc = true; self.use_utc = true; self } /// The specified String will be used on unix systems to create in the current folder /// a symbolic link to the current log file. #[must_use] pub fn create_symlink>(mut self, symlink: P) -> Self { self.cfg_o_create_symlink = Some(symlink.into()); self } /// Use Windows line endings, rather than just `\n`. #[must_use] pub fn use_windows_line_ending(mut self) -> Self { self.cfg_line_ending = super::WINDOWS_LINE_ENDING; self } /// Sets the write mode for the `FileLogWriter`. /// /// See [`WriteMode`] for more (important!) details. #[must_use] pub fn write_mode(mut self, write_mode: WriteMode) -> Self { self.cfg_write_mode = write_mode; self } pub(crate) fn assert_write_mode(&self, write_mode: WriteMode) -> Result<(), FlexiLoggerError> { if self.cfg_write_mode == write_mode { Ok(()) } else { Err(FlexiLoggerError::Reset) } } #[must_use] pub(crate) fn get_write_mode(&self) -> &WriteMode { &self.cfg_write_mode } /// Produces the `FileLogWriter`. /// /// # Errors /// /// `FlexiLoggerError::Io` if the specified path doesn't work. pub fn try_build(self) -> Result { Ok(FileLogWriter::new( self.try_build_state()?, self.max_log_level, self.format, )) } /// Produces the `FileLogWriter` and a handle that is connected with it. /// /// This allows handing out the `FileLogWriter` instance to methods that consume it, and still /// be able to influence it via the handle. /// /// # Errors /// /// `FlexiLoggerError::Io` if the specified path doesn't work. 
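///
/// # Example
///
/// A minimal sketch (the rotation settings are only illustrative): the returned
/// `ArcFileLogWriter` implements `std::io::Write`, can be cloned and handed out,
/// and is shut down when the `FileLogWriterHandle` is dropped.
///
/// ```rust,ignore
/// # use std::error::Error;
/// use flexi_logger::{writers::FileLogWriter, Cleanup, Criterion, FileSpec, Naming};
/// use std::io::Write;
///
/// # fn main() -> Result<(), Box<dyn Error>> {
/// let (mut writer, _handle) = FileLogWriter::builder(FileSpec::default())
///     .rotate(Criterion::Size(10_000_000), Naming::Numbers, Cleanup::KeepLogFiles(7))
///     .try_build_with_handle()?;
///
/// // Writes the given bytes as they are, without applying the format function
/// writer.write_all(b"a plain output line\n")?;
///
/// // Dropping `_handle` (ideally at the very end of the program) shuts the writer down
/// # Ok(())}
/// ```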
pub fn try_build_with_handle( self, ) -> Result<(ArcFileLogWriter, FileLogWriterHandle), FlexiLoggerError> { Ok(ArcFileLogWriter::new_with_handle(FileLogWriter::new( self.try_build_state()?, self.max_log_level, self.format, ))) } pub(super) fn try_build_state(&self) -> Result { // make sure the folder exists or create it let dir = self.file_spec.get_directory(); let p_directory = Path::new(&dir); std::fs::create_dir_all(p_directory)?; if !std::fs::metadata(p_directory)?.is_dir() { return Err(FlexiLoggerError::OutputBadDirectory); }; #[cfg(feature = "async")] let cleanup_in_background_thread = if let WriteMode::AsyncWith { pool_capa: _, message_capa: _, flush_interval: _, } = self.cfg_write_mode { false } else { self.cleanup_in_background_thread }; #[cfg(not(feature = "async"))] let cleanup_in_background_thread = self.cleanup_in_background_thread; Ok(State::new( FileLogWriterConfig { print_message: self.cfg_print_message, append: self.cfg_append, line_ending: self.cfg_line_ending, write_mode: self.cfg_write_mode, file_spec: self.file_spec.clone(), o_create_symlink: self.cfg_o_create_symlink.clone(), use_utc: self.use_utc, }, self.o_rotation_config.clone(), cleanup_in_background_thread, )) } } /// Alternative set of methods to control the behavior of the `FileLogWriterBuilder`. /// Use these methods when you want to control the settings flexibly, /// e.g. with commandline arguments via `docopts` or `clap`. impl FileLogWriterBuilder { /// With true, makes the `FileLogWriterBuilder` print an info message to stdout, each time /// when a new file is used for log-output. #[must_use] pub fn o_print_message(mut self, print_message: bool) -> Self { self.cfg_print_message = print_message; self } /// By default, and with None, the log file will grow indefinitely. /// If a `rotate_config` is set, when the log file reaches or exceeds the specified size, /// the file will be closed and a new file will be opened. /// Also the filename pattern changes: instead of the timestamp, a serial number /// is included into the filename. /// /// The size is given in bytes, e.g. `o_rotate_over_size(Some(1_000))` will rotate /// files once they reach a size of 1 kB. /// /// The cleanup strategy allows delimiting the used space on disk. #[must_use] pub fn o_rotate(mut self, rotate_config: Option<(Criterion, Naming, Cleanup)>) -> Self { if let Some((criterion, naming, cleanup)) = rotate_config { self.o_rotation_config = Some(RotationConfig { criterion, naming, cleanup, }); self.file_spec.if_default_use_timestamp(false); } else { self.o_rotation_config = None; self.file_spec.if_default_use_timestamp(true); } self } /// If append is set to true, makes the logger append to the given file, if it exists. /// By default, or with false, the file would be truncated. #[must_use] pub fn o_append(mut self, append: bool) -> Self { self.cfg_append = append; self } /// If a String is specified, it will be used on unix systems to create in the current folder /// a symbolic link with this name to the current log file. #[must_use] pub fn o_create_symlink>(mut self, symlink: Option) -> Self { self.cfg_o_create_symlink = symlink.map(Into::into); self } } /// A shareable `FileLogWriter` with a handle. 
pub struct ArcFileLogWriter(Arc); impl ArcFileLogWriter { pub(crate) fn new_with_handle(flw: FileLogWriter) -> (Self, FileLogWriterHandle) { let a_flw = Arc::new(flw); (Self(Arc::clone(&a_flw)), FileLogWriterHandle(a_flw)) } } impl std::ops::Deref for ArcFileLogWriter { type Target = FileLogWriter; fn deref(&self) -> &FileLogWriter { &(self.0) } } impl Clone for ArcFileLogWriter { fn clone(&self) -> Self { Self(Arc::clone(&self.0)) } } impl std::io::Write for ArcFileLogWriter { fn write(&mut self, buffer: &[u8]) -> std::result::Result { (*self.0).plain_write(buffer) } fn flush(&mut self) -> std::result::Result<(), std::io::Error> { LogWriter::flush(&*self.0) } } /// Handle to the `FileLogWriter` in an `ArcFileLogWriter` /// that shuts down the `FileLogWriter` in its `Drop` implementation. pub struct FileLogWriterHandle(Arc); impl Drop for FileLogWriterHandle { fn drop(&mut self) { self.0.shutdown(); } } flexi_logger-0.29.8/src/writers/file_log_writer/config.rs000064400000000000000000000036271046102023000216570ustar 00000000000000use crate::{Cleanup, Criterion, FileSpec, Naming, WriteMode}; use std::path::PathBuf; /// Describes how rotation should work #[derive(Clone, Debug)] pub struct RotationConfig { // Defines if rotation should be based on size or date pub(crate) criterion: Criterion, // Defines if rotated files should be numbered or get a date-based name pub(crate) naming: Naming, // Defines the cleanup strategy pub(crate) cleanup: Cleanup, } /// Configuration of a `FileLogWriter`. #[derive(Debug, Clone)] pub struct FileLogWriterConfig { pub(crate) print_message: bool, pub(crate) append: bool, pub(crate) write_mode: WriteMode, pub(crate) file_spec: FileSpec, pub(crate) o_create_symlink: Option, pub(crate) line_ending: &'static [u8], pub(crate) use_utc: bool, } impl FileLogWriterConfig { /// Returns the configured directory. #[must_use] pub fn directory(&self) -> &std::path::Path { self.file_spec.directory.as_path() } /// Returns the configured `basename` of the log file. #[must_use] pub fn basename(&self) -> &str { &self.file_spec.basename } /// Returns the configured `discriminant`. #[must_use] pub fn discriminant(&self) -> Option { self.file_spec.o_discriminant.clone() } /// Returns the configured `suffix`. #[must_use] pub fn suffix(&self) -> Option { self.file_spec.get_suffix() } /// Returns `true` if UTC is enforced. #[must_use] pub fn use_utc(&self) -> bool { self.use_utc } /// Returns `true` if existing files are appended on program start. #[must_use] pub fn append(&self) -> bool { self.append } /// Returns `true` if a message should be printed on program start /// to which file the log is written. 
#[must_use] pub fn print_message(&self) -> bool { self.print_message } } flexi_logger-0.29.8/src/writers/file_log_writer/infix_filter.rs000064400000000000000000000016641046102023000230730ustar 00000000000000use super::state::{timestamp_from_ts_infix, InfixFormat}; #[derive(Clone)] pub(crate) enum InfixFilter { Timstmps(InfixFormat), Numbrs, #[cfg(test)] StartsWth(String), Equls(String), None, } impl InfixFilter { pub(crate) fn filter_infix(&self, infix: &str) -> bool { match self { InfixFilter::Timstmps(infix_format) => { timestamp_from_ts_infix(infix, infix_format).is_ok() } InfixFilter::Numbrs => { if infix.len() > 2 { let mut chars = infix.chars(); chars.next().unwrap() == 'r' && chars.next().unwrap().is_ascii_digit() } else { false } } #[cfg(test)] InfixFilter::StartsWth(s) => infix.starts_with(s), InfixFilter::Equls(s) => infix.eq(s), InfixFilter::None => false, } } } flexi_logger-0.29.8/src/writers/file_log_writer/state/list_and_cleanup.rs000064400000000000000000000142111046102023000250250ustar 00000000000000use super::InfixFilter; use crate::{Cleanup, FileSpec, LogfileSelector}; #[cfg(feature = "compress")] use std::fs::File; use std::{ path::PathBuf, thread::{Builder as ThreadBuilder, JoinHandle}, }; pub(super) fn list_of_log_and_compressed_files( file_spec: &FileSpec, infix_filter: &InfixFilter, ) -> Vec { existing_log_files( file_spec, true, infix_filter, &LogfileSelector::default().with_compressed_files(), ) } pub(super) fn existing_log_files( file_spec: &FileSpec, use_rotation: bool, infix_filter: &InfixFilter, selector: &LogfileSelector, ) -> Vec { let mut result = Vec::new(); let related_files = file_spec.read_dir_related_files(); if use_rotation { if selector.with_plain_files { result.append(&mut file_spec.filter_files( &related_files, infix_filter, file_spec.get_suffix().as_deref(), )); } if selector.with_compressed_files { result.append(&mut file_spec.filter_files(&related_files, infix_filter, Some("gz"))); } if selector.with_r_current { result.append(&mut file_spec.filter_files( &related_files, &InfixFilter::Equls(super::CURRENT_INFIX.to_string()), file_spec.get_suffix().as_deref(), )); } if let Some(ref custom_current) = selector.with_configured_current { result.append(&mut file_spec.filter_files( &related_files, &InfixFilter::Equls(custom_current.to_string()), file_spec.get_suffix().as_deref(), )); } } else { result.push(file_spec.as_pathbuf(None)); } result } pub(super) fn remove_or_compress_too_old_logfiles( o_cleanup_thread_handle: Option<&CleanupThreadHandle>, cleanup_config: &Cleanup, file_spec: &FileSpec, infix_filter: &InfixFilter, writes_direct: bool, ) -> Result<(), std::io::Error> { o_cleanup_thread_handle.as_ref().map_or_else( || { remove_or_compress_too_old_logfiles_impl( cleanup_config, file_spec, infix_filter, writes_direct, ) }, |cleanup_thread_handle| { cleanup_thread_handle .sender .send(MessageToCleanupThread::Act) .ok(); Ok(()) }, ) } pub(crate) fn remove_or_compress_too_old_logfiles_impl( cleanup_config: &Cleanup, file_spec: &FileSpec, infix_filter: &InfixFilter, writes_direct: bool, ) -> Result<(), std::io::Error> { let (mut log_limit, compress_limit) = match *cleanup_config { Cleanup::Never => { return Ok(()); } Cleanup::KeepLogFiles(log_limit) => (log_limit, 0), #[cfg(feature = "compress")] Cleanup::KeepCompressedFiles(compress_limit) => (0, compress_limit), #[cfg(feature = "compress")] Cleanup::KeepLogAndCompressedFiles(log_limit, compress_limit) => { (log_limit, compress_limit) } }; // we must not clean up the current output file if 
writes_direct && log_limit == 0 { log_limit = 1; } for (index, file) in list_of_log_and_compressed_files(file_spec, infix_filter) .into_iter() .enumerate() { if index >= log_limit + compress_limit { // delete (log or log.gz) std::fs::remove_file(file)?; } else if index >= log_limit { #[cfg(feature = "compress")] { // compress, if not yet compressed if let Some(extension) = file.extension() { if extension != "gz" { let mut compressed_file = file.clone(); match compressed_file.extension() { Some(oss) => { let mut oss_gz = oss.to_os_string(); oss_gz.push(".gz"); compressed_file.set_extension(oss_gz.as_os_str()); } None => { compressed_file.set_extension("gz"); } } let mut gz_encoder = flate2::write::GzEncoder::new( File::create(compressed_file)?, flate2::Compression::fast(), ); let mut old_file = File::open(file.clone())?; std::io::copy(&mut old_file, &mut gz_encoder)?; gz_encoder.finish()?; std::fs::remove_file(&file)?; } } } } } Ok(()) } const CLEANER: &str = "flexi_logger-fs-cleanup"; #[derive(Debug)] pub(super) struct CleanupThreadHandle { sender: std::sync::mpsc::Sender, join_handle: JoinHandle<()>, } enum MessageToCleanupThread { Act, Die, } impl CleanupThreadHandle { pub(super) fn shutdown(self) { self.sender.send(MessageToCleanupThread::Die).ok(); self.join_handle.join().ok(); } } pub(super) fn start_cleanup_thread( cleanup: Cleanup, file_spec: FileSpec, infix_filter: &InfixFilter, writes_direct: bool, ) -> Result { let (sender, receiver) = std::sync::mpsc::channel(); let builder = ThreadBuilder::new().name(CLEANER.to_string()); #[cfg(not(feature = "dont_minimize_extra_stacks"))] let builder = builder.stack_size(512 * 1024); let infix_filter_cp = infix_filter.clone(); Ok(CleanupThreadHandle { sender, join_handle: builder.spawn(move || { while let Ok(MessageToCleanupThread::Act) = receiver.recv() { remove_or_compress_too_old_logfiles_impl( &cleanup, &file_spec, &infix_filter_cp, writes_direct, ) .ok(); } })?, }) } flexi_logger-0.29.8/src/writers/file_log_writer/state/numbers.rs000064400000000000000000000042711046102023000232010ustar 00000000000000//! The infix for rotated files contains an index number. 
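//!
//! The index is zero-padded to five digits by `number_infix` below; for example,
//! `number_infix(7)` yields `"r00007"`. With `Naming::Numbers`, log output always goes to
//! the file with infix `rCURRENT`, which is renamed to a numbered file on rotation;
//! with `Naming::NumbersDirect`, the numbered file itself is the current output file.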
use super::{InfixFilter, CURRENT_INFIX}; use crate::{writers::FileLogWriterConfig, FileSpec}; use std::cmp::max; pub(super) fn number_infix(idx: u32) -> String { format!("r{idx:0>5}") } pub(super) fn index_for_rcurrent( config: &FileLogWriterConfig, o_index_for_rcurrent: Option, rotate_rcurrent: bool, ) -> Result { // we believe what we get - but if we get nothing, we determine what's next // according to the filesystem let mut index_for_rcurrent = o_index_for_rcurrent .or_else(|| get_highest_index(&config.file_spec).map(|idx| idx + 1)) .unwrap_or(0); if rotate_rcurrent { match std::fs::rename( config.file_spec.as_pathbuf(Some(CURRENT_INFIX)), config .file_spec .as_pathbuf(Some(&number_infix(index_for_rcurrent))), ) { Ok(()) => { index_for_rcurrent += 1; } Err(e) => { if e.kind() != std::io::ErrorKind::NotFound { return Err(e); } } } } Ok(index_for_rcurrent) } pub(super) fn get_highest_index(file_spec: &FileSpec) -> Option { let mut o_highest_idx = None; for file in super::list_and_cleanup::list_of_log_and_compressed_files(file_spec, &InfixFilter::Numbrs) { let name = file.file_stem().unwrap(/*ok*/).to_string_lossy(); let infix = if file_spec.has_basename() || file_spec.has_discriminant() || file_spec.uses_timestamp() { // infix is the last, but not the first part of the name, starts with _r match name.rsplit("_r").next() { Some(infix) => infix, None => continue, // ignore unexpected files } } else { // infix is the only part of the name, just skip over the r &name[1..] }; let idx: u32 = infix.parse().unwrap_or(0); o_highest_idx = match o_highest_idx { None => Some(idx), Some(prev) => Some(max(prev, idx)), }; } o_highest_idx } flexi_logger-0.29.8/src/writers/file_log_writer/state/timestamps.rs000064400000000000000000000133601046102023000237130ustar 00000000000000use super::{get_creation_timestamp, InfixFilter, InfixFormat}; use crate::{writers::FileLogWriterConfig, FileSpec}; use chrono::{format::ParseErrorKind, DateTime, Local, NaiveDate, NaiveDateTime, TimeZone}; use std::path::{Path, PathBuf}; pub(super) fn infix_from_timestamp( ts: &DateTime, use_utc: bool, fmt: &InfixFormat, ) -> String { if use_utc { ts.naive_utc().format(fmt.format()) } else { ts.format(fmt.format()) } .to_string() } fn ts_infix_from_path(path: &Path, file_spec: &FileSpec) -> String { let idx = file_spec .as_pathbuf(Some("rXXXXX")) .to_string_lossy() .find("rXXXXX") .unwrap(); String::from_utf8_lossy(&path.to_string_lossy().as_bytes()[idx..idx + 20]).to_string() } pub(crate) fn timestamp_from_ts_infix( infix: &str, fmt: &InfixFormat, ) -> Result, String> { match NaiveDateTime::parse_from_str(infix, fmt.format()) { Ok(dt1) => Local .from_local_datetime(&dt1) .earliest() .ok_or("Can't determine local time from infix".to_string()), Err(e) if e.kind() == ParseErrorKind::NotEnough => { match NaiveDate::parse_from_str(infix, fmt.format()) { Ok(d1) => { Local .from_local_datetime(&d1.and_hms_opt(10, 0, 0).unwrap(/*OK*/)) .earliest() .ok_or("Can't determine local time from infix".to_string()) } Err(e) => Err(format!("Broken: {e:?}")), } } Err(e) => Err(format!("Broken: {e:?}")), } } pub(super) fn creation_timestamp_of_currentfile( config: &FileLogWriterConfig, current_infix: &str, rotate_rcurrent: bool, o_date_for_rotated_file: Option<&DateTime>, fmt: &InfixFormat, ) -> Result, std::io::Error> { let current_path = config.file_spec.as_pathbuf(Some(current_infix)); if rotate_rcurrent { let date_for_rotated_file = o_date_for_rotated_file .copied() .unwrap_or_else(|| get_creation_timestamp(¤t_path)); let rotated_path = 
path_for_rotated_file_from_timestamp( &config.file_spec, config.use_utc, &date_for_rotated_file, fmt, ); match std::fs::rename(current_path.clone(), rotated_path.clone()) { Ok(()) => {} Err(e) => { if e.kind() != std::io::ErrorKind::NotFound { return Err(e); } } } } Ok(get_creation_timestamp(¤t_path)) } // determine the timestamp to which we want to write (file needn't exist) pub(super) fn latest_timestamp_file( config: &FileLogWriterConfig, rotate: bool, fmt: &InfixFormat, ) -> DateTime { if rotate { Local::now() } else { // find all file paths that fit the pattern config .file_spec .list_of_files( &InfixFilter::Numbrs, config.file_spec.get_suffix().as_deref(), ) .into_iter() // retrieve the infix .map(|path| ts_infix_from_path(&path, &config.file_spec)) // parse infix as date, ignore all infixes where this fails .filter_map(|infix| timestamp_from_ts_infix(&infix, fmt).ok()) // take the newest of these dates .reduce(|acc, e| if acc > e { acc } else { e }) // if nothing is found, take Local::now() .unwrap_or_else(Local::now) } } fn path_for_rotated_file_from_timestamp( file_spec: &FileSpec, use_utc: bool, timestamp_for_rotated_file: &DateTime, fmt: &InfixFormat, ) -> PathBuf { let infix = file_spec.collision_free_infix_for_rotated_file(&infix_from_timestamp( timestamp_for_rotated_file, use_utc, fmt, )); file_spec.as_pathbuf(Some(&infix)) } #[cfg(test)] mod test { use super::InfixFormat; use crate::FileSpec; use chrono::{Duration, Local}; use std::path::PathBuf; #[test] fn test_latest_timestamp_file() { let file_spec = FileSpec::default() .basename("basename") .directory("direc/tory") .discriminant("disc") .suppress_timestamp(); let now = Local::now(); let now_rounded = now .checked_sub_signed( Duration::from_std(std::time::Duration::from_nanos(u64::from( now.timestamp_subsec_nanos(), ))) .unwrap(), ) .unwrap(); let paths: Vec = (0..10) .map(|i| now_rounded - Duration::try_seconds(i).unwrap()) .map(|ts| { file_spec.as_pathbuf(Some(&super::infix_from_timestamp( &ts, false, &InfixFormat::Std, ))) }) .collect(); let newest = paths .iter() // retrieve the infix .map(|path| super::ts_infix_from_path(path, &file_spec)) // parse infix as date, ignore all files where this fails, .filter_map(|infix| super::timestamp_from_ts_infix(&infix, &InfixFormat::Std).ok()) // take the newest of these dates .reduce(|acc, e| if acc > e { acc } else { e }) // if nothing is found, take Local::now() .unwrap_or_else(Local::now); assert_eq!( now_rounded, // TODO: use mocking to avoid code duplication: // this test is only useful if the path evaluation is the same as in // super::latest_timestamp_file() newest ); } } flexi_logger-0.29.8/src/writers/file_log_writer/state.rs000064400000000000000000000664021046102023000215320ustar 00000000000000mod list_and_cleanup; mod numbers; mod timestamps; pub(crate) use timestamps::timestamp_from_ts_infix; use super::{ config::{FileLogWriterConfig, RotationConfig}, InfixFilter, }; #[cfg(feature = "async")] use crate::util::eprint_msg; use crate::{ util::{eprint_err, ErrorCode}, Age, Cleanup, Criterion, FlexiLoggerError, LogfileSelector, Naming, }; use chrono::{DateTime, Datelike, Local, Timelike}; #[cfg(feature = "async")] use std::thread::JoinHandle; use std::{ fs::{remove_file, File, OpenOptions}, io::{BufRead, BufReader, BufWriter, Write}, path::{Path, PathBuf}, sync::{Arc, Mutex}, }; use timestamps::{creation_timestamp_of_currentfile, infix_from_timestamp, latest_timestamp_file}; #[cfg(feature = "async")] const ASYNC_FLUSHER: &str = "flexi_logger-fs-async_flusher"; 
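// Async write modes: producers send plain `Vec<u8>` buffers through an unbounded
// crossbeam channel to the writer thread spawned in `start_async_fs_writer` (see below).
// Two one-byte sentinel messages, ASYNC_FLUSH (b"F") and ASYNC_SHUTDOWN (b"S") from
// `crate::util`, serve as control commands for flushing and for shutting down; every
// other message is treated as formatted log output. Drained buffers that did not outgrow
// `message_capa` are cleared and returned to the bounded `ArrayQueue` pool for reuse.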
#[cfg(feature = "async")] use { crate::util::{ASYNC_FLUSH, ASYNC_SHUTDOWN}, crossbeam_channel::Sender as CrossbeamSender, crossbeam_queue::ArrayQueue, }; #[cfg(feature = "async")] const ASYNC_WRITER: &str = "flexi_logger-async_file_writer"; const CURRENT_INFIX: &str = "rCURRENT"; #[derive(Debug)] enum NamingState { // Contains the timestamp of the current output file (read from its name), // plus the optional current infix and the format of the timestamp infix Timestamps { current_timestamp: DateTime, the_current_infix: Option, infix_format: InfixFormat, }, // contains the index to which we will rotate NumbersRCurrent(u32), // contains the index of the current output file NumbersDirect(u32), } impl NamingState { pub(crate) fn writes_direct(&self) -> bool { matches!( self, NamingState::NumbersDirect(_) | NamingState::Timestamps { current_timestamp: _, the_current_infix: None, infix_format: _, } ) } pub(crate) fn infix_filter(&self) -> InfixFilter { match self { NamingState::Timestamps { current_timestamp: _, the_current_infix: _, infix_format, } => InfixFilter::Timstmps(infix_format.clone()), NamingState::NumbersDirect(_) | NamingState::NumbersRCurrent(_) => InfixFilter::Numbrs, } } } #[derive(Clone, Debug)] pub(crate) enum InfixFormat { Std, Custom(String), } impl InfixFormat { const STD_INFIX_FORMAT: &'static str = "r%Y-%m-%d_%H-%M-%S"; pub(super) fn custom(fmt: &str) -> Self { Self::Custom(fmt.to_string()) } fn format(&self) -> &str { match self { Self::Std => Self::STD_INFIX_FORMAT, Self::Custom(fmt) => fmt, } } } #[derive(Debug)] enum RollState { Size { max_size: u64, current_size: u64, }, Age { age: Age, created_at: DateTime, }, AgeOrSize { age: Age, created_at: DateTime, max_size: u64, current_size: u64, }, } impl RollState { fn new(criterion: Criterion, append: bool, path: &Path) -> Result { let current_size = if append { std::fs::metadata(path)?.len() } else { 0 }; let created_at = get_creation_timestamp(path); Ok(match criterion { Criterion::Age(age) => RollState::Age { age, created_at }, Criterion::Size(max_size) => RollState::Size { max_size, current_size, }, Criterion::AgeOrSize(age, max_size) => RollState::AgeOrSize { age, created_at, max_size, current_size, }, }) } fn rotation_necessary(&self) -> bool { match &self { RollState::Size { max_size, current_size, } => Self::size_rotation_necessary(*max_size, *current_size), RollState::Age { age, created_at } => Self::age_rotation_necessary(*age, created_at), RollState::AgeOrSize { age, created_at, max_size, current_size, } => { Self::size_rotation_necessary(*max_size, *current_size) || Self::age_rotation_necessary(*age, created_at) } } } fn size_rotation_necessary(max_size: u64, current_size: u64) -> bool { current_size > max_size } fn age_rotation_necessary(age: Age, created_at: &DateTime) -> bool { let now = Local::now(); match age { Age::Day => { created_at.year() != now.year() || created_at.month() != now.month() || created_at.day() != now.day() } Age::Hour => { created_at.year() != now.year() || created_at.month() != now.month() || created_at.day() != now.day() || created_at.hour() != now.hour() } Age::Minute => { created_at.year() != now.year() || created_at.month() != now.month() || created_at.day() != now.day() || created_at.hour() != now.hour() || created_at.minute() != now.minute() } Age::Second => { created_at.year() != now.year() || created_at.month() != now.month() || created_at.day() != now.day() || created_at.hour() != now.hour() || created_at.minute() != now.minute() || created_at.second() != now.second() } } } fn 
reset_size_and_date(&mut self, path: &Path) { match self { RollState::Size { max_size: _, current_size, } => { *current_size = 0; } RollState::Age { age: _, created_at } => { *created_at = get_creation_timestamp(path); } RollState::AgeOrSize { age: _, created_at, max_size: _, current_size, } => { *created_at = get_creation_timestamp(path); *current_size = 0; } } } fn increase_size(&mut self, add: u64) { if let RollState::Size { max_size: _, ref mut current_size, } | RollState::AgeOrSize { age: _, created_at: _, max_size: _, ref mut current_size, } = *self { *current_size += add; } } } #[derive(Debug)] struct RotationState { naming_state: NamingState, roll_state: RollState, cleanup: Cleanup, o_cleanup_thread_handle: Option, } impl RotationState { fn shutdown(&mut self) { // this sets o_cleanup_thread_handle in self.state.o_rotation_state to None: let o_cleanup_thread_handle = self.o_cleanup_thread_handle.take(); if let Some(cleanup_thread_handle) = o_cleanup_thread_handle { cleanup_thread_handle.shutdown(); } } } enum Inner { Initial(Option, bool), Active(Option, Box, PathBuf), } impl Inner { fn uses_rotation(&self) -> bool { match self { Inner::Initial(o_r, _) => o_r.is_some(), Inner::Active(o_r, _, _) => o_r.is_some(), } } fn infix_filter(&self) -> InfixFilter { match self { Inner::Initial(_o_r, _) => InfixFilter::None, Inner::Active(o_r, _, _) => o_r .as_ref() .map_or(InfixFilter::None, |rs| rs.naming_state.infix_filter()), } } } impl std::fmt::Debug for Inner { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { match self { Self::Initial(o_rot, b) => f.write_fmt(format_args!("Initial({o_rot:?}, {b}) ")), Self::Active(o_rot, _, _) => { f.write_fmt(format_args!("Active({o_rot:?}, ) ")) } } } } // The mutable state of a FileLogWriter. #[derive(Debug)] pub(super) struct State { config: FileLogWriterConfig, inner: Inner, } impl State { pub(super) fn new( config: FileLogWriterConfig, o_rotation_config: Option, cleanup_in_background_thread: bool, ) -> Self { Self { config, inner: Inner::Initial(o_rotation_config, cleanup_in_background_thread), } } fn initialize(&mut self) -> Result<(), std::io::Error> { if let Inner::Initial(o_rotation_config, cleanup_in_background_thread) = &self.inner { self.inner = match o_rotation_config { None => { // no rotation let (write, path) = open_log_file(&self.config, None)?; Inner::Active(None, write, path) } Some(rotate_config) => { self.initialize_with_rotation(rotate_config, *cleanup_in_background_thread)? 
} }; } Ok(()) } #[allow(clippy::too_many_lines)] fn initialize_with_rotation( &self, rotate_config: &RotationConfig, cleanup_in_background_thread: bool, ) -> Result { let (naming_state, infix) = match rotate_config.naming { Naming::TimestampsDirect => { let ts = latest_timestamp_file(&self.config, !self.config.append, &InfixFormat::Std); ( NamingState::Timestamps { current_timestamp: ts, the_current_infix: None, infix_format: InfixFormat::Std, }, infix_from_timestamp(&ts, self.config.use_utc, &InfixFormat::Std), ) } Naming::Timestamps => ( NamingState::Timestamps { current_timestamp: creation_timestamp_of_currentfile( &self.config, CURRENT_INFIX, !self.config.append, None, &InfixFormat::Std, )?, the_current_infix: Some(CURRENT_INFIX.to_string()), infix_format: InfixFormat::Std, }, CURRENT_INFIX.to_string(), ), Naming::TimestampsCustomFormat { current_infix: o_current_token, format: ts_fmt, } => { if let Some(current_token) = o_current_token { let current_infix = current_token.to_string(); let naming_state = NamingState::Timestamps { current_timestamp: creation_timestamp_of_currentfile( &self.config, ¤t_infix, !self.config.append, None, &InfixFormat::custom(ts_fmt), )?, the_current_infix: Some(current_infix.clone()), infix_format: InfixFormat::custom(ts_fmt), }; (naming_state, current_infix) } else { let fmt = InfixFormat::custom(ts_fmt); let ts = latest_timestamp_file(&self.config, !self.config.append, &fmt); let infix = infix_from_timestamp(&ts, self.config.use_utc, &fmt); ( NamingState::Timestamps { current_timestamp: ts, the_current_infix: None, infix_format: fmt, }, infix, ) } } Naming::Numbers => ( NamingState::NumbersRCurrent(numbers::index_for_rcurrent( &self.config, None, !self.config.append, )?), CURRENT_INFIX.to_string(), ), Naming::NumbersDirect => { let idx = match numbers::get_highest_index(&self.config.file_spec) { None => 0, Some(idx) => { if self.config.append { idx } else { idx + 1 } } }; (NamingState::NumbersDirect(idx), numbers::number_infix(idx)) } }; let (write, path) = open_log_file(&self.config, Some(&infix))?; let roll_state = RollState::new(rotate_config.criterion, self.config.append, &path)?; let o_cleanup_thread_handle = if rotate_config.cleanup.do_cleanup() { list_and_cleanup::remove_or_compress_too_old_logfiles( None, &rotate_config.cleanup, &self.config.file_spec, &naming_state.infix_filter(), rotate_config.naming.writes_direct(), )?; if cleanup_in_background_thread { Some(list_and_cleanup::start_cleanup_thread( rotate_config.cleanup, self.config.file_spec.clone(), &naming_state.infix_filter(), rotate_config.naming.writes_direct(), )?) 
} else { None } } else { None }; Ok(Inner::Active( Some(RotationState { naming_state, roll_state, cleanup: rotate_config.cleanup, o_cleanup_thread_handle, }), write, path, )) } pub fn config(&self) -> &FileLogWriterConfig { &self.config } pub fn flush(&mut self) -> std::io::Result<()> { if let Inner::Active(_, ref mut file, _) = self.inner { file.flush() } else { Ok(()) } } #[inline] pub(super) fn mount_next_linewriter_if_necessary( &mut self, force: bool, ) -> Result<(), FlexiLoggerError> { if let Inner::Active( Some(ref mut rotation_state), ref mut current_write, ref mut current_path, ) = self.inner { if force || rotation_state.roll_state.rotation_necessary() { let infix = match rotation_state.naming_state { NamingState::Timestamps { current_timestamp: ref mut ts, the_current_infix: ref o_current_infix, infix_format: ref fmt, } => { if let Some(current_infix) = o_current_infix { *ts = creation_timestamp_of_currentfile( &self.config, current_infix, true, Some(ts), fmt, )?; current_infix.clone() } else { *ts = Local::now(); self.config.file_spec.collision_free_infix_for_rotated_file( &infix_from_timestamp(ts, self.config.use_utc, fmt), ) } } NamingState::NumbersRCurrent(ref mut idx_state) => { *idx_state = numbers::index_for_rcurrent(&self.config, Some(*idx_state), true)?; CURRENT_INFIX.to_string() } NamingState::NumbersDirect(ref mut idx_state) => { *idx_state += 1; numbers::number_infix(*idx_state) } }; let (new_write, new_path) = open_log_file(&self.config, Some(&infix))?; *current_write = new_write; *current_path = new_path; rotation_state.roll_state.reset_size_and_date(current_path); list_and_cleanup::remove_or_compress_too_old_logfiles( rotation_state.o_cleanup_thread_handle.as_ref(), &rotation_state.cleanup, &self.config.file_spec, &rotation_state.naming_state.infix_filter(), rotation_state.naming_state.writes_direct(), )?; } } Ok(()) } pub(super) fn write_buffer(&mut self, buf: &[u8]) -> std::io::Result<()> { if let Inner::Initial(_, _) = self.inner { self.initialize()?; } // rotate if necessary self.mount_next_linewriter_if_necessary(false) .unwrap_or_else(|e| { eprint_err(ErrorCode::LogFile, "can't open file", &e); }); if let Inner::Active(ref mut o_rotation_state, ref mut log_file, ref _path) = self.inner { log_file.write_all(buf)?; if let Some(ref mut rotation_state) = o_rotation_state { rotation_state.roll_state.increase_size(buf.len() as u64); }; } Ok(()) } pub fn reopen_outputfile(&mut self) -> Result<(), std::io::Error> { if let Inner::Active(_, ref mut file, ref p_path) = self.inner { match OpenOptions::new().create(true).append(true).open(p_path) { Ok(f) => { // proved to work on standard windows, linux, mac *file = Box::new(f); } Err(_unexpected_error) => { // there are environments, like github's windows container, // where this extra step helps to overcome the _unexpected_error let mut dummy = PathBuf::from(p_path); dummy.set_extension("ShortLivingTempFileForReOpen"); *file = Box::new(OpenOptions::new().create(true).append(true).open(&dummy)?); remove_file(&dummy)?; *file = Box::new(OpenOptions::new().create(true).append(true).open(p_path)?); } } } Ok(()) } pub(crate) fn existing_log_files(&self, selector: &LogfileSelector) -> Vec { list_and_cleanup::existing_log_files( &self.config.file_spec, self.inner.uses_rotation(), &self.inner.infix_filter(), selector, ) } pub fn validate_logs(&mut self, expected: &[(&'static str, &'static str, &'static str)]) { if let Inner::Initial(_, _) = self.inner { self.initialize().expect("validate_logs: initialize failed"); }; if let 
Inner::Active(ref o_rotation_state, _, ref path) = self.inner { let rotation_possible = o_rotation_state.is_some(); let f = File::open(path.clone()).unwrap_or_else(|e| { panic!( "validate_logs: can't open file {} due to {e:?}", path.display(), ) }); let mut reader = BufReader::new(f); validate_logs_in_file(&mut reader, path, expected, rotation_possible); } else { unreachable!("oiuoiuoiusdsaaöld"); } } pub fn shutdown(&mut self) { if let Inner::Active(ref mut o_rotation_state, ref mut writer, _) = self.inner { if let Some(ref mut rotation_state) = o_rotation_state { rotation_state.shutdown(); } writer.flush().ok(); } } } fn validate_logs_in_file( reader: &mut dyn BufRead, path: &Path, expected: &[(&'static str, &'static str, &'static str)], rotation_possible: bool, ) { let warning = if rotation_possible { "Warning: Validation is not fully implemented for rotation, old files are ignored" } else { "" }; let mut buf = String::new(); for tuple in expected { buf.clear(); reader .read_line(&mut buf) .expect("validate_logs: can't read file"); assert!( buf.contains(tuple.0), "Did not find tuple.0 = {} in file {}; {}", tuple.0, path.display(), warning ); assert!( buf.contains(tuple.1), "Did not find tuple.1 = {} in file {}; {}", tuple.1, path.display(), warning ); assert!( buf.contains(tuple.2), "Did not find tuple.2 = {} in file {}; {}", tuple.2, path.display(), warning ); } buf.clear(); reader .read_line(&mut buf) .expect("validate_logs: can't read file"); assert!(buf.is_empty(), "Found more log lines than expected: {buf} "); } fn open_log_file( config: &FileLogWriterConfig, o_infix: Option<&str>, ) -> Result<(Box, PathBuf), std::io::Error> { let path = config.file_spec.as_pathbuf(o_infix); if config.print_message { println!("Log is written to {}", &path.display()); } if let Some(ref link) = config.o_create_symlink { self::platform::create_symlink_if_possible(link, &path); } let logfile = OpenOptions::new() .write(true) .create(true) .append(config.append) .truncate(!config.append) .open(&path)?; let w: Box = if let Some(capacity) = config.write_mode.buffersize() { Box::new(BufWriter::with_capacity(capacity, logfile)) } else { Box::new(logfile) }; Ok((w, path)) } fn get_creation_timestamp(path: &Path) -> DateTime { // On windows, we know that try_get_creation_date() returns a result, but it is wrong. if cfg!(target_os = "windows") { get_current_timestamp() } else { // On all others of the many platforms, we give the real creation date a try, // and fall back if it is not available. 
try_get_creation_timestamp(path) .or_else(|_| try_get_modification_timestamp(path)) .unwrap_or_else(|_| get_current_timestamp()) } } fn try_get_creation_timestamp(path: &Path) -> Result, FlexiLoggerError> { Ok(std::fs::metadata(path)?.created()?.into()) } fn try_get_modification_timestamp(path: &Path) -> Result, FlexiLoggerError> { let md = std::fs::metadata(path)?; let d = md.created().or_else(|_| md.modified())?; Ok(d.into()) } fn get_current_timestamp() -> DateTime { Local::now() } #[cfg(feature = "async")] pub(super) fn start_async_fs_writer( am_state: Arc>, message_capa: usize, a_pool: Arc>>, ) -> (CrossbeamSender>, Mutex>>) { let (sender, receiver) = crossbeam_channel::unbounded::>(); ( sender, Mutex::new(Some( std::thread::Builder::new() .name(ASYNC_WRITER.to_string()) .spawn(move || loop { match receiver.recv() { Err(_) => break, Ok(mut message) => { let mut state = am_state.lock().unwrap(/* ok */); match message.as_ref() { ASYNC_FLUSH => { state.flush().unwrap_or_else(|e| { eprint_err(ErrorCode::Flush, "flushing failed", &e); }); } ASYNC_SHUTDOWN => { state.shutdown(); break; } _ => { state.write_buffer(&message).unwrap_or_else(|e| { eprint_err(ErrorCode::Write, "writing failed", &e); }); } } if message.capacity() <= message_capa { message.clear(); a_pool.push(message).ok(); } } } }) .expect("Couldn't spawn flexi_logger-async_file_log_writer"), )), ) } pub(super) fn start_sync_flusher(am_state: Arc>, flush_interval: std::time::Duration) { let builder = std::thread::Builder::new().name("flexi_logger-file_flusher".to_string()); #[cfg(not(feature = "dont_minimize_extra_stacks"))] let builder = builder.stack_size(1024); builder.spawn(move || { let (_tx, rx) = std::sync::mpsc::channel::<()>(); loop { rx.recv_timeout(flush_interval).ok(); (*am_state).lock().map_or_else( |_e| (), |mut state| { state.flush().ok(); }, ); } }) .unwrap(/* yes, let's panic if the thread can't be spawned */); } #[cfg(feature = "async")] pub(crate) fn start_async_fs_flusher( async_writer: CrossbeamSender>, flush_interval: std::time::Duration, ) { let builder = std::thread::Builder::new().name(ASYNC_FLUSHER.to_string()); #[cfg(not(feature = "dont_minimize_extra_stacks"))] let builder = builder.stack_size(1024); builder.spawn(move || { let (_tx, rx) = std::sync::mpsc::channel::<()>(); loop { if let Err(std::sync::mpsc::RecvTimeoutError::Disconnected) = rx.recv_timeout(flush_interval) { eprint_msg(ErrorCode::Flush, "Flushing unexpectedly stopped working"); break; } async_writer.send(ASYNC_FLUSH.to_vec()).ok(); } }) .unwrap(/* yes, let's panic if the thread can't be spawned */); } mod platform { #[cfg(target_family = "unix")] use crate::util::{eprint_err, ErrorCode}; use std::path::Path; pub fn create_symlink_if_possible(link: &Path, path: &Path) { unix_create_symlink(link, path); } #[cfg(target_family = "unix")] fn unix_create_symlink(link: &Path, logfile: &Path) { if std::fs::symlink_metadata(link).is_ok() { // remove old symlink before creating a new one if let Err(e) = std::fs::remove_file(link) { eprint_err(ErrorCode::Symlink, "cannot delete symlink to log file", &e); } } // create new symlink if let Err(e) = std::os::unix::fs::symlink(logfile, link) { eprint_err(ErrorCode::Symlink, "cannot create symlink to logfile", &e); } } #[cfg(not(target_family = "unix"))] fn unix_create_symlink(_: &Path, _: &Path) {} } flexi_logger-0.29.8/src/writers/file_log_writer/state_handle.rs000064400000000000000000000277771046102023000230610ustar 00000000000000use super::{builder::FileLogWriterBuilder, 
config::FileLogWriterConfig, state::State}; #[cfg(feature = "async")] use crate::util::{ASYNC_FLUSH, ASYNC_SHUTDOWN}; use crate::{ util::{buffer_with, eprint_err, io_err, ErrorCode}, LogfileSelector, ZERO_DURATION, }; use crate::{DeferredNow, FlexiLoggerError, FormatFunction}; use log::Record; #[cfg(feature = "async")] use std::thread::JoinHandle; use std::{ io::Write, path::PathBuf, sync::{Arc, Mutex}, }; #[cfg(feature = "async")] use {crossbeam_channel::Sender, crossbeam_queue::ArrayQueue}; #[derive(Debug)] pub(super) enum StateHandle { Sync(SyncHandle), #[cfg(feature = "async")] Async(AsyncHandle), } pub(super) struct SyncHandle { am_state: Arc>, format_function: FormatFunction, line_ending: &'static [u8], } impl SyncHandle { fn new(state: State, format_function: FormatFunction) -> Self { let line_ending = state.config().line_ending; let flush_interval = state.config().write_mode.get_flush_interval(); let am_state = Arc::new(Mutex::new(state)); if flush_interval != ZERO_DURATION { super::state::start_sync_flusher(Arc::clone(&am_state), flush_interval); } Self { am_state, format_function, line_ending, } } } impl std::fmt::Debug for SyncHandle { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { f.debug_struct("SyncHandle") .field("am_state", &self.am_state) .field("format", &"<..>") .field("line_ending", &self.line_ending) .finish_non_exhaustive() } } #[cfg(feature = "async")] pub(super) struct AsyncHandle { am_state: Arc>, sender: Sender>, mo_thread_handle: Mutex>>, a_pool: Arc>>, message_capa: usize, format_function: FormatFunction, line_ending: &'static [u8], } #[cfg(feature = "async")] impl AsyncHandle { fn new( pool_capa: usize, message_capa: usize, state: State, format_function: FormatFunction, ) -> Self { let flush_interval = state.config().write_mode.get_flush_interval(); let line_ending = state.config().line_ending; let am_state = Arc::new(Mutex::new(state)); let a_pool = Arc::new(ArrayQueue::new(pool_capa)); let (sender, mo_thread_handle) = super::state::start_async_fs_writer( Arc::clone(&am_state), message_capa, Arc::clone(&a_pool), ); if flush_interval != ZERO_DURATION { super::state::start_async_fs_flusher(sender.clone(), flush_interval); } Self { am_state, sender, mo_thread_handle, a_pool, message_capa, format_function, line_ending, } } fn write(&self, now: &mut DeferredNow, record: &Record) -> Result<(), std::io::Error> { let mut buffer = self.pop_buffer(); (self.format_function)(&mut buffer, now, record).map_err(|e| { eprint_err(ErrorCode::Format, "formatting failed", &e); e })?; buffer.write_all(self.line_ending).map_err(|e| { eprint_err(ErrorCode::Write, "writing failed", &e); e })?; self.sender.send(buffer).map_err(|_e| io_err("Send")) } fn pop_buffer(&self) -> Vec { self.a_pool .pop() .unwrap_or_else(|| Vec::with_capacity(self.message_capa)) } } #[cfg(feature = "async")] impl std::fmt::Debug for AsyncHandle { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { f.debug_struct("AsyncHandle") .field("am_state", &self.am_state) .field("sender", &self.sender) .field("mo_thread_handle", &self.mo_thread_handle) .field("a_pool", &self.a_pool) .field("message_capa", &self.message_capa) .field("format", &"<..>") .field("line_ending", &self.line_ending) .finish_non_exhaustive() } } impl StateHandle { // produce a StateHandle::Sync, optionally with an own flusher-thread pub(super) fn new_sync(state: State, format_function: FormatFunction) -> StateHandle { StateHandle::Sync(SyncHandle::new(state, format_function)) } // produce a 
StateHandle::Async with its writer-thread, and optionally an own flusher-thread #[cfg(feature = "async")] pub(super) fn new_async( pool_capa: usize, message_capa: usize, state: State, format_function: FormatFunction, ) -> Self { Self::Async(AsyncHandle::new( pool_capa, message_capa, state, format_function, )) } pub(super) fn format_function(&self) -> FormatFunction { match self { StateHandle::Sync(handle) => handle.format_function, #[cfg(feature = "async")] StateHandle::Async(handle) => handle.format_function, } } pub(super) fn plain_write(&self, buffer: &[u8]) -> std::result::Result { match self { StateHandle::Sync(handle) => { let mut state_guard = handle.am_state.lock().map_err(|_e| io_err("Poison"))?; let state = &mut *state_guard; state.write_buffer(buffer).map(|()| buffer.len()) } #[cfg(feature = "async")] StateHandle::Async(handle) => { handle .sender .send(buffer.to_owned()) .map_err(|_e| io_err("Send"))?; Ok(buffer.len()) } } } #[allow(clippy::unnecessary_wraps)] // necessary if not compiled with feature = "async" #[inline] pub(super) fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { match &self { StateHandle::Sync(handle) => { buffer_with(|tl_buf| match tl_buf.try_borrow_mut() { Ok(mut buffer) => { (handle.format_function)(&mut *buffer, now, record).unwrap_or_else(|e| { eprint_err(ErrorCode::Format, "formatting failed", &e); }); buffer .write_all(handle.line_ending) .unwrap_or_else(|e| eprint_err(ErrorCode::Write, "writing failed", &e)); handle .am_state .lock() .expect("state_handle.am_state is poisoned") .write_buffer(&buffer) .unwrap_or_else(|e| eprint_err(ErrorCode::Write, "writing failed", &e)); buffer.clear(); } Err(_e) => { // We arrive here in the rare cases of recursive logging // (e.g. log calls in Debug or Display implementations) // we print the inner calls, in chronological order, before finally the // outer most message is printed let mut tmp_buf = Vec::::with_capacity(200); (handle.format_function)(&mut tmp_buf, now, record).unwrap_or_else(|e| { eprint_err(ErrorCode::Format, "formatting failed", &e); }); let mut state_guard = handle .am_state .lock() .expect("state_handle.am_state is poisoned"); let state = &mut *state_guard; tmp_buf .write_all(state.config().line_ending) .unwrap_or_else(|e| eprint_err(ErrorCode::Write, "writing failed", &e)); state .write_buffer(&tmp_buf) .unwrap_or_else(|e| eprint_err(ErrorCode::Write, "writing failed", &e)); } }); } #[cfg(feature = "async")] StateHandle::Async(handle) => handle.write(now, record)?, } Ok(()) } #[inline] pub(super) fn flush(&self) -> std::io::Result<()> { match &self { StateHandle::Sync(handle) => { if let Ok(ref mut state) = handle.am_state.lock() { state.flush()?; } } #[cfg(feature = "async")] StateHandle::Async(handle) => { let mut buffer = handle.pop_buffer(); buffer.extend(ASYNC_FLUSH); handle.sender.send(buffer).ok(); } } Ok(()) } // Replaces parts of the configuration of the file log writer. 
pub(super) fn reset(&self, flwb: &FileLogWriterBuilder) -> Result<(), FlexiLoggerError> { let mut state = match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] StateHandle::Async(handle) => handle.am_state.lock(), } .map_err(|_| FlexiLoggerError::Poison)?; flwb.assert_write_mode((*state).config().write_mode)?; *state = flwb.try_build_state()?; Ok(()) } pub(super) fn reopen_outputfile(&self) -> Result<(), FlexiLoggerError> { let mut state = match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] StateHandle::Async(handle) => handle.am_state.lock(), } .map_err(|_| FlexiLoggerError::Poison)?; Ok(state.reopen_outputfile()?) } pub(super) fn rotate(&self) -> Result<(), FlexiLoggerError> { let mut state = match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] StateHandle::Async(handle) => handle.am_state.lock(), } .map_err(|_| FlexiLoggerError::Poison)?; state.mount_next_linewriter_if_necessary(true) } pub(crate) fn config(&self) -> Result { let state = match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] StateHandle::Async(handle) => handle.am_state.lock(), } .map_err(|_| FlexiLoggerError::Poison)?; Ok(state.config().clone()) } pub(super) fn existing_log_files( &self, selector: &LogfileSelector, ) -> Result, FlexiLoggerError> { let state = match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] StateHandle::Async(handle) => handle.am_state.lock(), } .map_err(|_| FlexiLoggerError::Poison)?; Ok(state.existing_log_files(selector)) } pub(super) fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] StateHandle::Async(handle) => handle.am_state.lock(), } .map(|mut state| state.validate_logs(expected)) .ok(); } pub(super) fn shutdown(&self) { match &self { StateHandle::Sync(handle) => { // do nothing in case of poison errors if let Ok(ref mut state) = handle.am_state.lock() { state.shutdown(); } } #[cfg(feature = "async")] StateHandle::Async(handle) => { let mut buffer = handle.pop_buffer(); buffer.extend(ASYNC_SHUTDOWN); handle.sender.send(buffer).ok(); if let Ok(ref mut o_th) = handle.mo_thread_handle.lock() { o_th.take().and_then(|th| th.join().ok()); } } } } } flexi_logger-0.29.8/src/writers/file_log_writer/threads.rs000064400000000000000000000000011046102023000220230ustar 00000000000000 flexi_logger-0.29.8/src/writers/file_log_writer.rs000064400000000000000000000537651046102023000204220ustar 00000000000000#![allow(clippy::module_name_repetitions)] mod builder; mod config; mod infix_filter; mod state; mod state_handle; mod threads; pub use self::builder::{ArcFileLogWriter, FileLogWriterBuilder, FileLogWriterHandle}; pub use self::config::FileLogWriterConfig; pub(crate) use infix_filter::InfixFilter; use self::{config::RotationConfig, state::State, state_handle::StateHandle}; use crate::{ writers::LogWriter, DeferredNow, EffectiveWriteMode, FileSpec, FlexiLoggerError, FormatFunction, LogfileSelector, }; use log::Record; use std::path::PathBuf; const WINDOWS_LINE_ENDING: &[u8] = b"\r\n"; const UNIX_LINE_ENDING: &[u8] = b"\n"; /// A configurable [`LogWriter`] implementation that writes to a file or a sequence of files. /// /// See [writers](crate::writers) for usage guidance. 
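///
/// # Example
///
/// A minimal sketch (not the only way to use it): build a `FileLogWriter` and register it
/// as the default output channel of a [`Logger`](crate::Logger); the directory name is
/// chosen here only for illustration.
///
/// ```rust,no_run
/// use flexi_logger::{writers::FileLogWriter, FileSpec, Logger};
///
/// let file_log_writer = FileLogWriter::builder(
///     FileSpec::default().directory("log_files/flw_example"),
/// )
/// .print_message()
/// .try_build()
/// .unwrap();
///
/// let _logger_handle = Logger::try_with_str("info")
///     .unwrap()
///     .log_to_writer(Box::new(file_log_writer))
///     .start()
///     .unwrap();
///
/// log::info!("This message goes through the FileLogWriter");
/// ```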
#[derive(Debug)] pub struct FileLogWriter { // the state needs to be mutable; since `Log.log()` requires an unmutable self, // which translates into a non-mutating `LogWriter::write()`, // we need internal mutability and thread-safety. state_handle: StateHandle, max_log_level: log::LevelFilter, } impl FileLogWriter { fn new( state: State, max_log_level: log::LevelFilter, format_function: FormatFunction, ) -> FileLogWriter { let state_handle = match state.config().write_mode.effective_write_mode() { EffectiveWriteMode::Direct | EffectiveWriteMode::BufferAndFlushWith(_) | EffectiveWriteMode::BufferDontFlushWith(_) => { StateHandle::new_sync(state, format_function) } #[cfg(feature = "async")] EffectiveWriteMode::AsyncWith { pool_capa, message_capa, flush_interval: _, } => StateHandle::new_async(pool_capa, message_capa, state, format_function), }; FileLogWriter { state_handle, max_log_level, } } /// Instantiates a builder for `FileLogWriter`. #[must_use] pub fn builder(file_spec: FileSpec) -> FileLogWriterBuilder { FileLogWriterBuilder::new(file_spec) } /// Returns a reference to its configured output format function. #[must_use] #[inline] pub fn format(&self) -> FormatFunction { self.state_handle.format_function() } pub(crate) fn plain_write(&self, buffer: &[u8]) -> std::result::Result { self.state_handle.plain_write(buffer) } /// Replaces parts of the configuration of the file log writer. /// /// Note that the write mode and the format function cannot be reset and /// that the provided `FileLogWriterBuilder` must have the same values for these as the /// current `FileLogWriter`. /// /// # Errors /// /// `FlexiLoggerError::Reset` if a reset was tried with a different write mode. /// /// `FlexiLoggerError::Io` if the specified path doesn't work. /// /// `FlexiLoggerError::OutputBadDirectory` if the specified path is not a directory. /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. pub fn reset(&self, flwb: &FileLogWriterBuilder) -> Result<(), FlexiLoggerError> { self.state_handle.reset(flwb) } /// Returns the current configuration of the file log writer /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. pub fn config(&self) -> Result { self.state_handle.config() } /// Makes the `FileLogWriter` re-open the current log file. /// /// `FileLogWriter` expects that nobody else modifies the file to which it writes, /// and offers capabilities to rotate, compress, and clean up log files. /// /// However, if you use tools like linux' `logrotate` /// to rename or delete the current output file, you need to inform the `FileLogWriter` about /// such actions by calling this method. Otherwise the `FileLogWriter` will not stop /// writing to the renamed or even deleted file! /// /// # Example /// /// `logrotate` e.g. can be configured to send a `SIGHUP` signal to your program. You should /// handle `SIGHUP` in your program explicitly, /// e.g. using a crate like [`ctrlc`](https://docs.rs/ctrlc/latest/ctrlc/), /// and call this function from the registered signal handler. /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. pub fn reopen_outputfile(&self) -> Result<(), FlexiLoggerError> { self.state_handle.reopen_outputfile() } /// Trigger an extra log file rotation. /// /// Does nothing if rotation is not configured. /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. /// /// IO errors. 
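///
/// # Example
///
/// A minimal sketch, with rotation parameters chosen only for illustration:
///
/// ```rust,no_run
/// use flexi_logger::{writers::FileLogWriter, Cleanup, Criterion, FileSpec, Naming};
///
/// let flw = FileLogWriter::builder(FileSpec::default())
///     .rotate(Criterion::Size(10_000), Naming::Numbers, Cleanup::Never)
///     .try_build()
///     .unwrap();
/// // ... write some log lines, then force an extra rotation:
/// flw.rotate().unwrap();
/// ```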
pub fn rotate(&self) -> Result<(), FlexiLoggerError> { self.state_handle.rotate() } /// Returns the list of existing log files according to the current `FileSpec`. /// /// The list includes the current log file and the compressed files, if they exist. /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. pub fn existing_log_files( &self, selector: &LogfileSelector, ) -> Result, FlexiLoggerError> { self.state_handle.existing_log_files(selector) } } impl LogWriter for FileLogWriter { #[inline] fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { if record.level() <= self.max_log_level { self.state_handle.write(now, record) } else { Ok(()) } } #[inline] fn flush(&self) -> std::io::Result<()> { self.state_handle.flush() } #[inline] fn max_log_level(&self) -> log::LevelFilter { self.max_log_level } fn reopen_output(&self) -> Result<(), FlexiLoggerError> { self.reopen_outputfile() } fn rotate(&self) -> Result<(), FlexiLoggerError> { self.state_handle.rotate() } fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { self.state_handle.validate_logs(expected); } fn shutdown(&self) { self.state_handle.shutdown(); } } impl Drop for FileLogWriter { fn drop(&mut self) { self.shutdown(); } } #[cfg(test)] mod test { #[cfg(feature = "async")] use crate::ZERO_DURATION; use crate::{writers::LogWriter, Cleanup, Criterion, DeferredNow, FileSpec, Naming, WriteMode}; use chrono::Local; use std::ops::Add; use std::path::{Path, PathBuf}; use std::time::Duration; const DIRECTORY: &str = r"log_files/rotate"; const ONE: &str = "ONE"; const TWO: &str = "TWO"; const THREE: &str = "THREE"; const FOUR: &str = "FOUR"; const FIVE: &str = "FIVE"; const SIX: &str = "SIX"; const SEVEN: &str = "SEVEN"; const EIGHT: &str = "EIGHT"; const NINE: &str = "NINE"; const FMT_DASHES_U_DASHES: &str = "%Y-%m-%d_%H-%M-%S"; #[test] fn test_rotate_no_append_numbers() { // we use timestamp as discriminant to allow repeated runs let ts = String::from("false-numbers-") + &Local::now().format(FMT_DASHES_U_DASHES).to_string(); let naming = Naming::Numbers; // ensure we start with -/-/- assert!(not_exists("00000", &ts)); assert!(not_exists("00001", &ts)); assert!(not_exists("CURRENT", &ts)); // ensure this produces -/-/ONE write_loglines(false, naming, &ts, &[ONE]); assert!(not_exists("00000", &ts)); assert!(not_exists("00001", &ts)); assert!(contains("CURRENT", &ts, ONE)); // ensure this produces ONE/-/TWO write_loglines(false, naming, &ts, &[TWO]); assert!(contains("00000", &ts, ONE)); assert!(not_exists("00001", &ts)); assert!(contains("CURRENT", &ts, TWO)); // ensure this also produces ONE/-/TWO remove("CURRENT", &ts); assert!(not_exists("CURRENT", &ts)); write_loglines(false, naming, &ts, &[TWO]); assert!(contains("00000", &ts, ONE)); assert!(not_exists("00001", &ts)); assert!(contains("CURRENT", &ts, TWO)); // ensure this produces ONE/TWO/THREE write_loglines(false, naming, &ts, &[THREE]); assert!(contains("00000", &ts, ONE)); assert!(contains("00001", &ts, TWO)); assert!(contains("CURRENT", &ts, THREE)); } #[test] fn test_rotate_with_append_numbers() { // we use timestamp as discriminant to allow repeated runs let ts = String::from("true-numbers-") + &Local::now().format(FMT_DASHES_U_DASHES).to_string(); let naming = Naming::Numbers; // ensure we start with -/-/- assert!(not_exists("00000", &ts)); assert!(not_exists("00001", &ts)); assert!(not_exists("CURRENT", &ts)); // ensure this produces 12/-/3 write_loglines(true, naming, &ts, &[ONE, TWO, THREE]); 
assert!(contains("00000", &ts, ONE)); assert!(contains("00000", &ts, TWO)); assert!(not_exists("00001", &ts)); assert!(contains("CURRENT", &ts, THREE)); // ensure this produces 12/34/56 write_loglines(true, naming, &ts, &[FOUR, FIVE, SIX]); assert!(contains("00000", &ts, ONE)); assert!(contains("00000", &ts, TWO)); assert!(contains("00001", &ts, THREE)); assert!(contains("00001", &ts, FOUR)); assert!(contains("CURRENT", &ts, FIVE)); assert!(contains("CURRENT", &ts, SIX)); // ensure this also produces 12/34/56 remove("CURRENT", &ts); remove("00001", &ts); assert!(not_exists("CURRENT", &ts)); write_loglines(true, naming, &ts, &[THREE, FOUR, FIVE, SIX]); assert!(contains("00000", &ts, ONE)); assert!(contains("00000", &ts, TWO)); assert!(contains("00001", &ts, THREE)); assert!(contains("00001", &ts, FOUR)); assert!(contains("CURRENT", &ts, FIVE)); assert!(contains("CURRENT", &ts, SIX)); // ensure this produces 12/34/56/78/9 write_loglines(true, naming, &ts, &[SEVEN, EIGHT, NINE]); assert!(contains("00002", &ts, FIVE)); assert!(contains("00002", &ts, SIX)); assert!(contains("00003", &ts, SEVEN)); assert!(contains("00003", &ts, EIGHT)); assert!(contains("CURRENT", &ts, NINE)); } #[test] fn test_rotate_no_append_timestamps() { // we use timestamp as discriminant to allow repeated runs let ts_discr = String::from("false-timestamps-") + &Local::now().format(FMT_DASHES_U_DASHES).to_string(); let basename = String::from(DIRECTORY).add("/").add( &Path::new(&std::env::args().next().unwrap()) .file_stem().unwrap(/*cannot fail*/) .to_string_lossy(), ); let naming = Naming::Timestamps; println!("{} ensure we start with -/-/-", chrono::Local::now()); assert!(list_rotated_files(&basename, &ts_discr).is_empty()); assert!(not_exists("CURRENT", &ts_discr)); println!("{} ensure this produces -/-/ONE", chrono::Local::now()); write_loglines(false, naming, &ts_discr, &[ONE]); assert!(list_rotated_files(&basename, &ts_discr).is_empty()); assert!(contains("CURRENT", &ts_discr, ONE)); std::thread::sleep(Duration::from_secs(2)); println!("{} ensure this produces ONE/-/TWO", chrono::Local::now()); write_loglines(false, naming, &ts_discr, &[TWO]); assert_eq!(list_rotated_files(&basename, &ts_discr).len(), 1); assert!(contains("CURRENT", &ts_discr, TWO)); std::thread::sleep(Duration::from_secs(2)); println!( "{} ensure this produces ONE/TWO/THREE", chrono::Local::now() ); write_loglines(false, naming, &ts_discr, &[THREE]); assert_eq!(list_rotated_files(&basename, &ts_discr).len(), 2); assert!(contains("CURRENT", &ts_discr, THREE)); } #[test] fn test_rotate_with_append_timestamps() { // we use timestamp as discriminant to allow repeated runs let ts = String::from("true-timestamps-") + &Local::now().format(FMT_DASHES_U_DASHES).to_string(); let basename = String::from(DIRECTORY).add("/").add( &Path::new(&std::env::args().next().unwrap()) .file_stem().unwrap(/*cannot fail*/) .to_string_lossy(), ); let naming = Naming::Timestamps; // ensure we start with -/-/- assert!(list_rotated_files(&basename, &ts).is_empty()); assert!(not_exists("CURRENT", &ts)); // ensure this produces 12/-/3 write_loglines(true, naming, &ts, &[ONE, TWO, THREE]); assert_eq!(list_rotated_files(&basename, &ts).len(), 1); assert!(contains("CURRENT", &ts, THREE)); // ensure this produces 12/34/56 write_loglines(true, naming, &ts, &[FOUR, FIVE, SIX]); assert!(contains("CURRENT", &ts, FIVE)); assert!(contains("CURRENT", &ts, SIX)); assert_eq!(list_rotated_files(&basename, &ts).len(), 2); // ensure this produces 12/34/56/78/9 write_loglines(true, naming, &ts, 
&[SEVEN, EIGHT, NINE]); assert_eq!(list_rotated_files(&basename, &ts).len(), 4); assert!(contains("CURRENT", &ts, NINE)); } #[test] fn issue_38() { const NUMBER_OF_FILES: usize = 5; const NUMBER_OF_PSEUDO_PROCESSES: usize = 11; const ISSUE_38: &str = "issue_38"; const LOG_FOLDER: &str = "log_files/issue_38"; for _ in 0..NUMBER_OF_PSEUDO_PROCESSES { let flwb = crate::writers::file_log_writer::FileLogWriter::builder( FileSpec::default() .directory(LOG_FOLDER) .discriminant(ISSUE_38), ) .rotate( Criterion::Size(500), Naming::Timestamps, Cleanup::KeepLogFiles(NUMBER_OF_FILES), ) .o_append(false); #[cfg(feature = "async")] let flwb = flwb.write_mode(WriteMode::AsyncWith { pool_capa: 5, message_capa: 400, flush_interval: ZERO_DURATION, }); let flw = flwb.try_build().unwrap(); // write some lines, but not enough to rotate for i in 0..4 { flw.write( &mut DeferredNow::new(), &log::Record::builder() .args(format_args!("{i}")) .level(log::Level::Error) .target("myApp") .file(Some("server.rs")) .line(Some(144)) .module_path(Some("server")) .build(), ) .unwrap(); } flw.flush().ok(); } // give the cleanup thread a short moment of time std::thread::sleep(Duration::from_millis(50)); let fn_pattern = String::with_capacity(180) .add( &String::from(LOG_FOLDER).add("/").add( &Path::new(&std::env::args().next().unwrap()) .file_stem().unwrap(/*cannot fail*/) .to_string_lossy(), ), ) .add("_") .add(ISSUE_38) .add("_r[0-9]*") .add(".log"); assert_eq!( glob::glob(&fn_pattern) .unwrap() .filter_map(Result::ok) .count(), NUMBER_OF_FILES ); } #[test] fn test_reset() { #[cfg(not(feature = "async"))] let write_mode = WriteMode::BufferDontFlushWith(4); #[cfg(feature = "async")] let write_mode = WriteMode::AsyncWith { pool_capa: 7, message_capa: 8, flush_interval: ZERO_DURATION, }; let flw = super::FileLogWriter::builder( FileSpec::default() .directory(DIRECTORY) .discriminant("test_reset-1"), ) .rotate( Criterion::Size(28), Naming::Numbers, Cleanup::KeepLogFiles(20), ) .append() .write_mode(write_mode) .try_build() .unwrap(); flw.write( &mut DeferredNow::new(), &log::Record::builder() .args(format_args!("{}", "test_reset-1")) .level(log::Level::Error) .target("test_reset") .file(Some("server.rs")) .line(Some(144)) .module_path(Some("server")) .build(), ) .unwrap(); println!("FileLogWriter {flw:?}"); flw.reset( &super::FileLogWriter::builder( FileSpec::default() .directory(DIRECTORY) .discriminant("test_reset-2"), ) .rotate( Criterion::Size(28), Naming::Numbers, Cleanup::KeepLogFiles(20), ) .write_mode(write_mode), ) .unwrap(); flw.write( &mut DeferredNow::new(), &log::Record::builder() .args(format_args!("{}", "test_reset-2")) .level(log::Level::Error) .target("test_reset") .file(Some("server.rs")) .line(Some(144)) .module_path(Some("server")) .build(), ) .unwrap(); println!("FileLogWriter {flw:?}"); assert!(flw .reset( &super::FileLogWriter::builder( FileSpec::default() .directory(DIRECTORY) .discriminant("test_reset-3"), ) .rotate( Criterion::Size(28), Naming::Numbers, Cleanup::KeepLogFiles(20), ) .write_mode(WriteMode::Direct), ) .is_err()); } #[test] fn test_max_log_level() { let spec = FileSpec::default() .directory(DIRECTORY) .discriminant("test_max_log_level-1") .suppress_basename() .suppress_timestamp(); let flw = super::FileLogWriter::builder(spec.clone()) .max_level(log::LevelFilter::Warn) .write_mode(WriteMode::Direct) .try_build() .unwrap(); let write_msg = |level, msg| { flw.write( &mut DeferredNow::new(), &log::Record::builder() .args(format_args!("{msg}")) .level(level) .target("test_max_log_level") 
.file(Some("server.rs")) .line(Some(144)) .module_path(Some("server")) .build(), ) .unwrap(); }; write_msg(log::Level::Trace, "trace message"); write_msg(log::Level::Debug, "debug message"); write_msg(log::Level::Info, "info message"); write_msg(log::Level::Warn, "warn message"); write_msg(log::Level::Error, "error message"); let log_contents = std::fs::read_to_string(spec.as_pathbuf(None)).unwrap(); assert!(!log_contents.contains("trace message")); assert!(!log_contents.contains("debug message")); assert!(!log_contents.contains("info message")); assert!(log_contents.contains("warn message")); assert!(log_contents.contains("error message")); } fn remove(s: &str, discr: &str) { std::fs::remove_file(get_hackyfilepath(s, discr)).unwrap(); } fn not_exists(s: &str, discr: &str) -> bool { !get_hackyfilepath(s, discr).exists() } fn contains(s: &str, discr: &str, text: &str) -> bool { match std::fs::read_to_string(get_hackyfilepath(s, discr)) { Err(_) => false, Ok(s) => s.contains(text), } } fn get_hackyfilepath(infix: &str, discr: &str) -> Box { let arg0 = std::env::args().next().unwrap(); let mut s_filename = Path::new(&arg0) .file_stem() .unwrap() .to_string_lossy() .to_string(); s_filename += "_"; s_filename += discr; s_filename += "_r"; s_filename += infix; s_filename += ".log"; let mut path_buf = PathBuf::from(DIRECTORY); path_buf.push(s_filename); path_buf.into_boxed_path() } fn write_loglines(append: bool, naming: Naming, discr: &str, texts: &[&'static str]) { let flw = get_file_log_writer(append, naming, discr); for text in texts { flw.write( &mut DeferredNow::new(), &log::Record::builder() .args(format_args!("{text}")) .level(log::Level::Error) .target("myApp") .file(Some("server.rs")) .line(Some(144)) .module_path(Some("server")) .build(), ) .unwrap(); } } fn get_file_log_writer( append: bool, naming: Naming, discr: &str, ) -> crate::writers::FileLogWriter { super::FileLogWriter::builder(FileSpec::default().directory(DIRECTORY).discriminant(discr)) .rotate( Criterion::Size(if append { 28 } else { 10 }), naming, Cleanup::Never, ) .o_append(append) .try_build() .unwrap() } fn list_rotated_files(basename: &str, discr: &str) -> Vec { let fn_pattern = String::with_capacity(180) .add(basename) .add("_") .add(discr) .add("_r2[0-9]*") // Year 3000 problem!!! .add(".log"); glob::glob(&fn_pattern) .unwrap() .map(|r| r.unwrap().into_os_string().to_string_lossy().to_string()) .collect() } } flexi_logger-0.29.8/src/writers/log_writer.rs000064400000000000000000000044461046102023000174130ustar 00000000000000use crate::{DeferredNow, FlexiLoggerError, FormatFunction}; use log::Record; /// Writes to a single log output stream. /// /// Boxed instances of `LogWriter` can be used as additional log targets /// (see [writers](crate::writers) for more details). pub trait LogWriter: Sync + Send { /// Writes out a log line. /// /// # Errors /// /// [`std::io::Error`] fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()>; /// Flushes any buffered records. /// /// # Errors /// /// [`std::io::Error`] fn flush(&self) -> std::io::Result<()>; /// Provides the maximum log level that is to be written. fn max_log_level(&self) -> log::LevelFilter { log::LevelFilter::Trace } /// Sets the format function. /// /// Defaults to [`default_format`](crate::default_format), /// but can be changed with a call to /// [`Logger::format_for_writer`](crate::Logger::format_for_writer). /// /// The default implementation is a no-op. 
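///
/// # Example
///
/// A sketch of a typical override for a writer that keeps its format function in a field
/// (`self.format` is an assumed field of type `FormatFunction` on the implementing struct):
///
/// ```rust,ignore
/// fn format(&mut self, format: FormatFunction) {
///     self.format = format;
/// }
/// ```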
fn format(&mut self, format: FormatFunction) { _ = format; } /// Cleanup open resources, if necessary. fn shutdown(&self) {} /// Re-open the current output, if meaningful. /// /// This method is called from /// [`LoggerHandle::reopen_output`](crate::LoggerHandle::reopen_output) /// for all registered additional writers. /// /// # Errors /// /// Depend on registered writers. fn reopen_output(&self) -> Result<(), FlexiLoggerError> { Ok(()) } /// Rotate the current output, if meaningful. /// /// This method is called from /// [`LoggerHandle::trigger_rotation`](crate::LoggerHandle::trigger_rotation) /// for all registered additional writers. /// /// # Errors /// /// Depend on registered writers. fn rotate(&self) -> Result<(), FlexiLoggerError> { Ok(()) } // Takes a vec with three patterns per line that represent the log line, // compares the written log with the expected lines, // and asserts that both are in sync. // // This function is not meant for productive code, only for tests. #[doc(hidden)] fn validate_logs(&self, _expected: &[(&'static str, &'static str, &'static str)]) { unimplemented!("only useful for tests"); } } flexi_logger-0.29.8/src/writers/syslog/builder.rs000064400000000000000000000071121046102023000201750ustar 00000000000000use super::{ line::SyslogLineHeader, severity::default_mapping, syslog_default_format, LevelToSyslogSeverity, SyslogConnection, SyslogFacility, SyslogWriter, }; use crate::FormatFunction; use std::io::{Error as IoError, ErrorKind, Result as IoResult}; #[allow(clippy::module_name_repetitions)] /// Builder for the `SyslogWriter`. /// /// Is created with [`SyslogWriter::builder`]. pub struct SyslogWriterBuilder { syslog_connection: SyslogConnection, syslog_line_header: SyslogLineHeader, syslog_facility: SyslogFacility, custom_process_name: Option, determine_severity: LevelToSyslogSeverity, max_log_level: log::LevelFilter, format: FormatFunction, } impl SyslogWriterBuilder { #[must_use] pub(super) fn new( syslog: SyslogConnection, syslog_line_header: SyslogLineHeader, syslog_facility: SyslogFacility, ) -> SyslogWriterBuilder { SyslogWriterBuilder { syslog_connection: syslog, syslog_line_header, syslog_facility, custom_process_name: None, determine_severity: default_mapping, max_log_level: log::LevelFilter::Warn, format: syslog_default_format, } } /// Specify a custom process name, or unset it to revert back to name inference. #[must_use] pub fn custom_process_name(mut self, name: Option<&str>) -> Self { self.custom_process_name = name.map(Into::into); self } /// Use the given function to map the rust log levels to the syslog severities. /// By default a trivial mapping is used, which should be good enough in most cases. #[must_use] pub fn determine_severity(mut self, mapping: LevelToSyslogSeverity) -> Self { self.determine_severity = mapping; self } /// Specify up to which level log messages should be sent to the syslog. /// /// Default is: only warnings and errors. #[must_use] pub fn max_log_level(mut self, max_log_level: log::LevelFilter) -> Self { self.max_log_level = max_log_level; self } /// Use the given format function to write the message part of the syslog entries. /// /// By default, [`syslog_default_format`](crate::writers::syslog_default_format) is used. /// /// You can instead use [`syslog_format_with_thread`](crate::writers::syslog_format_with_thread) /// or your own `FormatFunction` /// (see the source code of the provided functions if you want to write your own). 
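///
/// # Example
///
/// A minimal sketch of a custom `FormatFunction` (the `"myapp:"` prefix is only
/// illustrative), which could then be passed to this method:
///
/// ```rust
/// use flexi_logger::DeferredNow;
/// use log::Record;
///
/// fn my_syslog_format(
///     w: &mut dyn std::io::Write,
///     _now: &mut DeferredNow,
///     record: &Record,
/// ) -> Result<(), std::io::Error> {
///     write!(w, "myapp: {}", record.args())
/// }
/// ```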
#[must_use] pub fn format(mut self, format: FormatFunction) -> Self { self.format = format; self } /// Returns a boxed instance of `SysLogWriter`. /// /// # Errors /// /// `std::io::Error` if the program's argument list is empty so that the process /// identifier for the syslog cannot be determined pub fn build(self) -> IoResult> { Ok(Box::new(SyslogWriter::new( std::process::id(), self.custom_process_name .or(std::env::args().next()) .ok_or_else(|| { IoError::new( ErrorKind::Other, "Can't provide a process name as no env args are present and \ no custom process name is set" .to_owned(), ) })?, self.syslog_line_header, self.syslog_facility, self.determine_severity, self.syslog_connection, self.max_log_level, self.format, )?)) } } flexi_logger-0.29.8/src/writers/syslog/connection.rs000064400000000000000000000041371046102023000207120ustar 00000000000000use std::{ io::{Result as IoResult, Write}, net::{TcpStream, UdpSocket}, }; // Writable and flushable connection to the syslog backend. #[derive(Debug)] pub(super) enum Connection { // Sends log lines to the syslog via a // [UnixStream](https://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html). #[cfg_attr(docsrs, doc(cfg(target_family = "unix")))] #[cfg(target_family = "unix")] Stream(std::os::unix::net::UnixStream), // Sends log lines to the syslog via a // [UnixDatagram](https://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html). #[cfg_attr(docsrs, doc(cfg(target_family = "unix")))] #[cfg(target_family = "unix")] Datagram(std::os::unix::net::UnixDatagram), // Sends log lines to the syslog via UDP. // // UDP is fragile and thus discouraged except for local communication. Udp(UdpSocket), // Sends log lines to the syslog via TCP. Tcp(TcpStream), } impl Write for Connection { fn write(&mut self, buf: &[u8]) -> IoResult { match *self { #[cfg(target_family = "unix")] Self::Datagram(ref ud) => { // todo: reconnect if conn is broken ud.send(buf) } #[cfg(target_family = "unix")] Self::Stream(ref mut w) => { // todo: reconnect if conn is broken w.write(buf) .and_then(|sz| w.write_all(&[0; 1]).map(|()| sz)) } Self::Tcp(ref mut w) => { // todo: reconnect if conn is broken let n = w.write(buf)?; Ok(w.write(b"\n")? + n) } Self::Udp(ref socket) => { // ?? socket.send(buf) } } } fn flush(&mut self) -> IoResult<()> { match *self { #[cfg(target_family = "unix")] Self::Datagram(_) => Ok(()), #[cfg(target_family = "unix")] Self::Stream(ref mut w) => w.flush(), Self::Udp(_) => Ok(()), Self::Tcp(ref mut w) => w.flush(), } } } flexi_logger-0.29.8/src/writers/syslog/facility.rs000064400000000000000000000027761046102023000203660ustar 00000000000000/// Syslog Facility, according to [RFC 5424](https://datatracker.ietf.org/doc/rfc5424). /// /// Note that the original integer values are already multiplied by 8. #[derive(Copy, Clone, Debug)] #[allow(clippy::module_name_repetitions)] pub enum SyslogFacility { /// kernel messages. Kernel = 0 << 3, /// user-level messages. UserLevel = 1 << 3, /// mail system. MailSystem = 2 << 3, /// system daemons. SystemDaemons = 3 << 3, /// security/authorization messages. Authorization = 4 << 3, /// messages generated internally by syslogd. SyslogD = 5 << 3, /// line printer subsystem. LinePrinter = 6 << 3, /// network news subsystem. News = 7 << 3, /// UUCP subsystem. Uucp = 8 << 3, /// clock daemon. Clock = 9 << 3, /// security/authorization messages. Authorization2 = 10 << 3, /// FTP daemon. Ftp = 11 << 3, /// NTP subsystem. Ntp = 12 << 3, /// log audit. LogAudit = 13 << 3, /// log alert. 
LogAlert = 14 << 3, /// clock daemon (note 2). Clock2 = 15 << 3, /// local use 0 (local0). LocalUse0 = 16 << 3, /// local use 1 (local1). LocalUse1 = 17 << 3, /// local use 2 (local2). LocalUse2 = 18 << 3, /// local use 3 (local3). LocalUse3 = 19 << 3, /// local use 4 (local4). LocalUse4 = 20 << 3, /// local use 5 (local5). LocalUse5 = 21 << 3, /// local use 6 (local6). LocalUse6 = 22 << 3, /// local use 7 (local7). LocalUse7 = 23 << 3, } flexi_logger-0.29.8/src/writers/syslog/formats.rs000064400000000000000000000016411046102023000202230ustar 00000000000000use crate::DeferredNow; use log::Record; /// Default way of writing the message to the syslog. /// /// Just uses the `Display` implementation of `record.args()`. /// /// # Errors /// /// `std::io::Error` from writing to the given output stream. pub fn syslog_default_format( w: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { write!(w, "{}", record.args()) } /// Similar to `syslog_default_format`, but inserts the thread name at the beginning of the message, /// encapsulated in square brackets. /// /// # Errors /// /// `std::io::Error` from writing to the given output stream. pub fn syslog_format_with_thread( w: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { write!( w, "[{}] {}", std::thread::current().name().unwrap_or(""), record.args() ) } flexi_logger-0.29.8/src/writers/syslog/line.rs000064400000000000000000000077461046102023000175110ustar 00000000000000use std::io::{Error as IoError, ErrorKind, Result as IoResult, Write}; use crate::{DeferredNow, FormatFunction}; use super::{LevelToSyslogSeverity, SyslogFacility}; /// Defines the format of the header of a syslog line. pub enum SyslogLineHeader { /// Line header according to RFC 5424. Rfc5424(String), /// Line header according to RFC 3164. Rfc3164, } pub(crate) struct LineWriter { header: SyslogLineHeader, hostname: String, process: String, pid: u32, format: FormatFunction, determine_severity: LevelToSyslogSeverity, facility: SyslogFacility, } impl LineWriter { pub(crate) fn new( header: SyslogLineHeader, determine_severity: LevelToSyslogSeverity, facility: SyslogFacility, process: String, pid: u32, format: FormatFunction, ) -> IoResult<Self> { const UNKNOWN_HOSTNAME: &str = ""; Ok(LineWriter { header, hostname: hostname::get().map_or_else( |_| Ok(UNKNOWN_HOSTNAME.to_owned()), |s| { s.into_string().map_err(|_| { IoError::new( ErrorKind::InvalidData, "Hostname contains non-UTF8 characters".to_owned(), ) }) }, )?, process, pid, format, determine_severity, facility, }) } pub(crate) fn write_syslog_entry( &self, buffer: &mut dyn Write, now: &mut DeferredNow, record: &log::Record, ) -> IoResult<()> { // See [RFC 5424](https://datatracker.ietf.org/doc/rfc5424#page-8).
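// Illustrative example of an RFC 5424 header as produced below (values are made up):
//   <131>1 <rfc3339-timestamp> myhost myapp 4711 MyMsgId - message text
// where 131 = SyslogFacility::LocalUse0 (16 << 3) | SyslogSeverity::Error (3).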
let severity = (self.determine_severity)(record.level()); match self.header { SyslogLineHeader::Rfc3164 => { write!( buffer, "<{pri}>{timestamp} {tag}[{procid}]: ", pri = self.facility as u8 | severity as u8, timestamp = now.format_rfc3164(), tag = self.process, procid = self.pid )?; (self.format)(buffer, now, record)?; } SyslogLineHeader::Rfc5424(ref message_id) => { #[allow(clippy::write_literal)] write!( buffer, "<{pri}>{version} {timestamp} {hostname} {appname} {procid} {msgid} ", pri = self.facility as u8 | severity as u8, version = "1", timestamp = now.format_rfc3339(), hostname = self.hostname, appname = self.process, procid = self.pid, msgid = message_id, )?; write_key_value_pairs(buffer, record)?; (self.format)(buffer, now, record)?; } } Ok(()) } } // Helpers for printing key-value pairs fn write_key_value_pairs( w: &mut dyn std::io::Write, record: &log::Record<'_>, ) -> Result<(), std::io::Error> { let mut kv_written = false; #[cfg(feature = "kv")] if record.key_values().count() > 0 { write!(w, "[log_kv ",)?; let mut kv_stream = KvStream(w, false); record.key_values().visit(&mut kv_stream).ok(); write!(w, "] ")?; kv_written = true; } if !kv_written { write!(w, "- ")?; } Ok(()) } #[cfg(feature = "kv")] struct KvStream<'a>(&'a mut dyn std::io::Write, bool); #[cfg(feature = "kv")] impl<'kvs, 'a> log::kv::VisitSource<'kvs> for KvStream<'a> where 'kvs: 'a, { fn visit_pair( &mut self, key: log::kv::Key<'kvs>, value: log::kv::Value<'kvs>, ) -> Result<(), log::kv::Error> { if self.1 { write!(self.0, " ")?; } write!(self.0, "{key}=\"{value:?}\"")?; self.1 = true; Ok(()) } } flexi_logger-0.29.8/src/writers/syslog/severity.rs000064400000000000000000000021261046102023000204210ustar 00000000000000/// Syslog severity. /// /// See [RFC 5424](https://datatracker.ietf.org/doc/rfc5424). #[derive(Debug)] #[allow(clippy::module_name_repetitions)] pub enum SyslogSeverity { /// System is unusable. Emergency = 0, /// Action must be taken immediately. Alert = 1, /// Critical conditions. Critical = 2, /// Error conditions. Error = 3, /// Warning conditions Warning = 4, /// Normal but significant condition Notice = 5, /// Informational messages. Info = 6, /// Debug-level messages. Debug = 7, } /// Signature for a custom mapping function that maps the rust log levels to /// values of the syslog Severity. #[allow(clippy::module_name_repetitions)] pub type LevelToSyslogSeverity = fn(level: log::Level) -> SyslogSeverity; pub(crate) fn default_mapping(level: log::Level) -> SyslogSeverity { match level { log::Level::Error => SyslogSeverity::Error, log::Level::Warn => SyslogSeverity::Warning, log::Level::Info => SyslogSeverity::Info, log::Level::Debug | log::Level::Trace => SyslogSeverity::Debug, } } flexi_logger-0.29.8/src/writers/syslog/syslog_connection.rs000064400000000000000000000044721046102023000223140ustar 00000000000000use super::connection::Connection; #[cfg(target_family = "unix")] use std::path::Path; use std::{ io::Result as IoResult, net::{TcpStream, ToSocketAddrs, UdpSocket}, }; /// Implements the connection to the syslog. /// /// Choose one of the factory methods that matches your environment, /// depending on how the syslog is managed on your system, /// how you can access it and with which protocol you can write to it. /// /// Is required to instantiate a [`SyslogWriter`](crate::writers::SyslogWriter). #[allow(clippy::module_name_repetitions)] pub struct SyslogConnection(Connection); impl SyslogConnection { /// Returns a `Syslog` that connects via unix datagram to the specified path. 
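///
/// # Example
///
/// A sketch, assuming the common Unix system-log socket path `/dev/log`:
///
/// ```rust,ignore
/// use flexi_logger::writers::SyslogConnection;
///
/// let syslog_connection = SyslogConnection::try_datagram("/dev/log")?;
/// ```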
/// /// # Errors /// /// Any kind of I/O error can occur. #[cfg_attr(docsrs, doc(cfg(target_family = "unix")))] #[cfg(target_family = "unix")] pub fn try_datagram>(path: P) -> IoResult { let ud = std::os::unix::net::UnixDatagram::unbound()?; ud.connect(&path)?; Ok(SyslogConnection(Connection::Datagram(ud))) } /// Returns a `Syslog` that connects via unix stream to the specified path. /// /// # Errors /// /// Any kind of I/O error can occur. #[cfg_attr(docsrs, doc(cfg(target_family = "unix")))] #[cfg(target_family = "unix")] pub fn try_stream>(path: P) -> IoResult { Ok(SyslogConnection(Connection::Stream( std::os::unix::net::UnixStream::connect(path)?, ))) } /// Returns a `Syslog` that sends the log lines via TCP to the specified address. /// /// # Errors /// /// `std::io::Error` if opening the stream fails. pub fn try_tcp(server: T) -> IoResult { Ok(SyslogConnection(Connection::Tcp(TcpStream::connect( server, )?))) } /// Returns a `Syslog` that sends the log via the fragile UDP protocol from local /// to server. /// /// # Errors /// /// `std::io::Error` if opening the stream fails. pub fn try_udp(local: T, server: T) -> IoResult { let socket = UdpSocket::bind(local)?; socket.connect(server)?; Ok(SyslogConnection(Connection::Udp(socket))) } pub(super) fn into_inner(self) -> Connection { self.0 } } flexi_logger-0.29.8/src/writers/syslog/writer.rs000064400000000000000000000234021046102023000200630ustar 00000000000000use super::{ connection::Connection, line::LineWriter, LevelToSyslogSeverity, SyslogConnection, SyslogFacility, SyslogLineHeader, SyslogWriterBuilder, }; use crate::{writers::log_writer::LogWriter, DeferredNow, FormatFunction}; #[cfg(test)] use std::io::BufRead; use std::{ io::{Cursor, Result as IoResult, Write}, sync::Mutex, }; /// A configurable [`LogWriter`] implementation that writes log messages to the syslog. /// /// Only available with optional crate feature `syslog_writer`. /// /// See the [writers](crate::writers) module for guidance how to use additional log writers. #[allow(clippy::module_name_repetitions)] pub struct SyslogWriter { line_writer: LineWriter, m_conn_buf: Mutex, max_log_level: log::LevelFilter, #[cfg(test)] validation_buffer: Mutex>>, } impl SyslogWriter { /// Instantiate the builder for the `SysLogWriter`. 
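///
/// # Example
///
/// A minimal sketch (addresses and message id are chosen only for illustration);
/// the boxed writer can then be registered with
/// [`Logger::add_writer`](crate::Logger::add_writer):
///
/// ```rust,no_run
/// use flexi_logger::writers::{SyslogConnection, SyslogFacility, SyslogLineHeader, SyslogWriter};
///
/// let boxed_syslog_writer = SyslogWriter::builder(
///     SyslogConnection::try_udp("127.0.0.1:5555", "127.0.0.1:514").unwrap(),
///     SyslogLineHeader::Rfc5424("MyMsgId".to_owned()),
///     SyslogFacility::LocalUse0,
/// )
/// .max_log_level(log::LevelFilter::Info)
/// .build()
/// .unwrap();
/// ```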
#[must_use] pub fn builder( syslog: SyslogConnection, syslog_line_header: SyslogLineHeader, syslog_facility: SyslogFacility, ) -> SyslogWriterBuilder { SyslogWriterBuilder::new(syslog, syslog_line_header, syslog_facility) } #[allow(clippy::too_many_arguments)] pub(super) fn new( pid: u32, process: String, syslog_line_header: SyslogLineHeader, facility: SyslogFacility, determine_severity: LevelToSyslogSeverity, syslog_connection: SyslogConnection, max_log_level: log::LevelFilter, format: FormatFunction, ) -> IoResult { Ok(SyslogWriter { line_writer: LineWriter::new( syslog_line_header, determine_severity, facility, process, pid, format, )?, m_conn_buf: Mutex::new(ConnectorAndBuffer { conn: syslog_connection.into_inner(), buf: Vec::with_capacity(200), }), max_log_level, #[cfg(test)] validation_buffer: Mutex::new(Cursor::new(Vec::new())), }) } } impl LogWriter for SyslogWriter { fn write(&self, now: &mut DeferredNow, record: &log::Record) -> IoResult<()> { let mut conn_buf_guard = self .m_conn_buf .lock() .map_err(|_| crate::util::io_err("SyslogWriter is poisoned"))?; let cb = &mut *conn_buf_guard; cb.buf.clear(); let mut buffer = Cursor::new(&mut cb.buf); self.line_writer .write_syslog_entry(&mut buffer, now, record)?; #[cfg(test)] { let mut valbuf = self.validation_buffer.lock().unwrap(); valbuf.write_all(&cb.buf)?; valbuf.write_all(b"\n")?; } // we _have_ to buffer above because each write here generates a syslog entry cb.conn.write_all(&cb.buf) } fn flush(&self) -> IoResult<()> { self.m_conn_buf .lock() .map_err(|_| crate::util::io_err("SyslogWriter is poisoned"))? .conn .flush() } fn max_log_level(&self) -> log::LevelFilter { self.max_log_level } #[doc(hidden)] fn validate_logs(&self, _expected: &[(&'static str, &'static str, &'static str)]) { #[cfg(test)] { let write_cursor = self.validation_buffer.lock().unwrap(); let mut reader = std::io::BufReader::new(Cursor::new(write_cursor.get_ref())); let mut buf = String::new(); #[allow(clippy::used_underscore_binding)] for tuple in _expected { buf.clear(); reader.read_line(&mut buf).unwrap(); assert!( buf.contains(tuple.0), "Did not find tuple.0 = {} in {}", tuple.0, buf ); assert!(buf.contains(tuple.1), "Did not find tuple.1 = {}", tuple.1); assert!(buf.contains(tuple.2), "Did not find tuple.2 = {}", tuple.2); } buf.clear(); reader.read_line(&mut buf).unwrap(); assert!(buf.is_empty(), "Found more log lines than expected: {buf} ",); } } } struct ConnectorAndBuffer { conn: Connection, buf: Vec, } ///////////////////////////// #[cfg(test)] mod test { use crate::{ detailed_format, writers::{ syslog_format_with_thread, SyslogConnection, SyslogFacility, SyslogLineHeader, SyslogWriter, }, FileSpec, Logger, }; use chrono::{DateTime, Local}; use log::*; use std::path::PathBuf; #[doc(hidden)] #[macro_use] mod macros { #[macro_export] macro_rules! syslog1 { ($($arg:tt)*) => ( error!(target: "{Syslog1,_Default}", $($arg)*); ) } #[macro_export] macro_rules! 
syslog2 { ($($arg:tt)*) => ( error!(target: "{Syslog2,_Default}", $($arg)*); ) } } #[test] fn test_syslog() { let boxed_syslog_writer1 = SyslogWriter::builder( SyslogConnection::try_udp("127.0.0.1:5555", "127.0.0.1:514").unwrap(), SyslogLineHeader::Rfc5424("JustForTest".to_owned()), SyslogFacility::LocalUse0, ) .max_log_level(log::LevelFilter::Trace) .build() .unwrap(); let boxed_syslog_writer2 = SyslogWriter::builder( SyslogConnection::try_udp("127.0.0.1:5556", "127.0.0.1:514").unwrap(), SyslogLineHeader::Rfc3164, SyslogFacility::LocalUse0, ) .max_log_level(log::LevelFilter::Trace) .format(syslog_format_with_thread) .build() .unwrap(); let logger = Logger::try_with_str("info") .unwrap() .format(detailed_format) .log_to_file(FileSpec::default().suppress_timestamp().directory(dir())) .print_message() .add_writer("Syslog1", boxed_syslog_writer1) .add_writer("Syslog2", boxed_syslog_writer2) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); // Explicitly send logs to different loggers error!(target : "{Syslog1}", "This is a syslog-relevant error msg"); warn!(target : "{Syslog1}", "This is a syslog-relevant warn msg"); info!(target : "{Syslog1}", "This is a syslog-relevant info msg"); debug!(target : "{Syslog1}", "This is a syslog-relevant debug msg"); trace!(target : "{Syslog1}", "This is a syslog-relevant trace msg"); error!(target : "{Syslog1,_Default}", "This is a syslog- and log-relevant msg"); error!(target : "{Syslog2}", "This is a syslog-relevant error msg"); warn!(target : "{Syslog2}", "This is a syslog-relevant warn msg"); info!(target : "{Syslog2}", "This is a syslog-relevant info msg"); debug!(target : "{Syslog2}", "This is a syslog-relevant debug msg"); trace!(target : "{Syslog2}", "This is a syslog-relevant trace msg"); error!(target : "{Syslog2,_Default}", "This is a syslog- and log-relevant msg"); // Nicer: use explicit macros syslog1!("This is another syslog- and log error msg"); syslog2!("This is one more syslog- and log error msg"); warn!("This is a warning message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); // Verification: // this only validates the normal log target (file) logger.validate_logs(&[ ("ERROR", "", "a syslog- and log-relevant msg"), ("ERROR", "", "a syslog- and log-relevant msg"), ("ERROR", "", "another syslog- and log error msg"), ("ERROR", "", "one more syslog- and log error msg"), ("WARN", "syslog", "This is a warning message"), ]); logger.validate_additional_logs( "Syslog1", &[ ("<131>1", "JustForTest", "is a syslog-relevant error msg"), ("<132>1", "JustForTest", "is a syslog-relevant warn msg"), ("<134>1", "JustForTest", "is a syslog-relevant info msg"), ("<135>1", "JustForTest", "is a syslog-relevant debug msg"), ("<135>1", "JustForTest", "is a syslog-relevant trace msg"), ("<131>1", "JustForTest", "is a syslog- and log-relevant msg"), ("<131>1", "JustForTest", "is another syslog- and log error"), ], ); logger.validate_additional_logs( "Syslog2", &[ ("<131>", "]: [", "This is a syslog-relevant error msg"), ("<132>", "]: [", "This is a syslog-relevant warn msg"), ("<134>", "]: [", "This is a syslog-relevant info msg"), ("<135>", "]: [", "This is a syslog-relevant debug msg"), ("<135>", "]: [", "This is a syslog-relevant trace msg"), ("<131>", "]: [", "This is a syslog- and log-relevant msg"), ("<131>", "]: [", "This is one more syslog- and log error"), ], ); } fn dir() -> PathBuf { let mut d = PathBuf::new(); d.push("log_files"); add_prog_name(&mut d); 
d.push(now_local().format(TS).to_string()); d } fn add_prog_name(pb: &mut PathBuf) { let path = PathBuf::from(std::env::args().next().unwrap()); let filename = path.file_stem().unwrap(/*ok*/).to_string_lossy(); let (progname, _) = filename.rsplit_once('-').unwrap_or((&filename, "")); pb.push(progname); } #[must_use] pub fn now_local() -> DateTime { Local::now() } const TS: &str = "%Y-%m-%d_%H-%M-%S"; } flexi_logger-0.29.8/src/writers/syslog.rs000064400000000000000000000007101046102023000165440ustar 00000000000000mod builder; mod connection; mod facility; mod formats; mod line; mod severity; mod syslog_connection; mod writer; #[allow(clippy::module_name_repetitions)] pub use self::{ builder::SyslogWriterBuilder, facility::SyslogFacility, formats::{syslog_default_format, syslog_format_with_thread}, line::SyslogLineHeader, severity::{LevelToSyslogSeverity, SyslogSeverity}, syslog_connection::SyslogConnection, writer::SyslogWriter, }; flexi_logger-0.29.8/src/writers.rs000064400000000000000000000120221046102023000152230ustar 00000000000000//! Describes how to extend `flexi_logger` with additional log writers. //! //! The module also contains two ready-to-use log writers, //! one for writing to files ([`FileLogWriter`]), one for writing to the syslog ([`SyslogWriter`]). //! //! Log writers can be used in two ways: //! //! * _Default output channel:_
//! You can influence to which output stream normal log messages will be written, //! i.e. those from log macro calls without explicit target specification //! (like in `log::error!("File not found")`). //! //! With one of the methods //! //! * [`Logger::log_to_stderr`](crate::Logger::log_to_stderr) (default) //! * [`Logger::log_to_stdout`](crate::Logger::log_to_stdout) //! * [`Logger::log_to_file`](crate::Logger::log_to_file) //! * [`Logger::log_to_writer`](crate::Logger::log_to_writer) //! * [`Logger::log_to_file_and_writer`](crate::Logger::log_to_file_and_writer) //! * [`Logger::do_not_log`](crate::Logger::do_not_log) //! //! you can change the default output channel. The fourth and the fifth of these methods //! take log writers as input. See their documentation for more details. //! //! Messages will only be written to the default output channel //! if they match the current [log specification](crate::LogSpecification). //! //!
//! //! * _Additional output channels:_
//! You can register additional log writers under a _target name_ with //! [`Logger::add_writer()`](crate::Logger::add_writer), and address these log writers by //! specifying the _target name_ in calls to the //! [log macros](https://docs.rs/log/latest/log/macro.log.html). //! //! The message of a log call with a _target value_ that has the form `{Name1,Name2,...}`, i.e., //! a comma-separated list of _target names_, within braces, is not sent to the default output //! channel, but to the loggers specified explicitly in the list. In such a list //! you can also specify the default output channel with the built-in target name `_Default`. //! //! Log calls that are directed to an additional output channel will not be affected by //! the value of `flexi_logger`'s log specification; //! they will always be handed over to the respective `LogWriter`, //! as you might want it for alerts or auditing. //! //! In the following example we define an alert writer, and a macro to facilitate using it //! (and avoid using the explicit target specification in the macro call), and //! show some example calls. //! //! ```rust //! use log::*; //! //! use flexi_logger::{FileSpec,Logger}; //! use flexi_logger::writers::FileLogWriter; //! //! // Configure a FileLogWriter for alert messages //! pub fn alert_logger() -> Box { //! Box::new(FileLogWriter::builder( //! FileSpec::default() //! # .directory("log_files/writers_mod_docu") //! .discriminant("Alert") //! .suffix("alerts") //! ) //! .print_message() //! .try_build() //! .unwrap()) //! } //! //! // Define a macro for writing messages to the alert log and to the normal log //! #[macro_use] //! mod macros { //! #[macro_export] //! macro_rules! alert_error { //! ($($arg:tt)*) => ( //! error!(target: "{Alert,_Default}", $($arg)*); //! ) //! } //! } //! //! fn main() { //! Logger::try_with_env_or_str("info") //! .expect("LogSpecification String has errors") //! .print_message() //! .log_to_file(FileSpec::default()) //! # .log_to_file(FileSpec::default().directory("log_files/writers_mod_docu")) //! .add_writer("Alert", alert_logger()) //! .start() //! .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); //! //! //! // Explicitly send logs to different loggers //! error!(target : "{Alert}", "This is only an alert"); //! error!(target : "{Alert,_Default}", "This is an alert and log message"); //! //! // Nicer: use the explicit macro //! alert_error!("This is another alert and log message"); //! //! // Standard log macros write only to the normal log //! error!("This is a normal error message"); //! warn!("This is a warning"); //! info!("This is an info message"); //! debug!("This is a debug message - you will not see it"); //! trace!("This is a trace message - you will not see it"); //! } //! //! ``` //! 
pub(crate) mod file_log_writer; mod log_writer; #[cfg(feature = "syslog_writer")] #[cfg_attr(docsrs, doc(cfg(feature = "syslog_writer")))] mod syslog; #[cfg(feature = "syslog_writer")] #[cfg_attr(docsrs, doc(cfg(feature = "syslog_writer")))] pub use self::syslog::{ syslog_default_format, syslog_format_with_thread, LevelToSyslogSeverity, SyslogConnection, SyslogFacility, SyslogLineHeader, SyslogSeverity, SyslogWriter, SyslogWriterBuilder, }; pub use self::file_log_writer::{ ArcFileLogWriter, FileLogWriter, FileLogWriterBuilder, FileLogWriterConfig, FileLogWriterHandle, }; pub use self::log_writer::LogWriter; flexi_logger-0.29.8/tests/test_age_or_size.rs000064400000000000000000000062631046102023000174360ustar 00000000000000mod test_utils; use flexi_logger::{Age, Cleanup, Criterion, Duplicate, FileSpec, Logger, Naming}; use glob::glob; use log::*; use std::fs::File; use std::io::{BufRead, BufReader}; use std::ops::Add; use std::path::Path; #[test] fn test_age_or_size() { let directory = test_utils::dir(); test_utils::wait_for_start_of_second(); Logger::try_with_str("trace") .unwrap() .format_for_files(flexi_logger::detailed_format) .log_to_file(FileSpec::default().directory(&directory)) .duplicate_to_stderr(Duplicate::Info) .rotate( Criterion::AgeOrSize(Age::Second, 265), Naming::Numbers, Cleanup::Never, ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); // info!("test correct rotation by age or size"); write_log_lines(); verify_logs(&directory); } fn write_log_lines() { trace!("{}", 'A'); // wait to enforce a rotation std::thread::sleep(std::time::Duration::from_millis(1100)); // Fill first three files by size trace!("{}", 'a'); trace!("{}", 'b'); trace!("{}", 'c'); trace!("{}", 'd'); trace!("{}", 'e'); trace!("{}", 'f'); trace!("{}", 'g'); trace!("{}", 'h'); trace!("{}", 'i'); trace!("{}", 'j'); // now wait to enforce a rotation with a smaller file std::thread::sleep(std::time::Duration::from_secs(2)); trace!("{}", 'k'); // now wait to enforce a rotation with a smaller file std::thread::sleep(std::time::Duration::from_secs(2)); trace!("{}", 'l'); // then again fill a file by size trace!("{}", 'm'); trace!("{}", 'n'); // and do the final rotation: trace!("{}", 'o'); } fn verify_logs(directory: &Path) { let mut error_detected = false; let expected_line_counts = [1, 3, 3, 3, 1, 1, 3, 1]; // read all files let pattern = directory.display().to_string().add("/*"); let globresults = match glob(&pattern) { Err(e) => panic!("Is this ({pattern}) really a directory? 
Listing failed with {e}",), Ok(globresults) => globresults, }; let mut no_of_log_files = 0; let mut total_line_count = 0_usize; for (index, globresult) in globresults.into_iter().enumerate() { let mut line_count = 0_usize; let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {e}")); let f = File::open(&pathbuf) .unwrap_or_else(|e| panic!("Cannot open file {pathbuf:?} due to {e}")); no_of_log_files += 1; let mut reader = BufReader::new(f); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { line_count += 1; } println!("file {pathbuf:?}:\n{buffer}"); if line_count != expected_line_counts[index] { error_detected = true; } total_line_count += line_count; } if no_of_log_files != 8 { println!("wrong file count: {no_of_log_files} instead of 8"); error_detected = true; } if total_line_count != 16 { println!("wrong line count: {total_line_count} instead of 16"); error_detected = true; }; assert!(!error_detected); } flexi_logger-0.29.8/tests/test_colors.rs000064400000000000000000000006531046102023000164460ustar 00000000000000use flexi_logger::Logger; use log::*; #[test] fn test_mods() { Logger::try_with_str("trace") .unwrap() .log_to_stdout() .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message"); trace!("This is a trace message"); } flexi_logger-0.29.8/tests/test_custom_log_writer.rs000064400000000000000000000063771046102023000207250ustar 00000000000000mod test_utils; use flexi_logger::writers::LogWriter; use flexi_logger::{default_format, DeferredNow, FormatFunction, Logger}; use log::*; use std::sync::Mutex; const COUNT: u8 = 2; #[test] fn test_custom_log_writer() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } fn work(value: u8) { let mut logger = Logger::try_with_str("info").unwrap(); match value { 0 => { logger = logger.log_to_writer(Box::new(CustomWriter { data: Mutex::new(Vec::new()), format: default_format, mode: 0, })); } 1 => { logger = logger.log_to_writer(Box::new(CustomWriter { data: Mutex::new(Vec::new()), format: default_format, mode: 1, })); logger = logger.format(custom_format); } COUNT..=u8::MAX => unreachable!("asAS"), } let handle = logger .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); handle.validate_logs(&[ ( "ERROR", "test_custom_log_writer", "This is an error message", ), ("WARN", "test_custom_log_writer", "This is a warning"), ("INFO", "test_custom_log_writer", "This is an info message"), ]); } pub struct CustomWriter { data: Mutex>, format: FormatFunction, mode: u8, } impl LogWriter for CustomWriter { fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { let mut data = self.data.lock().unwrap(); (self.format)(&mut *data, now, record) } fn flush(&self) -> std::io::Result<()> { Ok(()) } fn format(&mut self, format: FormatFunction) { self.format = format; } fn max_log_level(&self) -> log::LevelFilter { log::LevelFilter::Trace } fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { let data = self.data.lock().unwrap(); let expected_data = match self.mode { 0 => expected .iter() .fold(Vec::new(), |mut acc, (level, module, message)| { 
acc.extend(format!("{level} [{module}] {message}").bytes()); acc }), 1 => expected .iter() .fold(Vec::new(), |mut acc, (level, _module, message)| { acc.extend(format!("{level}: {message}").bytes()); acc }), COUNT..=u8::MAX => { unreachable!("sadadsd") } }; assert_eq!( String::from_utf8_lossy(&data), String::from_utf8_lossy(&expected_data) ); } } fn custom_format( writer: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { // Only write the message and the level, without the module write!(writer, "{}: {}", record.level(), &record.args()) } flexi_logger-0.29.8/tests/test_default_file_and_writer.rs000064400000000000000000000032431046102023000220040ustar 00000000000000use flexi_logger::writers::{FileLogWriter, LogWriter}; use flexi_logger::{detailed_format, FileSpec, Logger}; use log::*; mod test_utils; #[test] fn test_default_file_and_writer() { let file_spec_bar = FileSpec::default() .directory(self::test_utils::dir()) .suppress_timestamp() .discriminant("bar"); let file_spec_foo = file_spec_bar.clone().discriminant("foo"); let bar_writer = FileLogWriter::builder(file_spec_bar.clone()) .format(detailed_format) .try_build() .unwrap(); { let handle = Logger::try_with_str("info") .unwrap() .log_to_file_and_writer(file_spec_foo, Box::new(bar_writer)) .format(detailed_format) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); handle.validate_logs(&[ ("ERROR", "test_default_file_and_writer", "error"), ("WARN", "test_default_file_and_writer", "warning"), ("INFO", "test_default_file_and_writer", "info"), ]); } let bar_writer = FileLogWriter::builder(file_spec_bar) .format(detailed_format) .append() .try_build() .unwrap(); bar_writer.validate_logs(&[ ("ERROR", "test_default_file_and_writer", "error"), ("WARN", "test_default_file_and_writer", "warning"), ("INFO", "test_default_file_and_writer", "info"), ]); } flexi_logger-0.29.8/tests/test_env_logger_style.rs000064400000000000000000000006541046102023000205150ustar 00000000000000use log::*; #[test] fn you_must_see_exactly_three_messages_above_1_err_1_warn_1_info() { flexi_logger::Logger::try_with_str("info") .unwrap() .start() .unwrap(); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); } flexi_logger-0.29.8/tests/test_error_channel.rs000064400000000000000000000040551046102023000177660ustar 00000000000000mod test_utils; #[cfg(feature = "async")] const COUNT: u8 = 4; #[cfg(feature = "async")] #[test] fn test_error_channels() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } #[cfg(feature = "async")] fn work(value: u8) { use flexi_logger::{ErrorChannel, FileSpec, Logger, WriteMode}; use log::*; use std::{ fs::File, io::{BufRead, BufReader}, }; let mut logger = Logger::try_with_str("info") .unwrap() .log_to_file(FileSpec::default().directory(test_utils::dir())); { logger = logger.write_mode(WriteMode::Async); } let err_file = test_utils::file("flexi_logger_error_channel.err"); match value { 0 => { logger = logger.error_channel(ErrorChannel::StdErr); } 1 => { logger = logger.error_channel(ErrorChannel::StdOut); } 2 => { logger = logger.error_channel(ErrorChannel::File(err_file.clone())); } 3 
=> { logger = logger.error_channel(ErrorChannel::DevNull); } COUNT..=u8::MAX => { unreachable!("djdjfäfdl") } }; { // start logger, and force its immediate drop let _logger_handle = logger .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); } error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); debug!("This is a debug message - you must not see it!"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); trace!("This is a trace message - you must not see it!"); trace!("This is a trace message - you must not see it!"); if value == 2 { let lines = BufReader::new(File::open(err_file).unwrap()) .lines() .count(); // two lines per failing error!, warn!, or info! call: assert_eq!(lines, 6); } } flexi_logger-0.29.8/tests/test_error_channel_error.rs000064400000000000000000000102041046102023000211700ustar 00000000000000mod test_utils; use std::{ env, fs::{create_dir_all, remove_file, OpenOptions}, io::Write, path::{Path, PathBuf}, process::{Command, Stdio}, }; const CTRL_INDEX: &str = "CTRL_INDEX"; const CRASHFILE: &str = "CRASHFILE"; const RUNS: usize = 3; const MILLIS: u64 = 50; // use the same technique as test_utils::dispatch to launch itself in child mode, // but do it twice: // controller starts parent, parent starts child // controller keeps running and verifies that the child's panic file is created (or not), // parent terminates directly and thus destroys the stderr of child, thus forcing child to panic #[test] fn main() { match env::var(CTRL_INDEX).as_ref() { Err(_) => { controller(); } Ok(s) if s == "parent" => { parent(false); } Ok(s) if s == "parent_panic" => { parent(true); } Ok(s) if s == "child" => { child(false); } Ok(s) if s == "child_panic" => { child(true); } Ok(s) => panic!("Unexpected value {s}"), } } fn controller() { let progpath = env::args().next().unwrap(); create_dir_all(crashdump_file().parent().unwrap()).unwrap(); remove_file(crashdump_file()).ok(); // First run: don't panic let mut child = Command::new(progpath.clone()) .env(CTRL_INDEX, "parent") .stdout(Stdio::null()) .stderr(Stdio::piped()) .spawn() .unwrap(); assert!(child.wait().expect("failed to wait on child").success()); // check that no crashdump_file was written std::thread::sleep(std::time::Duration::from_millis(200)); assert!(!Path::new(&crashdump_file()).try_exists().unwrap()); // Second run: panic let mut child = Command::new(progpath) .env(CTRL_INDEX, "parent_panic") .stdout(Stdio::null()) .stderr(Stdio::piped()) .spawn() .unwrap(); assert!(child.wait().expect("failed to wait on child").success()); // check that crashdump_file was written std::thread::sleep(std::time::Duration::from_millis(200)); assert!(Path::new(&crashdump_file()).try_exists().unwrap()); } fn parent(panic: bool) { let progpath = std::env::args().next().unwrap(); // we don't want to wait here, and it's not an issue because this is not a long running program #[allow(clippy::zombie_processes)] // spawn child and terminate directly, thus destroying the child's stderr Command::new(progpath) .env(CTRL_INDEX, if panic { "child_panic" } else { "child" }) .stdout(Stdio::null()) .stderr(Stdio::piped()) .spawn() .unwrap(); } fn child(panic: bool) { let original_hook = std::panic::take_hook(); std::panic::set_hook(Box::new(move |panic| { let backtrace = std::backtrace::Backtrace::capture(); let mut file = OpenOptions::new() .create(true) .write(true) 
.truncate(true) .open(crashdump_file()) .unwrap(); file.write_all(format!("Panic occured:\n{}\n{}\n", panic, backtrace).as_bytes()) .unwrap(); file.flush().unwrap(); original_hook(panic); })); let _logger = flexi_logger::Logger::try_with_str("info") .unwrap() .log_to_stderr() .panic_if_error_channel_is_broken(panic) .start() .unwrap(); for i in 0..RUNS { log::info!("log test ({i})"); // <-- causes panic when parent terminated std::thread::sleep(std::time::Duration::from_millis(MILLIS)); } } // controller is first caller and writes name to env, all other calls should find the env // and take the value from there fn crashdump_file() -> PathBuf { match std::env::var(CRASHFILE) { Ok(s) => Path::new(&s).to_path_buf(), Err(_) => { let progname = PathBuf::from(std::env::args().next().unwrap()) .file_name() .unwrap() .to_string_lossy() .to_string(); let path = test_utils::file(&format!("./{progname}.log")); std::env::set_var(CRASHFILE, &path); path } } } flexi_logger-0.29.8/tests/test_external_delete.rs000064400000000000000000000044261046102023000203130ustar 00000000000000mod test_utils; use flexi_logger::{FileSpec, Logger, WriteMode}; use log::*; use std::path::Path; #[cfg(feature = "async")] const COUNT: u8 = 3; #[cfg(not(feature = "async"))] const COUNT: u8 = 2; #[test] fn test_external_delete() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } fn work(value: u8) { let mut logger = Logger::try_with_str("info").unwrap(); let file_spec = FileSpec::default() .directory(self::test_utils::dir()) .suppress_timestamp() .basename("myprog"); let file_path = file_spec.as_pathbuf(None); logger = logger.log_to_file(file_spec); // ToDo: test with all write modes, with and without rotation match value { 0 => { logger = logger.write_mode(WriteMode::Direct); } 1 => { logger = logger.write_mode(WriteMode::BufferAndFlush); } #[cfg(feature = "async")] 2 => { logger = logger.write_mode(WriteMode::Async); } COUNT..=u8::MAX => { unreachable!("dtrtgfg") } }; let logger = logger .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); // write some log lines to initialize the file info!("XXX 1 AAA"); info!("XXX 2 AAA"); info!("XXX 3 AAA"); // write log lines, and delete the log file intermittently for i in 0..100 { if i % 25 == 20 { logger.flush(); std::thread::sleep(std::time::Duration::from_millis(100)); let lines = count_lines(&file_path); match std::fs::remove_file(file_path.clone()) { Ok(()) => { println!("Removed the log file {file_path:?}, which had {lines} lines"); logger.reopen_output().unwrap(); } Err(e) => { panic!("Cannot remove log file {file_path:?}, i = {i}, reason {e:?}") } } } info!("YYY {} AAA", i); } logger.flush(); assert!(count_lines(&file_path) < 30, "wrong number of lines",); } fn count_lines(path: &Path) -> usize { match std::fs::read_to_string(path) { Ok(s) => s.lines().filter(|line| line.contains("AAA")).count(), Err(_e) => 0, } } flexi_logger-0.29.8/tests/test_external_rename.rs000064400000000000000000000061651046102023000203220ustar 00000000000000mod test_utils; use flexi_logger::{FileSpec, Logger, WriteMode}; use log::*; use std::path::Path; #[cfg(feature = "async")] const COUNT: u8 = 3; #[cfg(not(feature = "async"))] const COUNT: u8 = 2; #[test] fn test_external_rename() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } fn work(value: u8) { let mut logger = Logger::try_with_str("info").unwrap(); let file_spec = FileSpec::default() .directory(self::test_utils::dir()) .suppress_timestamp() .basename("myprog"); let file_path = 
file_spec.as_pathbuf(None); logger = logger.log_to_file(file_spec); // ToDo: test with all write modes, with and without rotation match value { 0 => { logger = logger.write_mode(WriteMode::Direct); } 1 => { logger = logger.write_mode(WriteMode::BufferAndFlush); } #[cfg(feature = "async")] 2 => { logger = logger.write_mode(WriteMode::Async); } COUNT..=u8::MAX => { unreachable!("dtrtgfg") } }; // create the "moved" folder let mut mv_dir = file_path.clone(); mv_dir.pop(); mv_dir.push("moved"); std::fs::create_dir_all(mv_dir.clone()).unwrap(); let target_filespec = FileSpec::try_from(&file_path) .unwrap() .directory(mv_dir.clone()); { let logger = logger .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); // write some log lines to initialize the file info!("XXX 1 AAA"); info!("XXX 2 AAA"); info!("XXX 3 AAA"); // write log lines in a slow loop, and rename the log file intermittently for i in 0..100 { if i % 25 == 20 { let target_path = target_filespec.as_pathbuf(Some(&i.to_string())); match std::fs::rename(file_path.clone(), target_path.clone()) { Ok(()) => { println!("Renamed the log file {:?} to {:?}", file_path, &target_path); logger.reopen_output().unwrap(); } Err(e) => { panic!( "Cannot rename log file {file_path:?} to {target_path:?} due to {e:?}", ) } } } std::thread::sleep(std::time::Duration::from_millis(10)); info!("YYY {} AAA", i); } } // verify that all log lines are written and are found in moved files let mut files = 1; let mut sum = count_lines(&file_path); for entry in std::fs::read_dir(mv_dir).unwrap() { let entry = entry.unwrap(); let lines = count_lines(&entry.path()); sum += lines; if lines > 0 { files += 1; } } assert_eq!(files, 5, "wrong number of files"); assert_eq!(sum, 103, "wrong number of log lines"); } fn count_lines(path: &Path) -> usize { match std::fs::read_to_string(path) { Ok(s) => s.lines().filter(|line| line.contains("AAA")).count(), Err(_e) => 0, } } flexi_logger-0.29.8/tests/test_file_writer.rs000064400000000000000000000077131046102023000174640ustar 00000000000000mod test_utils; use flexi_logger::{detailed_format, opt_format, Cleanup, Criterion, FileSpec, Logger, Naming}; use log::*; const COUNT: u8 = 8; #[test] fn test_write_modes() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } fn work(value: u8) { let link_name = "link_to_log".to_string(); let mut logger = Logger::try_with_str("info").unwrap(); match value { 0 => { logger = logger.log_to_file( FileSpec::default() .directory(self::test_utils::dir()) .basename("to_foo_or_not_to_foo"), ); } 1 => { logger = logger .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never); } 2 => { logger = logger .format(detailed_format) .log_to_file( FileSpec::default() .directory(self::test_utils::dir()) .use_timestamp(false), ) .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never); } 3 => { logger = logger .format(detailed_format) .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never); } 4 => { logger = logger .format(opt_format) .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()) .discriminant("foo".to_string()), ) .rotate(Criterion::Size(2000), Naming::Numbers, Cleanup::Never) .create_symlink(link_name.clone()); } 5 => { logger = logger.format(opt_format).log_to_file( FileSpec::default() .suppress_timestamp() 
.directory(self::test_utils::dir()) .discriminant("foo"), ); } 6 => { logger = logger.format(opt_format).log_to_file( FileSpec::default() .directory(self::test_utils::dir()) .suppress_basename(), ); } 7 => { logger = logger.format(opt_format).log_to_file( FileSpec::default() .directory(self::test_utils::dir()) .suppress_basename() .discriminant("foo"), ); } COUNT..=u8::MAX => { unreachable!("dtrtgfg") } }; let handle = logger .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); handle.validate_logs(&[ ("ERROR", "test_file_writer", "error"), ("WARN", "test_file_writer", "warning"), ("INFO", "test_file_writer", "info"), ]); if value == 4 { self::platform::check_link(&link_name); } } mod platform { #[cfg(target_family = "unix")] pub fn check_link(link_name: &str) { match std::fs::symlink_metadata(link_name) { Err(e) => panic!("error with symlink: {e}"), Ok(metadata) => assert!(metadata.file_type().is_symlink(), "not a symlink"), } } #[cfg(not(target_family = "unix"))] pub fn check_link(_: &str) {} } flexi_logger-0.29.8/tests/test_file_writer_as_writer.rs000064400000000000000000000055611046102023000215420ustar 00000000000000mod test_utils; use flexi_logger::{writers::FileLogWriter, FileSpec, Logger}; use log::*; const COUNT: u8 = 3; #[test] fn test_write_modes() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } fn work(value: u8) { let link_name = "link_to_log".to_string(); let mut logger = Logger::try_with_str("trace").unwrap(); let flwb = FileLogWriter::builder( FileSpec::default() .directory(self::test_utils::dir()) .basename("to_foo_or_not_to_foo"), ); match value { 0 => { logger = logger.log_to_writer(Box::new( flwb.max_level(LevelFilter::Debug).try_build().unwrap(), )); } 1 => { logger = logger.log_to_writer(Box::new( flwb.max_level(LevelFilter::Trace).try_build().unwrap(), )); } 2 => { logger = logger.log_to_writer(Box::new(flwb.try_build().unwrap())); } COUNT..=u8::MAX => { unreachable!() } }; let handle = logger .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message"); trace!("This is a trace message"); match value { 0 => { handle.validate_logs(&[ ("ERROR", "test_file_writer", "error"), ("WARN", "test_file_writer", "warning"), ("INFO", "test_file_writer", "info"), ("DEBUG", "test_file_writer", "debug"), ]); } 1 => { handle.validate_logs(&[ ("ERROR", "test_file_writer", "error"), ("WARN", "test_file_writer", "warning"), ("INFO", "test_file_writer", "info"), ("DEBUG", "test_file_writer", "debug"), ("TRACE", "test_file_writer", "trace"), ]); } 2 => { handle.validate_logs(&[ ("ERROR", "test_file_writer", "error"), ("WARN", "test_file_writer", "warning"), ("INFO", "test_file_writer", "info"), ("DEBUG", "test_file_writer", "debug"), ("TRACE", "test_file_writer", "trace"), ]); } COUNT..=u8::MAX => { unreachable!() } } if value == 4 { self::platform::check_link(&link_name); } } mod platform { #[cfg(target_family = "unix")] pub fn check_link(link_name: &str) { match std::fs::symlink_metadata(link_name) { Err(e) => panic!("error with symlink: {e}"), Ok(metadata) => assert!(metadata.file_type().is_symlink(), "not a symlink"), } } #[cfg(not(target_family = "unix"))] pub fn check_link(_: 
&str) {} } flexi_logger-0.29.8/tests/test_force_utc_1_panic.rs000064400000000000000000000007611046102023000205100ustar 00000000000000mod test_utils; use flexi_logger::{DeferredNow, Logger}; use log::*; #[test] #[should_panic(expected = "offset is already initialized not to enforce UTC")] fn test_force_utc_1_panic() { let _ = Logger::try_with_str("info") .unwrap() .format_for_stderr(flexi_logger::detailed_format) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!("MUST BE REACHED"); DeferredNow::force_utc(); panic!("MUST NOT BE REACHED"); } flexi_logger-0.29.8/tests/test_force_utc_3.rs000064400000000000000000000007101046102023000173320ustar 00000000000000mod test_utils; use flexi_logger::{DeferredNow, Logger}; use log::*; #[test] #[should_panic(expected = "we arrived here, everything OK")] fn test_force_utc_3() { DeferredNow::force_utc(); let _ = Logger::try_with_str("info") .unwrap() .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); DeferredNow::force_utc(); info!("must be printed"); panic!("we arrived here, everything OK"); } flexi_logger-0.29.8/tests/test_force_utc_4.rs000064400000000000000000000023561046102023000173430ustar 00000000000000mod test_utils; use chrono::{Local, NaiveDate, NaiveDateTime, NaiveTime}; use flexi_logger::{detailed_format, FileSpec, Logger}; use log::*; #[test] fn test_force_utc_4() { let mut path = test_utils::dir(); let _ = Logger::try_with_str("info") .unwrap() .use_utc() .format(detailed_format) .log_to_file( FileSpec::default() .directory(&path) .basename("test") .suppress_timestamp(), ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!("must be printed"); let now = Local::now(); // parse timestamp from written file path.push("test.log"); let s = std::fs::read_to_string(path).unwrap(); let d: NaiveDateTime = NaiveDateTime::new( s[1..11].parse::().unwrap(), s[12..27].parse::().unwrap(), ); if now.offset().utc_minus_local().abs() > 100 { // local TZ is different from UTC -> verify that UTC was written to the file let now_local = now.naive_local(); let diff = (now_local - d).num_seconds(); println!("d: {d}, now_local: {now_local}, diff: {diff}"); assert!(diff.abs() >= 10); } } flexi_logger-0.29.8/tests/test_json.rs000064400000000000000000000045361046102023000161220ustar 00000000000000mod test_utils; #[cfg(feature = "json")] use flexi_logger::{json_format, FileSpec, Logger}; #[cfg(feature = "json")] use log::*; #[cfg(feature = "json")] #[test] fn test_json() { let logger = Logger::try_with_str("trace") .unwrap() .log_to_file(FileSpec::default().directory(self::test_utils::dir())) .format(json_format) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); let duration = std::time::Duration::from_millis(200); if cfg!(feature = "kv") { #[cfg(feature = "kv")] error!( a = 1, b = "2 beer or not 2 beer"; "This is an error message {} {:?}", 5, duration ); } else { error!("This is an error message {} {:?}", 5, duration); } warn!("This is a warning message {} {:?}", 4, duration); info!("This is an info message {} {:?}", 2, duration); if cfg!(feature = "kv") { logger.validate_logs(&[ ( "{\"level\":\"ERROR\",\"timestamp\"", "\"module_path\":\"test_json\"", "\"line\":20,\"kv\":{\"a\":1,\"b\":\"2 beer or not 2 beer\"},\ \"text\":\"This is an error message 5 200ms\"}", ), ( "{\"level\":\"WARN\",\"timestamp\"", "\"module_path\":\"test_json\"", "\"line\":30,\"text\":\"This is a warning message 4 200ms\"", ), ( "{\"level\":\"INFO\",\"timestamp\"", 
"\"module_path\":\"test_json\"", "\"line\":31,\"text\":\"This is an info message 2 200ms\"", ), ]); } else { logger.validate_logs(&[ ( "{\"level\":\"ERROR\",\"timestamp\"", "\"module_path\":\"test_json\"", "\"line\":28,\"text\":\"This is an error message 5 200ms\"", ), ( "{\"level\":\"WARN\",\"timestamp\"", "\"module_path\":\"test_json\"", "\"line\":30,\"text\":\"This is a warning message 4 200ms\"", ), ( "{\"level\":\"INFO\",\"timestamp\"", "\"module_path\":\"test_json\"", "\"line\":31,\"text\":\"This is an info message 2 200ms\"", ), ]); } } flexi_logger-0.29.8/tests/test_kv.rs000064400000000000000000000022011046102023000155540ustar 00000000000000mod test_utils; use flexi_logger::{FileSpec, Logger}; use log::*; #[test] fn test_kv() { {} let logger = Logger::try_with_str("trace") .unwrap() .log_to_file(FileSpec::default().directory(self::test_utils::dir())) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); if cfg!(feature = "kv") { #[cfg(feature = "kv")] error!( a = 1, b = "2 beer or not 2 beer"; "This is an error message {}", 5 ); } else { error!("This is an error message {}", 5); } warn!("This is a warning message {}", 4); info!("This is an info message {}", 2); if cfg!(feature = "kv") { logger.validate_logs(&[ ("ERROR", "[test_kv] {a=1, b=\"2 beer or not 2 beer\"}", ""), ("WARN", "[test_kv]", "is a warning"), ("INFO", "[test_kv]", "is an info"), ]); } else { logger.validate_logs(&[ ("ERROR", "[test_kv]", ""), ("WARN", "[test_kv]", "is a warning"), ("INFO", "[test_kv]", "is an info"), ]); } } flexi_logger-0.29.8/tests/test_mods.rs000064400000000000000000000044601046102023000161070ustar 00000000000000mod test_utils; use flexi_logger::{detailed_format, FileSpec, Logger}; use log::*; #[test] fn test_mods() { let handle = Logger::try_with_env_or_str( "info, test_mods::mymod1=debug, test_mods::mymod2=error, test_mods::mymod1::mysubmod = off", ) .unwrap() .format(detailed_format) .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); mymod1::test_traces(); mymod2::test_traces(); handle.validate_logs(&[ ("ERROR", "test_mods", "error"), ("WARN", "test_mods", "warning"), ("INFO", "test_mods", "info"), ("ERROR", "test_mods::mymod1", "error"), ("WARN", "test_mods::mymod1", "warning"), ("INFO", "test_mods::mymod1", "info"), ("DEBUG", "test_mods::mymod1", "debug"), ("ERROR", "test_mods::mymod2", "error"), ]); } mod mymod1 { use log::*; pub fn test_traces() { error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message"); trace!("This is a trace message - you must not see it!"); self::mysubmod::test_traces(); } mod mysubmod { use log::*; pub fn test_traces() { error!("This is an error message - you must not see it!"); warn!("This is a warning - you must not see it!"); info!("This is an info message - you must not see it!"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); } } } mod mymod2 { use log::*; pub fn test_traces() { error!("This is an error message"); warn!("This is a warning - you must not see it!"); info!("This is an info message - you must not see it!"); debug!("This is a debug message - you must not 
see it!"); trace!("This is a trace message - you must not see it!"); } } flexi_logger-0.29.8/tests/test_mods_off.rs000064400000000000000000000032571046102023000167440ustar 00000000000000mod test_utils; use flexi_logger::{detailed_format, FileSpec, Logger, LoggerHandle}; use log::*; #[test] fn test_mods_off() { let handle: LoggerHandle = Logger::try_with_env_or_str("info, test_mods_off::mymod1=off") .unwrap() .format(detailed_format) .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); mymod1::test_traces(); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); handle.validate_logs(&[ ("ERROR", "test_mods", "error"), ("WARN", "test_mods", "warning"), ("INFO", "test_mods", "info"), ]); } mod mymod1 { use log::*; pub fn test_traces() { error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message"); trace!("This is a trace message - you must not see it!"); self::mysubmod::test_traces(); } mod mysubmod { use log::*; pub fn test_traces() { error!("This is an error message - you must not see it!"); warn!("This is a warning - you must not see it!"); info!("This is an info message - you must not see it!"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); } } } flexi_logger-0.29.8/tests/test_multi_logger.rs000064400000000000000000000075431046102023000176430ustar 00000000000000mod test_utils; use flexi_logger::{ detailed_format, writers::{FileLogWriter, LogWriter}, DeferredNow, FileSpec, Logger, }; use log::*; use std::sync::Arc; #[macro_use] mod macros { #[macro_export] macro_rules! 
sec_alert_error { ($($arg:tt)*) => ( error!(target: "{Sec,Alert,_Default}", $($arg)*); ) } } #[test] fn test() { // more complex just to support validation: let (sec_writer, sec_handle) = SecWriter::new(); let logger = Logger::try_with_str("info, fantasy = trace") .unwrap() .format(detailed_format) .print_message() .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .add_writer("Sec", sec_writer) .add_writer("Alert", alert_logger()) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); // Explicitly send logs to different loggers error!(target : "{Sec}", "This is a security-relevant error message"); error!(target : "{Sec,Alert}", "This is a security-relevant alert message"); error!(target : "{Sec,Alert,_Default}", "This is a security-relevant alert and log message"); error!(target : "{Alert}", "This is an alert"); // Nicer: use explicit macros sec_alert_error!("This is another security-relevant alert and log message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); trace!(target: "phantasia", "this is a trace you should not see"); trace!(target: "fantasy", "this is a trace you should see"); // Switching off logging has no effect on non-default targets logger.parse_new_spec("Off").unwrap(); sec_alert_error!("This is a further security-relevant alert and log message"); // Verification: #[rustfmt::skip] logger.validate_logs(&[ ("ERROR", "multi_logger", "a security-relevant alert and log message"), ("ERROR", "multi_logger", "another security-relevant alert and log message"), ("WARN", "multi_logger", "warning"), ("INFO", "multi_logger", "info"), ("TRACE", "multi_logger", "this is a trace you should see"), ]); #[rustfmt::skip] sec_handle.validate_logs(&[ ("ERROR", "multi_logger", "security-relevant error"), ("ERROR", "multi_logger", "a security-relevant alert"), ("ERROR", "multi_logger", "security-relevant alert and log message"), ("ERROR", "multi_logger", "another security-relevant alert"), ("ERROR", "multi_logger", "a further security-relevant alert"), ]); } struct SecWriter(Arc); impl SecWriter { pub fn new() -> (Box, Arc) { let a_flw = Arc::new( FileLogWriter::builder( FileSpec::default() .directory(self::test_utils::dir()) .discriminant("Security") .suffix("seclog"), ) .print_message() .try_build() .unwrap(), ); (Box::new(SecWriter(Arc::clone(&a_flw))), a_flw) } } impl LogWriter for SecWriter { fn write(&self, now: &mut DeferredNow, record: &Record) -> std::io::Result<()> { self.0.write(now, record) } fn flush(&self) -> std::io::Result<()> { self.0.flush() } fn max_log_level(&self) -> log::LevelFilter { log::LevelFilter::Error } } pub fn alert_logger() -> Box { Box::new( FileLogWriter::builder( FileSpec::default() .directory(self::test_utils::dir()) .discriminant("Alert") .suffix("alerts"), ) .print_message() .try_build() .unwrap(), ) } flexi_logger-0.29.8/tests/test_multi_threaded_cleanup_async.rs000064400000000000000000000213101046102023000230340ustar 00000000000000mod test_utils; #[cfg(feature = "compress")] mod d { use chrono::{Local, NaiveDateTime}; use cond_sync::{CondSync, Other}; use flate2::bufread::GzDecoder; use flexi_logger::{ Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, Logger, Naming, WriteMode, }; use glob::glob; use log::*; use std::{ collections::BTreeMap, fs::File, io::{BufRead, BufReader, Write}, ops::Add, path::{Path, PathBuf}, thread::JoinHandle, 
}; const NO_OF_THREADS: usize = 5; const NO_OF_LOGLINES_PER_THREAD: usize = 20_000; const ROTATE_OVER_SIZE: u64 = 600_000; const NO_OF_LOG_FILES: usize = 2; const NO_OF_GZ_FILES: usize = 5; // we use a special log line format that starts with a special string so that it is easier to // verify that all log lines are written correctly #[test] fn multi_threaded() { let start = Local::now(); let directory = super::test_utils::dir(); let end = { let logger = Logger::try_with_str("debug") .unwrap() .log_to_file(FileSpec::default().directory(&directory)); #[cfg(not(feature = "async"))] let logger = logger.write_mode(WriteMode::BufferAndFlush); #[cfg(feature = "async")] let logger = logger.write_mode(WriteMode::Async); let logger = logger .format(test_format) .duplicate_to_stderr(Duplicate::Info) .rotate( Criterion::Size(ROTATE_OVER_SIZE), Naming::Timestamps, Cleanup::KeepLogAndCompressedFiles(NO_OF_LOG_FILES, NO_OF_GZ_FILES), ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!( "create a huge number of log lines with a considerable number of threads, \ verify the log" ); let cond_sync = CondSync::new(0_usize); let worker_handles = start_worker_threads(NO_OF_THREADS, &cond_sync); cond_sync .wait_until(|value| *value == NO_OF_THREADS) .unwrap(); logger.set_new_spec(LogSpecification::parse("trace").unwrap()); wait_for_workers_to_close(worker_handles); Local::now() }; let delta1_ms = end.signed_duration_since(start).num_milliseconds(); let delta2_ms = Local::now().signed_duration_since(end).num_milliseconds(); println!( "Task executed with {NO_OF_THREADS} threads in {delta1_ms} ms, \ program added {delta2_ms} ms to finish writing logs.", ); verify_logs(&directory.display().to_string()); } // Starts given number of worker threads and lets each execute `do_work` fn start_worker_threads( no_of_workers: usize, cond_sync: &CondSync, ) -> Vec> { let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); trace!("Starting {} worker threads", no_of_workers); for thread_number in 0..no_of_workers { trace!("Starting thread {}", thread_number); let cond_sync_t = cond_sync.clone(); worker_handles.push( std::thread::Builder::new() .name(thread_number.to_string()) .spawn(move || { do_work(thread_number, cond_sync_t); 0 }) .unwrap(), ); } trace!("All {} worker threads started.", worker_handles.len()); worker_handles } fn do_work(thread_number: usize, cond_sync: CondSync) { trace!("({}) Thread started working", thread_number); trace!("ERROR_IF_PRINTED"); cond_sync .modify_and_notify(|value| *value += 1, Other::One) .unwrap(); for idx in 0..NO_OF_LOGLINES_PER_THREAD { debug!("({}) writing out line number {}", thread_number, idx); } trace!("MUST_BE_PRINTED"); } fn wait_for_workers_to_close(worker_handles: Vec>) { for worker_handle in worker_handles { worker_handle .join() .unwrap_or_else(|e| panic!("Joining worker thread failed: {e:?}")); } trace!("All worker threads joined."); } pub fn test_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> std::io::Result<()> { write!( w, "XXXXX [{}] T[{:?}] {} [{}:{}] {}", now.now().format("%Y-%m-%d %H:%M:%S%.6f %:z"), std::thread::current().name().unwrap_or(""), record.level(), record.file().unwrap_or(""), record.line().unwrap_or(0), &record.args() ) } fn verify_logs(directory: &str) { let basename = String::from(directory).add("/").add( &std::path::Path::new(&std::env::args().next().unwrap()) .file_stem().unwrap(/*cannot fail*/) .to_string_lossy(), ); let mut counters = Counters { total: (None, 
BTreeMap::new()), threads: [ (None, BTreeMap::new()), (None, BTreeMap::new()), (None, BTreeMap::new()), (None, BTreeMap::new()), (None, BTreeMap::new()), ], }; let fn_pattern = String::with_capacity(180) .add(&basename) // .add("_r[0-9][0-9]*."); .add("_r*."); let no_of_log_files = glob(&fn_pattern.clone().add("log")) .unwrap() .map(Result::unwrap) .inspect(|p| inspect_file(p, &mut counters)) .count(); let no_of_gz_files = glob(&fn_pattern.add("gz")) .unwrap() .map(Result::unwrap) .inspect(|p| inspect_file(p, &mut counters)) .count(); assert_eq!(no_of_log_files, NO_OF_LOG_FILES + 1); assert_eq!(no_of_gz_files, NO_OF_GZ_FILES); // info!("Found correct number of log and compressed files"); write_csv(directory, "total.csv", &counters.total.1); write_csv(directory, "thread_0.csv", &counters.threads[0].1); write_csv(directory, "thread_1.csv", &counters.threads[1].1); write_csv(directory, "thread_2.csv", &counters.threads[2].1); write_csv(directory, "thread_3.csv", &counters.threads[3].1); write_csv(directory, "thread_4.csv", &counters.threads[4].1); } fn inspect_file(p: &Path, counters: &mut Counters) { let buf_reader: Box = if p.extension().unwrap() == "gz" { Box::new(BufReader::new(GzDecoder::new(BufReader::new( File::open(p).unwrap(), )))) } else { Box::new(BufReader::new(File::open(p).unwrap())) }; const TS: &str = "%Y-%m-%d %H:%M:%S.%.6f %:z"; for line in buf_reader.lines() { let line = line.unwrap(); //9 fraction digits, should be 6 if let Ok(ts) = NaiveDateTime::parse_from_str(&line[7..40], TS) { let n = match &line[45..46].parse::() { Ok(n) => *n, Err(_) => continue, }; if let Some(bts) = counters.total.0 { *counters .total .1 .entry((ts - bts).num_microseconds().unwrap()) .or_insert(1) += 1; } counters.total.0 = Some(ts); if let Some(bts) = counters.threads[n].0 { *counters.threads[n] .1 .entry((ts - bts).num_microseconds().unwrap()) .or_insert(1) += 1; } counters.threads[n].0 = Some(ts); } } } fn write_csv(directory: &str, name: &str, data: &BTreeMap) { let mut path = PathBuf::from(directory); path.push(name); let mut file = std::io::BufWriter::new( std::fs::OpenOptions::new() .write(true) .create(true) .truncate(true) .open(path) .unwrap(), ); for (interval, count) in data { writeln!(file, "{interval:?};{count};").unwrap(); } } struct Counters { total: (Option, BTreeMap), threads: [(Option, BTreeMap); 5], } } flexi_logger-0.29.8/tests/test_multi_threaded_cleanup_use_utc.rs000064400000000000000000000126441046102023000234000ustar 00000000000000mod test_utils; #[cfg(feature = "compress")] mod d { use cond_sync::{CondSync, Other}; use flexi_logger::{ Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, Logger, Naming, WriteMode, TS_DASHES_BLANK_COLONS_DOT_BLANK, }; use glob::glob; use log::*; use std::{ops::Add, thread::JoinHandle}; const NO_OF_THREADS: usize = 5; const NO_OF_LOGLINES_PER_THREAD: usize = 20_000; const ROTATE_OVER_SIZE: u64 = 600_000; const NO_OF_LOG_FILES: usize = 2; const NO_OF_GZ_FILES: usize = 5; // we use a special log line format that starts with a special string // so that it is easier to verify that all log lines are written correctly #[test] fn multi_threaded() { let directory = super::test_utils::dir(); { let _stopwatch = super::test_utils::Stopwatch::default(); let logger = Logger::try_with_str("debug") .unwrap() .log_to_file(FileSpec::default().directory(&directory)) .write_mode(WriteMode::BufferAndFlushWith( 10 * 1024, std::time::Duration::from_millis(600), )) .format(test_format) .duplicate_to_stderr(Duplicate::Info) .rotate( 
Criterion::Size(ROTATE_OVER_SIZE), Naming::Timestamps, Cleanup::KeepLogAndCompressedFiles(NO_OF_LOG_FILES, NO_OF_GZ_FILES), ) .cleanup_in_background_thread(false) .use_utc() .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!( "create a huge number of log lines with a considerable number of threads, \ verify the log" ); let cond_sync = CondSync::new(0_usize); let worker_handles = start_worker_threads(NO_OF_THREADS, &cond_sync); cond_sync .wait_until(|value| *value == NO_OF_THREADS) .unwrap(); logger.set_new_spec(LogSpecification::parse("trace").unwrap()); join_all_workers(worker_handles); } // drop stopwatch and logger verify_logs(&directory.display().to_string()); } // Starts given number of worker threads and lets each execute `do_work` fn start_worker_threads( no_of_workers: usize, cond_sync: &CondSync, ) -> Vec> { let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); trace!("Starting {} worker threads", no_of_workers); for thread_number in 0..no_of_workers { trace!("Starting thread {}", thread_number); let cond_sync_t = cond_sync.clone(); worker_handles.push( std::thread::Builder::new() .name(thread_number.to_string()) .spawn(move || { do_work(thread_number, cond_sync_t); 0 }) .unwrap(), ); } trace!("All {} worker threads started.", worker_handles.len()); worker_handles } fn do_work(thread_number: usize, cond_sync: CondSync) { trace!("({}) Thread started working", thread_number); trace!("ERROR_IF_PRINTED"); cond_sync .modify_and_notify(|value| *value += 1, Other::One) .unwrap(); for idx in 0..NO_OF_LOGLINES_PER_THREAD { debug!("({}) writing out line number {}", thread_number, idx); } trace!("MUST_BE_PRINTED"); } fn join_all_workers(worker_handles: Vec>) { for worker_handle in worker_handles { worker_handle .join() .unwrap_or_else(|e| panic!("Joining worker thread failed: {e:?}")); } trace!("All worker threads joined."); } pub fn test_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> std::io::Result<()> { write!( w, "XXXXX [{}] T[{:?}] {} [{}:{}] {}", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), std::thread::current().name().unwrap_or(""), record.level(), record.file().unwrap_or(""), record.line().unwrap_or(0), &record.args() ) } fn verify_logs(directory: &str) { // Since the cleanup deleted log files, we can only check that the correct number of // log files and compressed files exist let basename = String::from(directory).add("/").add( &std::path::Path::new(&std::env::args().next().unwrap()) .file_stem().unwrap(/*cannot fail*/) .to_string_lossy(), ); let fn_pattern = String::with_capacity(180) .add(&basename) .add("_r[0-9][0-9]*."); let no_of_log_files = glob(&fn_pattern.clone().add("log")) .unwrap() .map(Result::unwrap) .count(); let no_of_gz_files = glob(&fn_pattern.add("gz")) .unwrap() .map(Result::unwrap) .count(); assert_eq!(no_of_log_files, NO_OF_LOG_FILES); assert_eq!(no_of_gz_files, NO_OF_GZ_FILES); info!("Found correct number of log and compressed files"); } } flexi_logger-0.29.8/tests/test_multi_threaded_dates.rs000064400000000000000000000115321046102023000213150ustar 00000000000000mod test_utils; use cond_sync::{CondSync, Other}; use flexi_logger::{ Age, Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, Logger, Naming, TS_DASHES_BLANK_COLONS_DOT_BLANK, }; use glob::glob; use log::*; use std::{ fs::File, io::{BufRead, BufReader}, ops::Add, thread::JoinHandle, }; const NO_OF_THREADS: usize = 5; const NO_OF_LOGLINES_PER_THREAD: usize = 20_000; // we use a special log line format 
that starts with a special string so that it is easier to // verify that all log lines are written correctly #[test] fn test_multi_threaded_dates() { let directory = test_utils::dir(); { let _stopwatch = test_utils::Stopwatch::default(); let logger = Logger::try_with_str("debug") .unwrap() .log_to_file(FileSpec::default().directory(&directory)) .format(test_format) .create_symlink("link_to_mt_log") .duplicate_to_stderr(Duplicate::Info) .rotate( Criterion::Age(Age::Second), Naming::Timestamps, Cleanup::Never, ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!("create many log lines with a considerable number of threads, verify the log"); let cond_sync = CondSync::new(0_usize); let worker_handles = start_worker_threads(NO_OF_THREADS, &cond_sync); cond_sync .wait_until(|value| *value == NO_OF_THREADS) .unwrap(); logger.set_new_spec(LogSpecification::parse("trace").unwrap()); join_all_workers(worker_handles); } // drop stopwatch verify_logs(&directory.display().to_string()); } // Starts given number of worker threads and lets each execute `do_work` fn start_worker_threads(no_of_workers: usize, cond_sync: &CondSync) -> Vec> { let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); trace!("Starting {} worker threads", no_of_workers); for thread_number in 0..no_of_workers { trace!("Starting thread {}", thread_number); let cond_sync_t = cond_sync.clone(); worker_handles.push( std::thread::Builder::new() .name(thread_number.to_string()) .spawn(move || { do_work(thread_number, cond_sync_t); 0 }) .unwrap(), ); } trace!("All {} worker threads started.", worker_handles.len()); worker_handles } fn do_work(thread_number: usize, cond_sync: CondSync) { trace!("({}) Thread started working", thread_number); trace!("ERROR_IF_PRINTED"); cond_sync .modify_and_notify(|value| *value += 1, Other::One) .unwrap(); for idx in 0..NO_OF_LOGLINES_PER_THREAD { debug!("({}) writing out line number {}", thread_number, idx); } trace!("MUST_BE_PRINTED"); } fn join_all_workers(worker_handles: Vec>) { for worker_handle in worker_handles { worker_handle .join() .unwrap_or_else(|e| panic!("Joining worker thread failed: {e:?}")); } trace!("All worker threads joined."); } pub fn test_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> std::io::Result<()> { write!( w, "XXXXX [{}] T[{:?}] {} [{}:{}] {}", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), std::thread::current().name().unwrap_or(""), record.level(), record.file().unwrap_or(""), record.line().unwrap_or(0), &record.args() ) } fn verify_logs(directory: &str) { // read all files let pattern = String::from(directory).add("/*"); let globresults = match glob(&pattern) { Err(e) => panic!("Is this ({pattern}) really a directory? 
Listing failed with {e}",), Ok(globresults) => globresults, }; let mut no_of_log_files = 0; let mut line_count = 0_usize; for globresult in globresults { let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {e}")); let f = File::open(&pathbuf) .unwrap_or_else(|e| panic!("Cannot open file {pathbuf:?} due to {e}")); no_of_log_files += 1; let mut reader = BufReader::new(f); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { if buffer.starts_with("XXXXX") { line_count += 1; } else { panic!("irregular line in log file {pathbuf:?}: \"{buffer}\""); } buffer.clear(); } } assert_eq!( line_count, NO_OF_THREADS * (NO_OF_LOGLINES_PER_THREAD + 1) + 3 ); println!( "Found {line_count} log lines from {NO_OF_THREADS} threads in {no_of_log_files} files", ); } flexi_logger-0.29.8/tests/test_multi_threaded_numbers.rs000064400000000000000000000135001046102023000216650ustar 00000000000000mod test_utils; use cond_sync::{CondSync, Other}; use flexi_logger::{ Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, LogfileSelector, Logger, Naming, WriteMode, TS_DASHES_BLANK_COLONS_DOT_BLANK, }; use glob::glob; use log::*; use std::{ fs::File, io::{BufRead, BufReader}, ops::Add, thread::JoinHandle, }; const NO_OF_THREADS: usize = 5; const NO_OF_LOGLINES_PER_THREAD: usize = 20_000; const ROTATE_OVER_SIZE: u64 = 800_000; // we use a special log line format that starts with a special string so that it is easier to // verify that all log lines are written correctly #[test] fn multi_threaded() { test_utils::wait_for_start_of_second(); let directory = test_utils::dir(); { let logger; let _stopwatch = test_utils::Stopwatch::default(); logger = Logger::try_with_str("debug") .unwrap() .log_to_file( FileSpec::default() .basename("test_mtn") .directory(&directory), ) .write_mode(WriteMode::BufferAndFlush) .format(test_format) .duplicate_to_stderr(Duplicate::Info) .rotate( Criterion::Size(ROTATE_OVER_SIZE), Naming::Numbers, Cleanup::Never, ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!( "create a huge number of log lines with a considerable number of threads, \ verify the log" ); let logger2 = logger.clone(); let cond_sync = CondSync::new(0_usize); let worker_handles = start_worker_threads(NO_OF_THREADS, &cond_sync); cond_sync .wait_until(|value| *value == NO_OF_THREADS) .unwrap(); std::thread::Builder::new() .spawn(move || { logger2.set_new_spec(LogSpecification::parse("trace").unwrap()); 0 }) .unwrap(); wait_for_workers_to_close(worker_handles); let log_files = logger .existing_log_files( &LogfileSelector::default() .with_compressed_files() .with_r_current(), ) .unwrap(); assert_eq!(log_files.len(), 17); logger.parse_new_spec("info").unwrap(); for f in log_files { trace!("Existing log file: {f:?}"); } } verify_logs(&directory.display().to_string()); } // Starts given number of worker threads and lets each execute `do_work` fn start_worker_threads(no_of_workers: usize, cond_sync: &CondSync) -> Vec> { let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); trace!( "(should not appear) Starting {} worker threads", no_of_workers ); for thread_number in 0..no_of_workers { trace!("(should not appear) Starting thread {}", thread_number); let cond_sync_t = cond_sync.clone(); worker_handles.push( std::thread::Builder::new() .name(thread_number.to_string()) .spawn(move || { do_work(thread_number, cond_sync_t); 0 }) .unwrap(), ); } trace!( "(should not appear) All {} worker threads started.", worker_handles.len() ); 
worker_handles } fn do_work(thread_number: usize, cond_sync: CondSync) { trace!( "(should not appear) ({}) Thread started working", thread_number ); cond_sync .modify_and_notify(|value| *value += 1, Other::One) .unwrap(); for idx in 0..NO_OF_LOGLINES_PER_THREAD { debug!("({}) writing out line number {}", thread_number, idx); } trace!("MUST_BE_PRINTED"); } fn wait_for_workers_to_close(worker_handles: Vec>) { for worker_handle in worker_handles { worker_handle .join() .unwrap_or_else(|e| panic!("Joining worker thread failed: {e:?}")); } trace!("All worker threads joined."); } pub fn test_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> std::io::Result<()> { write!( w, "XXXXX [{}] T[{:?}] {} [{}:{}] {}", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), std::thread::current().name().unwrap_or(""), record.level(), record.file().unwrap_or(""), record.line().unwrap_or(0), &record.args() ) } fn verify_logs(directory: &str) { // read all files let pattern = String::from(directory).add("/*"); let globresults = match glob(&pattern) { Err(e) => panic!("Is this ({pattern}) really a directory? Listing failed with {e}",), Ok(globresults) => globresults, }; let mut no_of_log_files = 0; let mut line_count = 0_usize; for globresult in globresults { let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {e}")); let f = File::open(&pathbuf) .unwrap_or_else(|e| panic!("Cannot open file {pathbuf:?} due to {e}")); no_of_log_files += 1; let mut reader = BufReader::new(f); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { if buffer.starts_with("XXXXX") && !buffer.contains("should not appear") { line_count += 1; } else { panic!("irregular line in log file {pathbuf:?}: \"{buffer}\""); } buffer.clear(); } } assert_eq!( line_count, NO_OF_THREADS * NO_OF_LOGLINES_PER_THREAD + NO_OF_THREADS + 3 ); println!( "Found {line_count} log lines from {NO_OF_THREADS} threads in {no_of_log_files} files", ); } flexi_logger-0.29.8/tests/test_multi_threaded_numbers_dedup.rs000064400000000000000000000240371046102023000230550ustar 00000000000000mod test_utils; use flexi_logger::{ filter::{LogLineFilter, LogLineWriter}, Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, Logger, Naming, WriteMode, TS_DASHES_BLANK_COLONS_DOT_BLANK, }; use glob::glob; use log::*; use std::cmp::Ordering; use std::fs::File; use std::io::{BufRead, BufReader}; use std::num::NonZeroUsize; use std::ops::Add; use std::sync::Mutex; use std::thread::JoinHandle; const NO_OF_THREADS: usize = 5; const NO_OF_LOGLINES_PER_THREAD: usize = 20_000; const ROTATE_OVER_SIZE: u64 = 800_000; // we use a special log line format that starts with a special string so that it is easier to // verify that all log lines are written correctly #[test] fn multi_threaded() { test_utils::wait_for_start_of_second(); let directory = test_utils::dir(); { let _logger; let _stopwatch = test_utils::Stopwatch::default(); _logger = Logger::try_with_str("debug") .unwrap() .log_to_file( FileSpec::default() .basename("test_mtn") .directory(&directory), ) .write_mode(WriteMode::BufferAndFlush) .format(test_format) .duplicate_to_stderr(Duplicate::Info) .rotate( Criterion::Size(ROTATE_OVER_SIZE), Naming::Numbers, Cleanup::Never, ) .filter(Box::new(DedupWriter::with_leeway( std::num::NonZeroUsize::new(22).unwrap(), ))) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!("create a huge number of log lines, but deduplicate them"); wait_for_workers_to_close(start_worker_threads(NO_OF_THREADS)); 
} verify_logs(&directory.display().to_string()); } // Starts given number of worker threads and lets each execute `do_work` fn start_worker_threads(no_of_workers: usize) -> Vec> { let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); trace!("Starting {} worker threads", no_of_workers); for thread_number in 0..no_of_workers { trace!("Starting thread {}", thread_number); worker_handles.push( std::thread::Builder::new() .name(thread_number.to_string()) .spawn(move || { do_work(thread_number); 0 }) .unwrap(), ); } trace!("All {} worker threads started.", worker_handles.len()); worker_handles } fn do_work(thread_number: usize) { trace!("({}) Thread started working", thread_number); trace!("ERROR_IF_PRINTED"); for _idx in 0..NO_OF_LOGLINES_PER_THREAD { debug!("bliblablub"); } std::thread::sleep(std::time::Duration::from_millis(500)); } fn wait_for_workers_to_close(worker_handles: Vec>) { for worker_handle in worker_handles { worker_handle .join() .unwrap_or_else(|e| panic!("Joining worker thread failed: {e:?}")); } trace!("All worker threads joined."); } pub fn test_format( w: &mut dyn std::io::Write, now: &mut DeferredNow, record: &Record, ) -> std::io::Result<()> { write!( w, "XXXXX [{}] T[{:?}] {} [{}:{}] {}", now.format(TS_DASHES_BLANK_COLONS_DOT_BLANK), std::thread::current().name().unwrap_or(""), record.level(), record.file().unwrap_or(""), record.line().unwrap_or(0), &record.args() ) } /// A helper to skip duplicated consecutive log lines. pub struct DedupWriter { deduper: Mutex, } impl DedupWriter { /// Constructs a new [`Deduper`] that will skip duplicated entries after /// some record has been received for the consecutive times specified by /// `leeway`. pub fn with_leeway(leeway: NonZeroUsize) -> Self { Self { deduper: Mutex::new(Deduper::with_leeway(leeway)), } } } impl LogLineFilter for DedupWriter { fn write( &self, now: &mut DeferredNow, record: &Record, log_line_writer: &dyn LogLineWriter, ) -> std::io::Result<()> { let mut deduper = self.deduper.lock().unwrap(); let dedup_action = deduper.dedup(record); match dedup_action { DedupAction::Allow => { // Just log log_line_writer.write(now, record) } DedupAction::AllowLastOfLeeway(_) => { // Log duplicate log_line_writer.write(now, record)?; // Log warning log_line_writer.write( now, &log::Record::builder() .level(log::Level::Warn) .file_static(Some(file!())) .line(Some(line!())) .module_path_static(Some("flexi_logger")) .target("flexi_logger") .args(format_args!( "last record has been repeated consecutive times, \ following duplicates will be skipped...", )) .build(), ) } DedupAction::AllowAfterSkipped(skipped) => { // Log summary of skipped log_line_writer.write( now, &log::Record::builder() .level(log::Level::Info) .file_static(Some(file!())) .line(Some(line!())) .module_path_static(Some("flexi_logger")) .target("flexi_logger") .args(format_args!("last record was skipped {skipped} times")) .build(), )?; // Log new record log_line_writer.write(now, record) } DedupAction::Skip => Ok(()), } } } // A helper to track duplicated consecutive logs and skip them until a // different event is received. struct Deduper { leeway: NonZeroUsize, last_record: LastRecord, duplicates: usize, } /// Action to be performed for some record. #[derive(Debug, PartialEq, Eq)] enum DedupAction { /// The record should be allowed and logged normally. Allow, /// The record is the last consecutive duplicate to be allowed. /// /// Any following duplicates will be skipped until a different event is /// received (or the duplicates count overflows). 
    AllowLastOfLeeway(usize),
    /// The record should be allowed, the last `N` records were skipped as
    /// consecutive duplicates.
    AllowAfterSkipped(usize),
    /// The record should be skipped because no more consecutive duplicates
    /// are allowed.
    Skip,
}

impl Deduper {
    // Constructs a new [`Deduper`] that will skip duplicated entries after
    // some record has been received for the consecutive times specified by
    // `leeway`.
    pub fn with_leeway(leeway: NonZeroUsize) -> Self {
        Self {
            leeway,
            last_record: LastRecord {
                file: None,
                line: None,
                msg: String::new(),
            },
            duplicates: 0,
        }
    }

    /// Returns whether a record should be skipped or allowed.
    ///
    /// See [`DedupAction`].
    fn dedup(&mut self, record: &Record) -> DedupAction {
        let new_line = record.line();
        let new_file = record.file();
        let new_msg = record.args().to_string();
        if new_line == self.last_record.line
            && new_file == self.last_record.file.as_deref()
            && new_msg == self.last_record.msg
        {
            // Update dups count
            if let Some(updated_dups) = self.duplicates.checked_add(1) {
                self.duplicates = updated_dups;
            } else {
                let skipped = self.duplicates - self.leeway();
                self.duplicates = 0;
                return DedupAction::AllowAfterSkipped(skipped);
            }
            match self.duplicates.cmp(&self.leeway()) {
                Ordering::Less => DedupAction::Allow,
                Ordering::Equal => DedupAction::AllowLastOfLeeway(self.leeway()),
                Ordering::Greater => DedupAction::Skip,
            }
        } else {
            // Update last record
            self.last_record.file = new_file.map(ToOwned::to_owned);
            self.last_record.line = new_line;
            self.last_record.msg = new_msg;
            let dups = self.duplicates;
            self.duplicates = 0;
            match dups {
                n if n > self.leeway() => DedupAction::AllowAfterSkipped(n - self.leeway()),
                _ => DedupAction::Allow,
            }
        }
    }

    fn leeway(&self) -> usize {
        self.leeway.get()
    }
}

struct LastRecord {
    file: Option<String>,
    line: Option<u32>,
    msg: String,
}

fn verify_logs(directory: &str) {
    // read all files
    let pattern = String::from(directory).add("/*");
    let globresults = match glob(&pattern) {
        Err(e) => panic!("Is this ({pattern}) really a directory?
Listing failed with {e}"), Ok(globresults) => globresults, }; let mut no_of_log_files = 0; let mut line_count = 0_usize; for globresult in globresults { let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {e}")); let f = File::open(&pathbuf) .unwrap_or_else(|e| panic!("Cannot open file {pathbuf:?} due to {e}")); no_of_log_files += 1; let mut reader = BufReader::new(f); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { if buffer.starts_with("XXXXX") { line_count += 1; } else { panic!("irregular line in log file {pathbuf:?}: \"{buffer}\""); } buffer.clear(); } } assert_eq!(line_count, 27); println!( "Found {line_count} log lines from {NO_OF_THREADS} threads in {no_of_log_files} files" ); } flexi_logger-0.29.8/tests/test_multi_threaded_stderr.rs000064400000000000000000000044211046102023000215170ustar 00000000000000mod test_utils; use flexi_logger::{Logger, WriteMode}; use log::*; use std::thread::{self, JoinHandle}; const NO_OF_THREADS: usize = 5; const NO_OF_LOGLINES_PER_THREAD: usize = 5_000; #[test] fn multi_threaded() { test_utils::wait_for_start_of_second(); let _logger = Logger::try_with_str("debug") .unwrap() .log_to_stderr() .write_mode(WriteMode::BufferAndFlushWith( 1024, std::time::Duration::from_millis(600), )) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!("create a huge number of log lines with a considerable number of threads"); for i in 0..50 { std::thread::sleep(std::time::Duration::from_millis(100)); info!("********** check delay of this log line ({}) **********", i); } let _stopwatch = test_utils::Stopwatch::default(); let worker_handles = start_worker_threads(NO_OF_THREADS); wait_for_workers_to_close(worker_handles); } // Starts given number of worker threads and lets each execute `do_work` fn start_worker_threads(no_of_workers: usize) -> Vec> { let mut worker_handles: Vec> = Vec::with_capacity(no_of_workers); trace!("Starting {} worker threads", no_of_workers); for thread_number in 0..no_of_workers { trace!("Starting thread {}", thread_number); worker_handles.push( thread::Builder::new() .name(thread_number.to_string()) .spawn(move || { do_work(thread_number); 0 }) .unwrap(), ); } trace!("All {} worker threads started.", worker_handles.len()); worker_handles } fn do_work(thread_number: usize) { trace!("({}) Thread started working", thread_number); trace!("ERROR_IF_PRINTED"); for idx in 0..NO_OF_LOGLINES_PER_THREAD { debug!("({}) writing out line number {}", thread_number, idx); } trace!("MUST_BE_PRINTED"); } fn wait_for_workers_to_close(worker_handles: Vec>) { for worker_handle in worker_handles { worker_handle .join() .unwrap_or_else(|e| panic!("Joining worker thread failed: {e:?}")); } trace!("All worker threads joined."); } flexi_logger-0.29.8/tests/test_no_logger.rs000064400000000000000000000007601046102023000171170ustar 00000000000000use flexi_logger::Logger; use log::*; #[test] fn you_must_not_see_anything() { Logger::try_with_str("trace") .unwrap() .do_not_log() .start() .unwrap(); error!("This is an error message - you must not see it!"); warn!("This is a warning - you must not see it!"); info!("This is an info message - you must not see it!"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); } flexi_logger-0.29.8/tests/test_parse_errors.rs000064400000000000000000000044531046102023000176550ustar 00000000000000use flexi_logger::{FlexiLoggerError, LogSpecification, Logger}; use log::*; #[test] fn 
parse_errors_logspec() { match LogSpecification::parse("info, foo=bar, fuzz=debug") .err() .unwrap() { FlexiLoggerError::Parse(_, logspec) => { assert_eq!( logspec.module_filters(), LogSpecification::parse("info, fuzz=debug") .unwrap() .module_filters() ); #[cfg(feature = "textfilter")] assert!(logspec.text_filter().is_none()); } _ => panic!("Wrong error from parsing (1)"), } match LogSpecification::parse("info, ene mene dubbedene") .err() .unwrap() { FlexiLoggerError::Parse(_, logspec) => { assert_eq!( logspec.module_filters(), LogSpecification::parse("info").unwrap().module_filters() ); #[cfg(feature = "textfilter")] assert!(logspec.text_filter().is_none()); } _ => panic!("Wrong error from parsing (2)"), } match LogSpecification::parse("ene mene dubbedene").err().unwrap() { FlexiLoggerError::Parse(_, logspec) => { assert_eq!( logspec.module_filters(), LogSpecification::off().module_filters() ); #[cfg(feature = "textfilter")] assert!(logspec.text_filter().is_none()); } _ => panic!("Wrong error from parsing (3)"), } match LogSpecification::parse("INFO, ene / mene / dubbedene") .err() .unwrap() { FlexiLoggerError::Parse(_, logspec) => { assert_eq!( logspec.module_filters(), LogSpecification::off().module_filters() ); #[cfg(feature = "textfilter")] assert!(logspec.text_filter().is_none()); } _ => panic!("Wrong error from parsing (4)"), } } #[test] fn parse_errors_logger() { let result = Logger::try_with_str("info, foo=baz"); assert!(result.is_err()); let error = result.err().unwrap(); println!("err: {error}"); Logger::try_with_str("info, foo=debug") .unwrap() .start() .unwrap(); info!("logging works"); info!("logging works"); } flexi_logger-0.29.8/tests/test_reconfigure_methods.rs000064400000000000000000000077351046102023000212100ustar 00000000000000mod test_utils; use flexi_logger::{FileSpec, Logger, LoggerHandle}; use log::*; #[test] fn test_reconfigure_methods() { let mut logger = Logger::try_with_str("info") .unwrap() .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); test_parse_new_spec(&logger); test_push_new_spec(&mut logger); validate_logs(&logger); } fn test_parse_new_spec(logger: &LoggerHandle) { error!("1-error message"); warn!("1-warning"); info!("1-info message"); debug!("1-debug message - you must not see it!"); trace!("1-trace message - you must not see it!"); logger.parse_new_spec("error").ok(); error!("1-error message"); warn!("1-warning - you must not see it!"); info!("1-info message - you must not see it!"); debug!("1-debug message - you must not see it!"); trace!("1-trace message - you must not see it!"); logger.parse_new_spec("trace").ok(); error!("1-error message"); warn!("1-warning"); info!("1-info message"); debug!("1-debug message"); trace!("1-trace message"); logger.parse_new_spec("info").ok(); } fn test_push_new_spec(logger: &mut LoggerHandle) { error!("2-error message"); warn!("2-warning"); info!("2-info message"); debug!("2-debug message - you must not see it!"); trace!("2-trace message - you must not see it!"); logger.parse_and_push_temp_spec("error").ok(); error!("2-error message"); warn!("2-warning - you must not see it!"); info!("2-info message - you must not see it!"); debug!("2-debug message - you must not see it!"); trace!("2-trace message - you must not see it!"); logger.parse_and_push_temp_spec("trace").ok(); error!("2-error message"); warn!("2-warning"); info!("2-info message"); debug!("2-debug message"); trace!("2-trace message"); 
logger.pop_temp_spec(); // we should be back on error error!("2-error message"); warn!("2-warning - you must not see it!"); info!("2-info message - you must not see it!"); debug!("2-debug message - you must not see it!"); trace!("2-trace message - you must not see it!"); logger.pop_temp_spec(); // we should be back on info error!("2-error message"); warn!("2-warning"); info!("2-info message"); debug!("2-debug message - you must not see it!"); trace!("2-trace message - you must not see it!"); logger.pop_temp_spec(); // should be a no-op } fn validate_logs(logger: &LoggerHandle) { logger.validate_logs(&[ ("ERROR", "test_reconfigure_methods", "1-error"), ("WARN", "test_reconfigure_methods", "1-warning"), ("INFO", "test_reconfigure_methods", "1-info"), // ("ERROR", "test_reconfigure_methods", "1-error"), // ("ERROR", "test_reconfigure_methods", "1-error"), ("WARN", "test_reconfigure_methods", "1-warning"), ("INFO", "test_reconfigure_methods", "1-info"), ("DEBUG", "test_reconfigure_methods", "1-debug"), ("TRACE", "test_reconfigure_methods", "1-trace"), // ----- ("ERROR", "test_reconfigure_methods", "2-error"), ("WARN", "test_reconfigure_methods", "2-warning"), ("INFO", "test_reconfigure_methods", "2-info"), // ("ERROR", "test_reconfigure_methods", "2-error"), // ("ERROR", "test_reconfigure_methods", "2-error"), ("WARN", "test_reconfigure_methods", "2-warning"), ("INFO", "test_reconfigure_methods", "2-info"), ("DEBUG", "test_reconfigure_methods", "2-debug"), ("TRACE", "test_reconfigure_methods", "2-trace"), // ("ERROR", "test_reconfigure_methods", "2-error"), // ("ERROR", "test_reconfigure_methods", "2-error"), ("WARN", "test_reconfigure_methods", "2-warning"), ("INFO", "test_reconfigure_methods", "2-info"), ]); } flexi_logger-0.29.8/tests/test_recursion.rs000064400000000000000000000047341046102023000171620ustar 00000000000000mod test_utils; use flexi_logger::DeferredNow; #[cfg(feature = "colors")] use flexi_logger::{colored_detailed_format, AdaptiveFormat}; use flexi_logger::{detailed_format, Duplicate, FileSpec, Logger}; use log::*; use std::sync::atomic::AtomicU32; #[test] fn test_recursion() { let logger = Logger::try_with_str("info") .unwrap() .format(detailed_format) .log_to_file(FileSpec::default().directory(self::test_utils::dir())) .duplicate_to_stderr(Duplicate::All) .duplicate_to_stdout(Duplicate::All) .print_message(); #[cfg(feature = "colors")] let logger = logger.format_for_stderr(colored_detailed_format); #[cfg(not(feature = "colors"))] let logger = logger.format_for_stderr(detailed_format); #[cfg(feature = "colors")] let logger = logger.adaptive_format_for_stdout(AdaptiveFormat::Custom(my_format, my_colored_format)); logger .start() .unwrap_or_else(|e| panic!("Logger initialization failed because: {e}")); let dummy = Dummy(); for _ in 0..10 { error!("This is an error message for {}", dummy); warn!("This is a warning for {}", dummy); info!("This is an info message for {}", dummy); debug!("This is a debug message for {}", dummy); trace!("This is a trace message for {}", dummy); } } struct Dummy(); impl std::fmt::Display for Dummy { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { static COUNT: AtomicU32 = AtomicU32::new(0); info!( "Here comes the inner message ({}):-| ", COUNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed) ); f.write_str("Dummy!!")?; Ok(()) } } #[cfg(feature = "colors")] pub fn my_colored_format( w: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let level = record.level(); let style 
= nu_ansi_term::Style::new().fg(nu_ansi_term::Color::Fixed(165)); write!( w, "{} [{}] {}", style.paint(level.to_string()), record.module_path().unwrap_or(""), style.paint(record.args().to_string()) ) } pub fn my_format( w: &mut dyn std::io::Write, _now: &mut DeferredNow, record: &Record, ) -> Result<(), std::io::Error> { let level = record.level(); write!( w, "{} [{}] {}", level, record.module_path().unwrap_or(""), record.args() ) } flexi_logger-0.29.8/tests/test_restart_with_no_suffix.rs000064400000000000000000000044311046102023000217420ustar 00000000000000mod test_utils; use flexi_logger::{Cleanup, Criterion, FileSpec, LogfileSelector, Logger, Naming}; use log::*; const COUNT: u8 = 2; #[test] fn test_restart_with_no_suffix() { if let Some(value) = test_utils::dispatch(COUNT) { std::thread::sleep(std::time::Duration::from_millis(1000)); work(value) } } fn work(value: u8) { let directory = test_utils::dir(); let file_spec = FileSpec::default() .directory(directory.clone()) .o_suffix(match value { 0 => { println!("With suffix log"); Some("log".to_string()) } 1 => { println!("Without suffix"); None } COUNT..=u8::MAX => { unreachable!("got unexpected value {}", value) } }); let logger = Logger::try_with_str("debug") .unwrap() .log_to_file(file_spec) .rotate(Criterion::Size(100), Naming::Timestamps, Cleanup::Never) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); for i in 0..100 { error!("This is error message {i}"); std::thread::sleep(std::time::Duration::from_millis(10)); } // verify all log lines are found assert_eq!(100, test_utils::count_log_lines(&directory)); // verify that no unexpected files are found match value { 0 => assert_eq!( 0, logger .existing_log_files(&LogfileSelector::default()) .unwrap() .into_iter() .filter( |pb| pb.extension().map(|oss| oss.to_string_lossy().to_string()) != Some(String::from("log")) ) .count() ), 1 => assert_eq!( 0, logger .existing_log_files(&LogfileSelector::default()) .unwrap() .into_iter() .filter(|pb| match pb.extension() { Some(oss) => !oss.to_string_lossy().to_string().starts_with("restart"), None => false, }) .count() ), COUNT..=u8::MAX => { unreachable!("got unexpected value {}", value) } } } flexi_logger-0.29.8/tests/test_rotate_immediate_compression.rs000064400000000000000000000054731046102023000231070ustar 00000000000000mod test_utils; #[cfg(feature = "compress")] use flexi_logger::{Age, Cleanup, Criterion, Duplicate, FileSpec, Logger, Naming}; #[cfg(feature = "compress")] use log::*; #[cfg(feature = "compress")] const COUNT: u8 = 4; #[cfg(feature = "compress")] #[test] fn test_rotate_immediate_compression() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } #[cfg(feature = "compress")] fn work(value: u8) { match value { 0 => test_variant( Naming::Timestamps, Criterion::Age(Age::Second), Cleanup::KeepCompressedFiles(100), ), 1 => test_variant( Naming::TimestampsDirect, Criterion::Age(Age::Second), Cleanup::KeepCompressedFiles(100), ), 2 => test_variant( Naming::Numbers, Criterion::Age(Age::Second), Cleanup::KeepCompressedFiles(100), ), 3 => test_variant( Naming::NumbersDirect, Criterion::Age(Age::Second), Cleanup::KeepCompressedFiles(100), ), COUNT..=u8::MAX => unreachable!("asAS"), } } #[cfg(feature = "compress")] fn test_variant(naming: Naming, criterion: Criterion, cleanup: Cleanup) { use std::time::{Duration, Instant}; let directory = test_utils::dir(); test_utils::wait_for_start_of_second(); let mut written_lines = 1; { let logger = Logger::try_with_str("trace") .unwrap() .log_to_file( 
FileSpec::default() .directory(&directory) .suppress_basename(), ) .format_for_files(flexi_logger::detailed_format) .format_for_stderr(flexi_logger::detailed_format) .duplicate_to_stderr(Duplicate::Info) .rotate(criterion, naming, cleanup) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!( "test correct rotation by {}", match criterion { Criterion::Age(_) => "age", Criterion::AgeOrSize(_, _) => "age or size", Criterion::Size(_) => "size", } ); let start = Instant::now(); let max_runtime = Duration::from_millis(1_200); let sleep_time = Duration::from_millis(7); while Instant::now() - start < max_runtime { written_lines += 1; if written_lines % 17 == 4 { logger.trigger_rotation().unwrap(); } trace!("line_count = {written_lines}"); std::thread::sleep(sleep_time); } } let read_lines = test_utils::count_log_lines(&directory); assert_eq!( read_lines, written_lines, "wrong line count: {read_lines} instead of {written_lines}" ); } flexi_logger-0.29.8/tests/test_rotate_naming_variants.rs000064400000000000000000000140631046102023000217030ustar 00000000000000mod test_utils; use flexi_logger::{Age, Cleanup, Criterion, Duplicate, FileSpec, Logger, Naming}; use glob::glob; use log::*; use std::{ fs::File, io::{BufRead, BufReader}, ops::Add, path::Path, time::{Duration, Instant}, }; const COUNT: u8 = 14; #[test] fn test_rotate_naming_variants() { // work(6) if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } fn work(value: u8) { let directory = test_utils::dir(); match value { 0 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::Timestamps, Criterion::AgeOrSize(Age::Second, 200), ), 1 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::TimestampsDirect, Criterion::AgeOrSize(Age::Second, 200), ), 2 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::Numbers, Criterion::AgeOrSize(Age::Second, 200), ), 3 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::NumbersDirect, Criterion::AgeOrSize(Age::Second, 200), ), 4 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::Timestamps, Criterion::Age(Age::Second), ), 5 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::TimestampsDirect, Criterion::Age(Age::Second), ), 6 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::TimestampsCustomFormat { current_infix: Some("myCURRENT"), format: "%Y-%m-%d", }, Criterion::Age(Age::Second), ), 7 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::TimestampsCustomFormat { current_infix: Some(""), format: "%Y-%m-%d_%H-%M-%S", }, Criterion::Age(Age::Second), ), 8 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::TimestampsCustomFormat { current_infix: None, format: "%Y-%m-%d_%H-%M-%S", }, Criterion::Age(Age::Second), ), 9 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::Numbers, Criterion::Age(Age::Second), ), 10 => test_variant( FileSpec::default().directory(directory.join(value.to_string())), Naming::NumbersDirect, Criterion::Age(Age::Second), ), 11 => test_variant( FileSpec::default() .directory(directory.join(value.to_string())) .suppress_basename() .suppress_timestamp() .o_discriminant(Option::::None), Naming::NumbersDirect, Criterion::Age(Age::Second), ), 12 => test_variant( 
FileSpec::default() .directory(directory.join(value.to_string())) .suppress_basename() .suppress_timestamp() .o_discriminant(Option::::None), Naming::Timestamps, Criterion::Age(Age::Second), ), 13 => test_variant( FileSpec::default() .directory(directory.join(value.to_string())) .suppress_basename() .suppress_timestamp() .o_discriminant(Option::::None), Naming::Numbers, Criterion::Age(Age::Second), ), COUNT..=u8::MAX => unreachable!("Wrong dispatch"), } } fn test_variant(file_spec: FileSpec, naming: Naming, criterion: Criterion) { let directory = file_spec.used_directory(); let _logger = Logger::try_with_str("trace") .unwrap() .log_to_file(file_spec) .format_for_files(flexi_logger::detailed_format) .format_for_stderr(flexi_logger::detailed_format) .duplicate_to_stderr(Duplicate::Info) .rotate(criterion, naming, Cleanup::Never) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!( "test correct rotation by {} with Naming::{naming:?} ", match criterion { Criterion::Age(_) => "age", Criterion::AgeOrSize(_, _) => "age or size", Criterion::Size(_) => "size", } ); let mut line_count = 1; let start = Instant::now(); let max_runtime = Duration::from_millis(3_000); let sleep_time = Duration::from_millis(10); while Instant::now() - start < max_runtime { trace!("{}", 'a'); line_count += 1; std::thread::sleep(sleep_time); } verify_logs(&directory, line_count); } fn verify_logs(directory: &Path, count: usize) { // read all files let pattern = directory.display().to_string().add("/*"); let globresults = match glob(&pattern) { Err(e) => panic!("Is this ({pattern}) really a directory? Listing failed with {e}",), Ok(globresults) => globresults, }; let mut total_line_count = 0_usize; for globresult in globresults.into_iter() { let mut line_count = 0_usize; let pathbuf = globresult.unwrap_or_else(|e| panic!("Ups - error occured: {e}")); let f = File::open(&pathbuf) .unwrap_or_else(|e| panic!("Cannot open file {pathbuf:?} due to {e}")); let mut reader = BufReader::new(f); let mut buffer = String::new(); while reader.read_line(&mut buffer).unwrap() > 0 { line_count += 1; } total_line_count += line_count; } assert_eq!( total_line_count, count, "wrong line count: {total_line_count} instead of {count}" ); } flexi_logger-0.29.8/tests/test_specfile.rs000064400000000000000000000076151046102023000167440ustar 00000000000000mod test_utils; #[cfg(feature = "specfile_without_notification")] mod a { use flexi_logger::{detailed_format, FileSpec, Logger}; use log::*; use std::io::Write; const WAIT_MILLIS: u64 = 2000; /// Test of the specfile feature #[test] fn test_specfile() { let specfile = super::test_utils::file("logspec.toml"); std::fs::remove_file(&specfile).ok(); assert!(!specfile.exists()); let logger = Logger::try_with_str("info") .unwrap() .log_to_file( FileSpec::default() .directory(super::test_utils::dir()) .suppress_timestamp(), ) .format(detailed_format) .start_with_specfile(&specfile) .unwrap_or_else(|e| panic!("Logger initialization failed because: {e}")); assert!(specfile.exists()); error!("This is an error-0"); warn!("This is a warning-0"); info!("This is an info-0"); debug!("This is a debug-0"); trace!("This is a trace-0"); eprintln!( "[{}] ===== behave like many editors: rename and recreate; set to warn", super::test_utils::now_local() ); { let mut old_name = specfile.clone(); old_name.set_file_name("old_logspec.toml"); std::fs::rename(&specfile, old_name).unwrap(); let mut file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&specfile) .unwrap(); 
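        // Recreate-and-append, as editors typically do: the new specfile content below raises
        // the global level to 'warn'; the watcher should pick up the change within the
        // WAIT_MILLIS sleep that follows.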
file.write_all( b" global_level = 'warn' [modules] ", ) .unwrap(); } std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); error!("This is an error-1"); warn!("This is a warning-1"); info!("This is an info-1"); debug!("This is a debug-1"); trace!("This is a trace-1"); eprintln!( "[{}] ===== truncate and rewrite; set to error", super::test_utils::now_local() ); { let mut file = std::fs::OpenOptions::new() .truncate(true) .write(true) .open(&specfile) .unwrap(); file.write_all( "\ global_level = 'error'\n\ [modules]\n\ " .as_bytes(), ) .unwrap(); } std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); error!("This is an error-2"); warn!("This is a warning-2"); info!("This is an info-2"); debug!("This is a debug-2"); trace!("This is a trace-2"); if cfg!(feature = "specfile") { eprintln!("feature is: specfile!"); logger.validate_logs(&[ ("ERROR", "test_specfile::a", "error-0"), ("WARN", "test_specfile::a", "warning-0"), ("INFO", "test_specfile::a", "info-0"), ("ERROR", "test_specfile::a", "error-1"), ("WARN", "test_specfile::a", "warning-1"), ("ERROR", "test_specfile::a", "error-2"), ]); } else { eprintln!("feature is: specfile_without_notification!"); logger.validate_logs(&[ ("ERROR", "test_specfile::a", "error-0"), ("WARN", "test_specfile::a", "warning-0"), ("INFO", "test_specfile::a", "info-0"), ("ERROR", "test_specfile::a", "error-1"), ("WARN", "test_specfile::a", "warning-1"), ("INFO", "test_specfile::a", "info-1"), ("ERROR", "test_specfile::a", "error-2"), ("WARN", "test_specfile::a", "warning-2"), ("INFO", "test_specfile::a", "info-2"), ]); } } } flexi_logger-0.29.8/tests/test_textfilter.rs000064400000000000000000000024101046102023000173300ustar 00000000000000mod test_utils; #[test] #[cfg(feature = "textfilter")] fn test_textfilter() { use flexi_logger::{default_format, FileSpec, LogSpecification, Logger}; use log::*; let logspec = LogSpecification::parse("info/Hello").unwrap(); let logger = Logger::with(logspec) .format(default_format) .print_message() .log_to_file( FileSpec::default() .directory(self::test_utils::dir()) .suppress_timestamp(), ) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); error!("Hello, this is an error message"); warn!("This is a warning! Hello!!"); info!("Hello, this is an info message! Hello"); debug!("Hello, this is a debug message - you must not see it!"); trace!("Hello, this is a trace message - you must not see it!"); logger.validate_logs(&[ ("ERROR", "test_textfilter", "Hello, this"), ("WARN", "test_textfilter", "! Hello!!"), ("INFO", "test_textfilter", "! 
Hello"), ]); } flexi_logger-0.29.8/tests/test_trc.rs000064400000000000000000000056461046102023000157440ustar 00000000000000mod test_utils; #[cfg(feature = "trc")] mod a { use flexi_logger::{ writers::FileLogWriter, Age, Cleanup, Criterion, FileSpec, LogSpecification, Naming, WriteMode, }; use std::io::Write; use tracing::{debug, error, info, trace, warn}; const WAIT_MILLIS: u64 = 2000; /// Test of the specfile feature #[test] fn test_specfile() { let specfile = super::test_utils::file("logspec.toml"); std::fs::remove_file(&specfile).ok(); assert!(!specfile.exists()); let _keep_alive_handles = flexi_logger::trc::setup_tracing( LogSpecification::info(), Some(&specfile), FileLogWriter::builder(FileSpec::default().directory(super::test_utils::dir())) .rotate( Criterion::Age(Age::Day), Naming::Timestamps, Cleanup::KeepLogFiles(7), ) .write_mode(WriteMode::Async), ) .unwrap(); assert!(specfile.exists()); error!("This is an error-0"); warn!("This is a warning-0"); info!("This is an info-0"); debug!("This is a debug-0"); trace!("This is a trace-0"); eprintln!( "[{}] ===== behave like many editors: rename and recreate; set to warn", super::test_utils::now_local() ); { let mut old_name = specfile.clone(); old_name.set_file_name("old_logspec.toml"); std::fs::rename(&specfile, old_name).unwrap(); let mut file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&specfile) .unwrap(); file.write_all( b" global_level = 'warn' [modules] ", ) .unwrap(); } std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); error!("This is an error-1"); warn!("This is a warning-1"); info!("This is an info-1"); debug!("This is a debug-1"); trace!("This is a trace-1"); eprintln!( "[{}] ===== truncate and rewrite; set to error", super::test_utils::now_local() ); { let mut file = std::fs::OpenOptions::new() .truncate(true) .write(true) .open(&specfile) .unwrap(); file.write_all( "\ global_level = 'error'\n\ [modules]\n\ " .as_bytes(), ) .unwrap(); } std::thread::sleep(std::time::Duration::from_millis(WAIT_MILLIS)); error!("This is an error-2"); warn!("This is a warning-2"); info!("This is an info-2"); debug!("This is a debug-2"); trace!("This is a trace-2"); } } flexi_logger-0.29.8/tests/test_trigger_rotation.rs000064400000000000000000000055521046102023000205320ustar 00000000000000mod test_utils; use std::{ fs::File, io::{BufRead, BufReader}, ops::Add, path::Path, }; use flexi_logger::{Cleanup, Criterion, Duplicate, FileSpec, Logger, Naming}; use glob::glob; use log::*; const COUNT: u8 = 4; #[test] fn test_rotate_naming_variants() { if let Some(value) = test_utils::dispatch(COUNT) { work(value) } } fn work(value: u8) { match value { 0 => test_variant(Naming::Timestamps, Criterion::Size(800)), 1 => test_variant(Naming::TimestampsDirect, Criterion::Size(800)), 2 => test_variant(Naming::Numbers, Criterion::Size(800)), 3 => test_variant(Naming::NumbersDirect, Criterion::Size(800)), COUNT..=u8::MAX => unreachable!("asAS"), } } fn test_variant(naming: Naming, criterion: Criterion) { let directory = test_utils::dir(); std::thread::sleep(std::time::Duration::from_millis(500)); test_utils::wait_for_start_of_second(); let logger = Logger::try_with_str("trace") .unwrap() .log_to_file(FileSpec::default().directory(&directory)) .format_for_files(flexi_logger::detailed_format) .format_for_stderr(flexi_logger::detailed_format) .duplicate_to_stderr(Duplicate::Info) .rotate(criterion, naming, Cleanup::Never) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {e}")); info!("test trigger rotation",); 
    let mut line_count = 1;
    for i in 0..45 {
        if i == 12 {
            logger.trigger_rotation().unwrap();
        }
        trace!("{}", 'a');
        line_count += 1;
    }

    verify_logs(&directory, line_count, 7);
}

fn verify_logs(directory: &Path, line_count: usize, file_count: usize) {
    // read all files
    let pattern = directory.display().to_string().add("/*");
    let globresults = match glob(&pattern) {
        Err(e) => panic!("Is this ({pattern}) really a directory? Listing failed with {e}"),
        Ok(globresults) => globresults,
    };
    let mut total_line_count = 0_usize;
    let mut total_file_count = 0_usize;
    for globresult in globresults.into_iter() {
        total_file_count += 1;
        let mut current_line_count = 0_usize;
        let pathbuf = globresult.unwrap_or_else(|e| panic!("Oops - error occurred: {e}"));
        let f = File::open(&pathbuf)
            .unwrap_or_else(|e| panic!("Cannot open file {pathbuf:?} due to {e}"));
        let mut reader = BufReader::new(f);
        let mut buffer = String::new();
        while reader.read_line(&mut buffer).unwrap() > 0 {
            current_line_count += 1;
        }
        total_line_count += current_line_count;
    }
    assert_eq!(
        total_line_count, line_count,
        "wrong line count: {total_line_count} instead of {line_count}"
    );
    assert_eq!(
        total_file_count, file_count,
        "wrong file count: {total_file_count} instead of {file_count}"
    );
}
flexi_logger-0.29.8/tests/test_utils.rs000064400000000000000000000106201046102023000163000ustar 00000000000000
#![allow(dead_code)]
use chrono::{DateTime, Local};
use either::Either;
use flate2::read::GzDecoder;
#[cfg(feature = "compress")]
use std::ffi::OsStr;
use std::{
    fs::File,
    io::{BufRead, BufReader},
    ops::Add,
    path::{Path, PathBuf},
};

const CTRL_INDEX: &str = "CTRL_INDEX";

pub fn file(filename: &str) -> PathBuf {
    let mut f = dir();
    f.push(filename);
    f
}

const TS: &str = "%Y-%m-%d_%H-%M-%S";

pub fn dir() -> PathBuf {
    let mut d = PathBuf::new();
    d.push("log_files");
    add_prog_name(&mut d);
    d.push(now_local().format(TS).to_string());
    d
}

fn add_prog_name(pb: &mut PathBuf) {
    let path = PathBuf::from(std::env::args().next().unwrap());
    let filename = path.file_stem().unwrap(/*ok*/).to_string_lossy();
    let (progname, _) = filename.rsplit_once('-').unwrap_or((&filename, ""));
    pb.push(progname);
}

// Launch child processes from the same executable, set for each of them an environment variable
// with a specific number, and then return None;
// or, in a child process, find this environment variable and return its value.
pub fn dispatch(count: u8) -> Option<u8> {
    match std::env::var(CTRL_INDEX) {
        Err(_) => {
            println!("dispatcher");
            let progname = std::env::args().next().unwrap();
            let nocapture = std::env::args().any(|a| a == "--nocapture");
            for value in 0..count {
                let mut command = std::process::Command::new(progname.clone());
                if nocapture {
                    command.arg("--nocapture");
                }
                let status = command
                    .env(CTRL_INDEX, value.to_string())
                    .status()
                    .expect("Command failed to start");
                assert!(status.success());
            }
            None
        }
        Ok(value) => {
            println!("executor {value}");
            Some(value.parse().unwrap())
        }
    }
}

#[must_use]
pub fn now_local() -> DateTime<Local> {
    Local::now()
}

pub struct Stopwatch(DateTime<Local>);
impl Default for Stopwatch {
    fn default() -> Self {
        Stopwatch(now_local())
    }
}
impl Drop for Stopwatch {
    fn drop(&mut self) {
        log::info!(
            "Task executed in {} ms.",
            (now_local() - self.0).num_milliseconds()
        );
    }
}

pub fn wait_for_start_of_second() {
    loop {
        let ms = Local::now().timestamp_subsec_millis();
        if ms < 50 {
            break;
        } else {
            std::thread::sleep(std::time::Duration::from_millis((1010_u32 - ms).into()));
        }
    }
}

pub fn wait_for_end_of_second() {
    loop {
        let ms = Local::now().timestamp_subsec_millis();
        if
ms > 980 {
            break;
        } else {
            std::thread::sleep(std::time::Duration::from_millis((990_u32 - ms).into()));
        }
    }
}

// Count all log lines written in all files in the given folder;
// ".gz" files are decompressed first
pub fn count_log_lines(directory: &Path) -> usize {
    // read all files
    let pattern = directory.display().to_string().add("/*");
    let all_files = match glob::glob(&pattern) {
        Err(e) => panic!("Is this ({pattern}) really a directory? Listing failed with {e}"),
        Ok(globresults) => globresults,
    };
    let mut total_line_count = 0_usize;
    for file in all_files.into_iter() {
        let pathbuf = file.unwrap_or_else(|e| panic!("Oops - error occurred: {e}"));
        let mut reader: Either<BufReader<GzDecoder<File>>, BufReader<File>> =
            match pathbuf.extension() {
                #[cfg(feature = "compress")]
                Some(os_str) if os_str == AsRef::<OsStr>::as_ref("gz") => {
                    // unpack
                    Either::Left(BufReader::new(GzDecoder::new(
                        File::open(&pathbuf)
                            .unwrap_or_else(|e| panic!("Cannot open file {pathbuf:?} due to {e}")),
                    )))
                }
                _ => {
                    Either::Right(BufReader::new(File::open(&pathbuf).unwrap_or_else(|e| {
                        panic!("Cannot open file {pathbuf:?} due to {e}")
                    })))
                }
            };
        let mut buffer = String::new();
        let mut line_count = 0_usize;
        while reader.read_line(&mut buffer).unwrap() > 0 {
            line_count += 1;
        }
        total_line_count += line_count;
    }
    total_line_count
}
flexi_logger-0.29.8/tests/test_windows_line_ending.rs000064400000000000000000000037411046102023000211730ustar 00000000000000
mod test_utils;

use flexi_logger::{detailed_format, FileSpec, Logger, LoggerHandle};
use log::*;

#[test]
fn test_mods() {
    let handle: LoggerHandle = Logger::try_with_env_or_str(
        "info, test_windows_line_ending::mymod1=debug, test_windows_line_ending::mymod2=error",
    )
    .unwrap()
    .format(detailed_format)
    .log_to_file(
        FileSpec::default()
            .suppress_timestamp()
            .directory(self::test_utils::dir()),
    )
    .use_windows_line_ending()
    .start()
    .unwrap_or_else(|e| panic!("Logger initialization failed with {e}"));

    error!("This is an error message");
    warn!("This is a warning");
    info!("This is an info message");
    debug!("This is a debug message - you must not see it!");
    trace!("This is a trace message - you must not see it!");

    mymod1::test_traces();
    mymod2::test_traces();

    handle.validate_logs(&[
        ("ERROR", "test_windows_line_ending", "error"),
        ("WARN", "test_windows_line_ending", "warning"),
        ("INFO", "test_windows_line_ending", "info"),
        ("ERROR", "test_windows_line_ending", "error"),
        ("WARN", "test_windows_line_ending", "warning"),
        ("INFO", "test_windows_line_ending", "info"),
        ("DEBUG", "test_windows_line_ending", "debug"),
        ("ERROR", "test_windows_line_ending", "error"),
    ]);
}

mod mymod1 {
    use log::*;
    pub fn test_traces() {
        error!("This is an error message");
        warn!("This is a warning");
        info!("This is an info message");
        debug!("This is a debug message");
        trace!("This is a trace message - you must not see it!");
    }
}

mod mymod2 {
    use log::*;
    pub fn test_traces() {
        error!("This is an error message");
        warn!("This is a warning - you must not see it!");
        info!("This is an info message - you must not see it!");
        debug!("This is a debug message - you must not see it!");
        trace!("This is a trace message - you must not see it!");
    }
}
flexi_logger-0.29.8/tests/test_write_modes.rs000064400000000000000000000100421046102023000174600ustar 00000000000000
mod test_utils;

use flexi_logger::{opt_format, FileSpec, Logger, WriteMode};
use log::*;

const COUNT: u8 = 12;

#[test]
fn test_write_modes() {
    if let Some(value) = test_utils::dispatch(COUNT) {
        work(value)
    }
}

fn work(value: u8) {
    let logger = Logger::try_with_str("info").unwrap().format(opt_format);
    let
logger = match value { 0 => { println!("stdout, direct"); logger.log_to_stdout().write_mode(WriteMode::Direct) } 1 => { println!("stdout, buffer+flush"); logger.log_to_stdout().write_mode(WriteMode::BufferAndFlush) } 2 => { #[cfg(feature = "async")] { println!("stdout, async"); logger.log_to_stdout().write_mode(WriteMode::Async) } #[cfg(not(feature = "async"))] { println!("!!! nothing done !!!"); return; } } 3 => { println!("stdout, buffer no flush"); logger .log_to_stdout() .write_mode(WriteMode::BufferDontFlush) } 4 => { println!("stderr, direct"); logger.log_to_stderr().write_mode(WriteMode::Direct) } 5 => { println!("stderr, buffer+flush"); logger.log_to_stderr().write_mode(WriteMode::BufferAndFlush) } 6 => { #[cfg(feature = "async")] { println!("stderr, async"); logger.log_to_stderr().write_mode(WriteMode::Async) } #[cfg(not(feature = "async"))] { println!("!!! nothing done !!!"); return; } } 7 => { println!("stderr, buffer no flush"); logger .log_to_stderr() .write_mode(WriteMode::BufferDontFlush) } 8 => { println!("file, direct"); logger .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .write_mode(WriteMode::Direct) } 9 => { println!("file, buffer+flush"); logger .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .write_mode(WriteMode::BufferAndFlush) } 10 => { #[cfg(feature = "async")] { println!("file, async"); logger .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .write_mode(WriteMode::Async) } #[cfg(not(feature = "async"))] { println!("!!! nothing done !!!"); return; } } 11 => { println!("file, buffer no flush"); logger .log_to_file( FileSpec::default() .suppress_timestamp() .directory(self::test_utils::dir()), ) .write_mode(WriteMode::BufferDontFlush) } COUNT..=u8::MAX => { unreachable!("got unexpected value {}", value) } }; let handle = logger.start().unwrap_or_else(|e| panic!("{e}, {e:?}")); error!("This is an error message"); warn!("This is a warning"); info!("This is an info message"); debug!("This is a debug message - you must not see it!"); trace!("This is a trace message - you must not see it!"); handle.validate_logs(&[ ("ERROR", "test_write_mode", "error"), ("WARN", "test_write_mode", "warning"), ("INFO", "test_write_mode", "info"), ]); }
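
// A minimal sketch (illustrative only, not part of the crate's test suite): every write-mode
// variant above follows the same shape - build a `Logger`, choose a sink, choose a `WriteMode`,
// call `start()`, and keep the returned handle alive so buffered output gets flushed. The helper
// below condenses that shape; its name `minimal_buffered_stdout_logger` is invented for this
// sketch, but every builder call it uses appears verbatim in the tests above.
#[allow(dead_code)]
fn minimal_buffered_stdout_logger() -> flexi_logger::LoggerHandle {
    use flexi_logger::{Logger, WriteMode};

    Logger::try_with_str("info")
        .unwrap()
        // log to stdout, buffering output and flushing it periodically and on shutdown
        .log_to_stdout()
        .write_mode(WriteMode::BufferAndFlush)
        .start()
        .unwrap_or_else(|e| panic!("Logger initialization failed with {e}"))
}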