Source archive: libvpl-tools-1.3.0 (commit 82eab13ecec99f34e0f1d5dac490611b604406c9)

==> libvpl-tools-1.3.0/.clang-format <==

---
Language: Cpp
AccessModifierOffset: -4
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: true
AlignConsecutiveDeclarations: false
AlignConsecutiveMacros: true
AlignEscapedNewlines: Left
AlignOperands: true
AlignTrailingComments: false
AllowAllArgumentsOnNextLine: false
AllowAllConstructorInitializersOnNextLine: false
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: Never
AllowShortLambdasOnASingleLine: None
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: true
BinPackArguments: false
BinPackParameters: false
BraceWrapping:
  AfterCaseLabel: false
  AfterClass: false
  AfterControlStatement: false
  AfterEnum: false
  AfterFunction: false
  AfterNamespace: false
  AfterObjCDeclaration: false
  AfterStruct: false
  AfterUnion: false
  AfterExternBlock: false
  BeforeCatch: true
  BeforeElse: true
  IndentBraces: false
  SplitEmptyFunction: false
  SplitEmptyRecord: false
  SplitEmptyNamespace: false
BreakBeforeBinaryOperators: None
BreakBeforeBraces: Custom
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakInheritanceList: BeforeColon
BreakStringLiterals: false
ColumnLimit: 100
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 8
ContinuationIndentWidth: 4
Cpp11BracedListStyle: false
DerivePointerAlignment: false
DisableFormat: false
FixNamespaceComments: true
ForEachMacros:
  - foreach
  - Q_FOREACH
  - BOOST_FOREACH
IncludeBlocks: Preserve
IncludeCategories:
  - Regex: '^'
    Priority: 2
  - Regex: '^<.*\.h>'
    Priority: 1
  - Regex: '^<.*'
    Priority: 2
  - Regex: '.*'
    Priority: 3
IncludeIsMainRegex: '([-_](test|unittest))?$'
IndentCaseLabels: true
IndentPPDirectives: BeforeHash
IndentWidth: 4
IndentWrappedFunctionNames: false
KeepEmptyLinesAtTheStartOfBlocks: false
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 1
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyBreakTemplateDeclaration: 10
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PointerAlignment: Right
RawStringFormats:
  - Language: Cpp
    Delimiters:
      - cc
      - CC
      - cpp
      - Cpp
      - CPP
      - 'c++'
      - 'C++'
    CanonicalDelimiter: ''
    BasedOnStyle: google
  - Language: TextProto
    Delimiters:
      - pb
      - PB
      - proto
      - PROTO
    EnclosingFunctions:
      - EqualsProto
      - EquivToProto
      - PARSE_PARTIAL_TEXT_PROTO
      - PARSE_TEST_PROTO
      - PARSE_TEXT_PROTO
      - ParseTextOrDie
      - ParseTextProtoOrDie
    CanonicalDelimiter: ''
    BasedOnStyle: google
ReflowComments: false
SortIncludes: true
SortUsingDeclarations: false
SpaceAfterCStyleCast: false
SpaceAfterLogicalNot: false
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceBeforeCpp11BracedList: false
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeParens: ControlStatements
SpaceBeforeRangeBasedForLoopColon: true
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 1
SpacesInAngles: false
SpacesInContainerLiterals: false
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp11
StatementMacros:
  - Q_UNUSED
  - QT_REQUIRE_VERSION
TabWidth: 1
UseTab: Never
...
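A minimal way to exercise the .clang-format file above, assuming clang-format 10 or newer is installed and the source paths below are placeholders for real C++ files in this repository:

    # Check formatting without modifying files; --Werror turns drift into a failure.
    clang-format --style=file --dry-run --Werror tools/cli/*.cpp
    # Reformat in place once the reported diff looks right.
    clang-format --style=file -i tools/cli/*.cpp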
==> libvpl-tools-1.3.0/.cmake-format <==

# --------------------------
# General Formatting Options
# --------------------------
# How wide to allow formatted cmake files
line_width = 80

# How many spaces to tab for indent
tab_size = 2

# If an argument group contains more than this many sub-groups (parg or kwarg
# groups), then force it to a vertical layout.
max_subgroups_hwrap = 2

# If a positional argument group contains more than this many arguments, then
# force it to a vertical layout.
max_pargs_hwrap = 6

# If true, separate flow control names from their parentheses with a space
separate_ctrl_name_with_space = False

# If true, separate function names from parentheses with a space
separate_fn_name_with_space = False

# If a statement is wrapped to more than one line, then dangle the closing
# parenthesis on its own line.
dangle_parens = False

# If the trailing parenthesis must be 'dangled' on its own line, then align it
# to this reference: `prefix`: the start of the statement, `prefix-indent`: the
# start of the statement, plus one indentation level, `child`: align to the
# column of the arguments
dangle_align = 'prefix'

min_prefix_chars = 4

# If the statement spelling length (including space and parenthesis) is larger
# than the tab width by more than this amount, then force reject un-nested
# layouts.
max_prefix_chars = 10

# If a candidate layout is wrapped horizontally but it exceeds this many lines,
# then reject the layout.
max_lines_hwrap = 2

# What style line endings to use in the output.
line_ending = 'unix'

# Format command names consistently as 'lower' or 'upper' case
command_case = 'canonical'

# Format keywords consistently as 'lower' or 'upper' case
keyword_case = 'unchanged'

# Specify structure for custom cmake functions
additional_commands = {
  "foo": {
    "flags": [
      "BAR",
      "BAZ"
    ],
    "kwargs": {
      "HEADERS": "*",
      "SOURCES": "*",
      "DEPENDS": "*"
    }
  }
}

# A list of command names which should always be wrapped
always_wrap = []

# If true, the argument lists which are known to be sortable will be sorted
# lexicographically
enable_sort = True

# If true, the parsers may infer whether or not an argument list is sortable
# (without annotation).
autosort = False

# If a comment line starts with at least this many consecutive hash characters,
# then don't lstrip() them off. This allows for lazy hash rulers where the first
# hash char is not separated by space
hashruler_min_length = 10

# By default, if cmake-format cannot successfully fit everything into the
# desired linewidth it will apply the last, most aggressive attempt that it
# made. If this flag is True, however, cmake-format will print an error, exit
# with non-zero status code, and write out nothing
require_valid_layout = False

# A dictionary containing any per-command configuration overrides. Currently
# only `command_case` is supported.
per_command = {}

# A dictionary mapping layout nodes to a list of wrap decisions. See the
# documentation for more information.
layout_passes = {}

# --------------------------
# Comment Formatting Options
# --------------------------
# What character to use for bulleted lists
bullet_char = '*'

# What character to use as punctuation after numerals in an enumerated list
enum_char = '.'

# enable comment markup parsing and reflow
enable_markup = True

# If comment markup is enabled, don't reflow the first comment block in each
# listfile. Use this to preserve formatting of your copyright/license
# statements.
first_comment_is_literal = False

# If comment markup is enabled, don't reflow any comment block which matches
# this (regex) pattern. Default is `None` (disabled).
literal_comment_pattern = None

# Regular expression to match preformat fences in comments
# default=r'^\s*([`~]{3}[`~]*)(.*)$'
fence_pattern = '^\\s*([`~]{3}[`~]*)(.*)$'

# Regular expression to match rulers in comments
# default=r'^\s*[^\w\s]{3}.*[^\w\s]{3}$'
ruler_pattern = '^\\s*[^\\w\\s]{3}.*[^\\w\\s]{3}$'

# If true, then insert a space between the first hash char and remaining hash
# chars in a hash ruler, and normalize its length to fill the column
canonicalize_hashrulers = True

# ---------------------------------
# Miscellaneous Options
# ---------------------------------
# If true, emit the unicode byte-order mark (BOM) at the start of the file
emit_byteorder_mark = False

# Specify the encoding of the input file. Defaults to utf-8.
input_encoding = 'utf-8'

# Specify the encoding of the output file. Defaults to utf-8. Note that cmake
# only claims to support utf-8 so be careful when using anything else
output_encoding = 'utf-8'
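The .cmake-format file above is consumed by the cmake-format tool from the cmakelang project. A sketch of a typical check/fix cycle (the package name and the file list here are assumptions, not something the repo states):

    # Install the formatter; the project ships as 'cmakelang' on PyPI.
    pip3 install cmakelang
    # Report listfiles that deviate from .cmake-format without rewriting them.
    cmake-format --check CMakeLists.txt
    # Rewrite a listfile in place using the settings above.
    cmake-format -i CMakeLists.txt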
==> libvpl-tools-1.3.0/.gitattributes <==

# do not rely on windows users to remember to set core.autocrlf=true
* text=auto

# force line endings for scripts
*.sh eol=lf
*.sh.in eol=lf
*.py eol=lf
script/* eol=lf
*.bat eol=crlf

# force lf endings for yaml
*.yaml eol=lf
*.yml eol=lf
.clang-format eol=lf

# force encoding for json
*.json encoding=utf-8

# force lf endings for CMake files
CMakeLists.* eol=lf
*.cmake eol=lf
*.cmake.in eol=lf

==> libvpl-tools-1.3.0/.github/CODEOWNERS <==

# Each line is a file pattern followed by one or more owners.
# last matching pattern takes the most precedence.
* @intel-innersource/onevpl-maintainer

==> libvpl-tools-1.3.0/.github/pull_request_template.md <==

## Issue

## Solution

## How Tested

==> libvpl-tools-1.3.0/.github/workflows/acceptance.yml <==

---
name: Acceptance

permissions: read-all

on:
  workflow_call:
    inputs:
      os:
        description: 'Operating system'
        required: false
        default: 'Linux'
        type: string
      build_type:
        description: 'Build type (Release, Debug, RelWithDebInfo, etc.)'
        required: false
        default: 'Release'
        type: string
      ref:
        description: 'The branch, tag or SHA of tests'
        required: false
        default: ''
        type: string
      test_ref:
        description: 'The branch, tag or SHA of tests'
        required: false
        default: ''
        type: string
      lib_artifact:
        description: 'Artifact containing lib+dev package'
        required: true
        type: string
      tools_artifact:
        description: 'Artifact containing tools package'
        required: true
        type: string
      gpu:
        description: 'GPU Family'
        required: false
        default: 'gen12.5'
        type: string
      distro_family:
        description: 'OS distro family'
        required: false
        type: string
      distro_version:
        description: 'OS distro version'
        required: false
        type: string

env:
  DISTRO_FAMILY: ${{ inputs.distro_family || 'windows' }}
  DISTRO_VERSION: ${{ inputs.distro_version || '11' }}
  IS_DEBUG: ${{ inputs.build_type == 'Debug' && 1 || 0 }}
  GPU: ${{ inputs.gpu || 'gen12.5' }}

jobs:
  acceptance:
    runs-on:
      - "self-hosted"
      - "${{ inputs.os || 'Linux' }}"
      - "${{ inputs.gpu || 'gen12.5' }}"
    steps:
      - name: Cleanup workspace (Linux)
        if: always() && runner.os == 'Linux'
        run: sudo rm -rf ..?* .[!.]* *
      - name: Cleanup workspace (Windows)
        if: always() && runner.os == 'Windows'
        run: Remove-Item -Recurse -Force .\*
      - name: Checkout tests
        uses: actions/checkout@v4
        with:
          path: tests
          ref: ${{ inputs.test_ref || 'main' }}
          repository: ${{ vars.TEST_REPO }}
          token: ${{ secrets.TEST_REPO_TOKEN }}
      - name: Download lib+dev package
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.lib_artifact }}
          path: package
      - name: Download tools package
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.tools_artifact }}
          path: package
      - name: Extract package (Linux)
        if: success() && runner.os == 'Linux'
        run: unzip -o package/\*.zip -d _install
      - name: Extract package (Windows)
        if: success() && runner.os == 'Windows'
        run: >
          Get-ChildItem 'package' -Filter *.zip
          | Expand-Archive -DestinationPath '_install' -Force
      - name: Build Docker image for GPU testing (Linux)
        if: success() && runner.os == 'Linux'
        run: |
          cd tests/environments
          DISTRO=${{ env.DISTRO_FAMILY }}${{ env.DISTRO_VERSION }}
          docker build . \
            -f Dockerfile.$DISTRO.gputest -t vpl_gputestenv:$DISTRO
      - name: Test package (Linux)
        if: success() && runner.os == 'Linux'
        run: |
          cat >test.sh < ${{ format('{0}{1}-{2}-{3}-acceptance',
                              env.DISTRO_FAMILY,
                              env.DISTRO_VERSION,
                              inputs.build_type || 'Release',
                              inputs.gpu || 'gen12.5' ) }}
          path: tests/logs/*
      - name: Cleanup workspace (Linux)
        if: always() && runner.os == 'Linux'
        run: sudo rm -rf ..?* .[!.]* *
      - name: Cleanup workspace (Windows)
        if: always() && runner.os == 'Windows'
        run: Remove-Item -Recurse -Force .\*

==> libvpl-tools-1.3.0/.github/workflows/bdba.yml <==

---
name: Vulnerability scan

permissions: read-all

on:
  workflow_call:
    inputs:
      output_prefix:
        description: 'Prefix to add to output artifacts'
        required: false
        default: ''
        type: string
      version:
        description: 'String indicating version of uploaded scan'
        required: true
        type: string
      pattern:
        description: 'Glob pattern to the artifacts that should be scanned'
        required: true
        type: string

jobs:
  scan:
    runs-on: [self-hosted, linux]
    steps:
      - name: Cleanup workspace
        run: sudo rm -rf ..?* .[!.]* *
      - name: Checkout scripts and dispositions
        uses: actions/checkout@v4
        with:
          path: source
      - name: Download artifacts to scan
        uses: actions/download-artifact@v4
        with:
          path: bdba
          pattern: ${{ inputs.pattern }}
          merge-multiple: true
      - name: Create archive to scan
        run: |
          # cp infrastructure/config/.bdba.yaml bdba/
          pushd bdba
          zip --symlinks -r ../vpl-${{ inputs.output_prefix }}.zip .
          popd
      - name: Build Docker image
        run: >
          docker build "source/.github/workflows/bdba"
          -f "source/.github/workflows/bdba/Dockerfile.ubuntu.bdba"
          -t vpl_bdba:ubuntu
      - name: Scan package
        run: |
          cat >scan.sh < ${{ inputs.output_prefix }}results.json
          EOL
          chmod a+x scan.sh
          docker run --rm -v $(pwd):/tmp/work -w /tmp/work \
            vpl_bdba:ubuntu ./scan.sh
      - name: Upload scan results
        uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: ${{ inputs.output_prefix }}bdba-scan
          path: |
            *.csv
            *.json
            *.pdf
      - name: Cleanup workspace
        run: sudo rm -rf ..?* .[!.]* *

==> libvpl-tools-1.3.0/.github/workflows/bdba/Dockerfile.ubuntu.bdba <==

# ==============================================================================
# Copyright (C) Intel Corporation
#
# SPDX-License-Identifier: MIT
# ==============================================================================
ARG DOCKER_REGISTRY
FROM ${DOCKER_REGISTRY}ubuntu:22.04

WORKDIR /setup
COPY requirements.txt requirements.txt
COPY packages.txt packages.txt

ARG DEBIAN_FRONTEND=noninteractive
ENV PIP_ROOT_USER_ACTION=ignore
RUN apt-get update \
    && xargs -a packages.txt apt-get install -y --no-install-recommends \
    && rm -rf /var/lib/apt/lists/* \
    && pip3 install --no-cache-dir -U -r requirements.txt \
    && git config --global safe.directory '*'

HEALTHCHECK CMD python3 --version || exit 1
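Building and smoke-testing the BDBA image outside of CI can be sketched as follows; this mirrors the docker build step in bdba.yml above, run from a local checkout root, and the build context must contain packages.txt and requirements.txt because of the two COPY lines:

    # Build from the directory that holds the Dockerfile and its COPY inputs.
    docker build .github/workflows/bdba \
        -f .github/workflows/bdba/Dockerfile.ubuntu.bdba \
        -t vpl_bdba:ubuntu
    # The HEALTHCHECK just runs python3, so this is a quick sanity probe.
    docker run --rm vpl_bdba:ubuntu python3 --version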
==> libvpl-tools-1.3.0/.github/workflows/bdba/bdba.py <==

#!/usr/bin/env python
############################################################################
# Copyright (C) Intel Corporation
#
# SPDX-License-Identifier: MIT
############################################################################
"""Black Duck Binary Analysis Tool

This tool uploads binaries to BDBA for scanning and downloads the results
needed for SDL. It returns 0 on success.
"""

import argparse
import logging
import sys
import os
from contextlib import contextmanager
import json
import time

import requests
from requests.adapters import HTTPAdapter
from urllib3.util import Retry

PROGNAME = "bdba"
SERVER = "https://bdba001.icloud.intel.com"
POLLING_INTERVAL = 10


def fail(*args, **kwargs):
    """Exit with error message."""
    print(
        sys.argv[0] + ": ",
        end='',
        file=sys.stderr,
    )
    print(*args, file=sys.stderr, flush=True, **kwargs)
    sys.exit(-1)


@contextmanager
def restrict_output():
    """Define context manager to restrict output."""
    with open(os.devnull, "w", encoding='utf-8') as devnull:
        standard_error = sys.stderr
        sys.stderr = devnull
        try:
            yield devnull
        finally:
            sys.stderr = standard_error


def requests_with_retry(retries=3,
                        backoff_factor=0.3,
                        status_forcelist=(500, 502, 504),
                        session=None):
    """Return request session with retry."""
    session = session or requests.Session()
    retry = Retry(total=retries,
                  read=retries,
                  connect=retries,
                  backoff_factor=backoff_factor,
                  status_forcelist=status_forcelist)
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session


def format_attributes(**attributes):
    """Return a string of comma-separated key-value pairs."""
    return ", ".join(f"{param}: {value}"
                     for param, value in attributes.items())


def upload(filename, credentials, server_url=SERVER, **kwargs):
    """Upload a new application and return product_id."""
    logging.debug("Uploading application to server")
    if not os.path.isfile(filename):
        fail(f"cannot access `{filename}`: No such file")
    archive_name = os.path.basename(filename)
    headers = {}
    group = ''
    if 'group' in kwargs:
        group = kwargs['group']
        headers['Group'] = group
    if 'version' in kwargs:
        version = kwargs['version']
        headers['Version'] = version
    try:
        with restrict_output():
            with open(filename, 'rb') as binary:
                response_upload = requests_with_retry().put(
                    f'{server_url}/api/upload/{archive_name}',
                    data=binary,
                    auth=(credentials['user'], credentials['password']),
                    verify=False,
                    headers=headers)
            response_upload.raise_for_status()
        logging.info("%s has been uploaded to %s %s", archive_name,
                     server_url, headers)
        logging.debug("response: %s", response_upload.text)
    except requests.exceptions.RequestException as err:
        fail(err)
    product_id = report_url = product_name = None
    try:
        parsed_string = json.loads(response_upload.text)
        product_id = parsed_string['results']['product_id']
        report_url = parsed_string['results']['report_url']
        product_name = os.path.splitext(
            os.path.basename(parsed_string['results']['filename']))[0]
    except json.JSONDecodeError:
        fail(f"failed to parse response: "
             f"Product ID: '{product_id}' "
             f"Product URL: '{report_url}' "
             f"Product name: '{product_name}'")
    else:
        logging.info(
            "response: Product ID: '%s' Product URL: '%s' Product name: '%s'",
            product_id, report_url, product_name)
    return product_id


def wait_for_results(product_id, credentials, server_url=SERVER):
    """Waits for results and returns parsed json string.
""" print("Waiting for analysis results to be ready", file=sys.stderr, end='', flush=True) status = "B" while status == "B": try: with restrict_output(): response_info = requests_with_retry().get( f'{server_url}/api/product/{product_id}/', auth=(credentials['user'], credentials['password']), verify=False) response_info.raise_for_status() except requests.exceptions.RequestException as err: fail(err) logging.debug("response: %s", response_info.text) try: parsed_string = json.loads(response_info.text) status = parsed_string['results']['status'] except json.JSONDecodeError: fail("failed to parse response: status {status}") if status == "B": print(".", file=sys.stderr, end='', flush=True) time.sleep(POLLING_INTERVAL) print("", file=sys.stderr, flush=True) if status == 'F': fail("Analysis failed on server") return parsed_string def get_components(product_id, credentials, server_url=SERVER, filename=None): """Retrieve components as CSV. """ if not filename: return logging.debug("Retrieving components as CSV") try: with restrict_output(): response = requests_with_retry().get( f'{server_url}/api/product/{product_id}/csv-libs', auth=(credentials['user'], credentials['password']), verify=False) response.raise_for_status() except requests.exceptions.RequestException as err: fail(err) logging.debug("response: %s", response.text) try: with open(filename, 'wb') as components_csv: components_csv.write(response.content) except IOError as err: fail(f"cannot write `{filename}`: {os.strerror(err.errno)}") def get_vulns(product_id, credentials, server_url=SERVER, filename=None): """Retrieve known vulnerabilities as CSV. """ if not filename: return logging.debug("Retrieving known vulnerabilities as CSV") try: with restrict_output(): response = requests_with_retry().get( f'{server_url}/api/product/{product_id}/csv-vulns?cvss_version=3', auth=(credentials['user'], credentials['password']), verify=False) response.raise_for_status() except requests.exceptions.RequestException as err: fail(err) logging.debug("response: %s", response.text) try: with open(filename, 'wb') as vulns_csv: vulns_csv.write(response.content) except IOError as err: fail(f"cannot write `{filename}`: {os.strerror(err.errno)}") def get_summary(product_id, credentials, server_url=SERVER, filename=None): """Retrieve analysis summary as PDF. 
""" if filename: logging.debug("Retrieving analysis summary as PDF") try: with restrict_output(): response = requests_with_retry().get( f'{server_url}/api/product/{product_id}/pdf-report?cvss_version=3', auth=(credentials['user'], credentials['password']), verify=False) response.raise_for_status() except requests.exceptions.RequestException as err: fail(err) logging.debug("response: %s", response.text) try: with open(filename, 'wb') as report_csv: report_csv.write(response.content) except IOError as err: fail(f"cannot write `{filename}`: {os.strerror(err.errno)}") def main(): """Gather our code in a main() function""" parser = argparse.ArgumentParser( prog=PROGNAME, description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('BINARY', type=str) parser.add_argument('-u', '--user', type=str, required=True) parser.add_argument('-p', '--password', type=str, required=True) parser.add_argument('-g', '--group', type=str, required=True) parser.add_argument('-V', '--version', type=str) parser.add_argument('-r', '--resultfile', type=str) parser.add_argument('-c', '--componentfile', type=str) parser.add_argument('-v', '--vulnfile', type=str) parser.add_argument('--debug', action='store_true') args = parser.parse_args() if args.debug: logging.basicConfig(format='%(levelname)s:%(message)s', stream=sys.stderr, level=logging.DEBUG) else: logging.basicConfig(format='%(levelname)s:%(message)s', stream=sys.stderr, level=logging.INFO) credentials = {'user': args.user, 'password': args.password} product_id = upload(args.BINARY, credentials, group=args.group, version=args.version) #product_id = 2720487 results = wait_for_results(product_id, credentials) print(results) verdict = results['results']['summary']['verdict'][ 'short'] + '\n' + results['results']['summary']['verdict']['detailed'] print(f"verdict: {verdict}", file=sys.stderr) get_vulns(product_id, credentials, filename=args.vulnfile) get_components(product_id, credentials, filename=args.componentfile) get_summary(product_id, credentials, filename=args.resultfile) if not verdict.startswith("Pass"): sys.exit(1) sys.exit(0) # Standard boilerplate to call the main() function to begin # the program. if __name__ == '__main__': main() libvpl-tools-1.3.0/.github/workflows/bdba/packages.txt000066400000000000000000000000451473010523400227430ustar00rootroot00000000000000git python3 python3-pip python3-venv libvpl-tools-1.3.0/.github/workflows/bdba/requirements.txt000066400000000000000000000000111473010523400237010ustar00rootroot00000000000000requests libvpl-tools-1.3.0/.github/workflows/cmake.yml000066400000000000000000000216121473010523400213420ustar00rootroot00000000000000--- name: CMake permissions: read-all on: workflow_dispatch: inputs: os: description: 'Operating system' required: false default: 'Linux' type: string build_type: description: 'Build type (Release, Debug, RelWithDebInfo, etc.)' required: false default: 'Release' type: string repository: description: 'Repository name with owner. 
==> libvpl-tools-1.3.0/.github/workflows/cmake.yml <==

---
name: CMake

permissions: read-all

on:
  workflow_dispatch:
    inputs:
      os:
        description: 'Operating system'
        required: false
        default: 'Linux'
        type: string
      build_type:
        description: 'Build type (Release, Debug, RelWithDebInfo, etc.)'
        required: false
        default: 'Release'
        type: string
      repository:
        description: 'Repository name with owner. For example, actions/checkout'
        required: false
        default: ''
        type: string
      ref:
        description: 'The branch, tag or SHA to build'
        required: false
        default: ''
        type: string
      artifact_name:
        description: 'Artifact name'
        required: false
        type: string
      run_tests:
        description: 'Run Tests'
        required: false
        default: false
        type: boolean
      no_artifacts:
        description: 'Do not upload artifacts'
        required: false
        default: false
        type: boolean
      dependent_artifact:
        description: 'Name of artifact to treat as dependency'
        required: false
        default: ''
        type: string
      configure_options:
        description: 'Extra options for CMake configure stage'
        required: false
        default: ''
        type: string
      docker_opts:
        description: 'extra options for docker build'
        required: false
        type: string
    secrets:
      token:
        description: 'Personal access token (PAT) used to fetch the repository'
        required: false
  workflow_call:
    inputs:
      os:
        description: 'Operating system'
        required: false
        default: 'Linux'
        type: string
      build_type:
        description: 'Build type (Release, Debug, RelWithDebInfo, etc.)'
        required: false
        default: 'Release'
        type: string
      repository:
        description: 'Repository name with owner. For example, actions/checkout'
        required: false
        default: ''
        type: string
      ref:
        description: 'The branch, tag or SHA to build'
        required: false
        default: ''
        type: string
      artifact_name:
        description: 'Artifact name'
        required: false
        type: string
      run_tests:
        description: 'Run Tests'
        required: false
        default: false
        type: boolean
      no_artifacts:
        description: 'Do not upload artifacts'
        required: false
        default: false
        type: boolean
      dependent_artifact:
        description: 'Name of artifact to treat as dependency'
        required: false
        default: ''
        type: string
      configure_options:
        description: 'Extra options for CMake configure stage'
        required: false
        default: ''
        type: string
      docker_opts:
        description: 'extra options for docker build'
        required: false
        type: string
    secrets:
      token:
        description: 'Personal access token (PAT) used to fetch the repository'
        required: false

jobs:
  cmake:
    name: CMake ${{ inputs.os }} ${{ inputs.build_type }}
    runs-on: [self-hosted, "${{ inputs.os || 'Linux' }}"]
    env:
      dockerfile: 'docker-env/script/Dockerfile.rhel.build'
      container_name: 'vpl_build:rhel'
    steps:
      - name: Cleanup workspace (Linux)
        if: always() && runner.os == 'Linux'
        run: sudo rm -rf ..?* .[!.]* *
      - name: Cleanup workspace (Windows)
        if: always() && runner.os == 'Windows'
        run: Remove-Item -Recurse -Force .\*
      - name: Checkout PR branch
        uses: actions/checkout@v4
        with:
          path: source
          repository: ${{ inputs.repository }}
          ref: ${{ inputs.ref }}
          token: ${{ secrets.token || github.token }}
      - name: Checkout docker environment (Linux)
        if: always() && runner.os == 'Linux'
        uses: actions/checkout@v4
        with:
          path: docker-env
      - name: Install dependencies (Linux)
        if: always() && runner.os == 'Linux'
        run: >
          docker build ${{ inputs.docker_opts }}
          -f "$dockerfile"
          -t $container_name
          "source/script"
      - name: Install dependencies (Windows)
        if: always() && runner.os == 'Windows'
        shell: cmd
        run: |
          echo on
          call source\script\bootstrap.bat
          if %errorlevel% neq 0 exit /b %errorlevel%
      - name: Download dependent artifact
        if: inputs.dependent_artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.dependent_artifact }}
          path: deps
      - name: Extract dependent artifact (Linux)
        if: inputs.dependent_artifact && runner.os == 'Linux'
        run: unzip deps/*.zip -d _deps
      - name: Extract dependent artifact (Windows)
        if: inputs.dependent_artifact && runner.os == 'Windows'
        run: Expand-Archive -Force -Path deps\*.zip -DestinationPath _deps
      - name: Configure (Linux)
        if: always() && runner.os == 'Linux'
        run: |
          cat <<'EOL' > configure.sh
          #!/bin/bash
          set -o errexit
          deps_path=$(realpath _deps)
          cmake -B "source/_build" -S "source" \
            -DBUILD_TESTS=${{ inputs.run_tests && 'ON' || 'OFF' }} \
            -DCMAKE_BUILD_TYPE=${{ inputs.build_type || 'Release' }} \
            -DCMAKE_INSTALL_LIBDIR=lib \
            -DCMAKE_PREFIX_PATH=$deps_path \
            ${{ inputs.configure_options }} \
            -DENABLE_WARNING_AS_ERROR=ON
          EOL
          chmod +x configure.sh
          docker run --rm -v $(pwd):/tmp -w /tmp vpl_build:rhel ./configure.sh
      - name: Configure (Windows)
        if: always() && runner.os == 'Windows'
        shell: cmd
        run: |
          @echo off
          if exist _deps (
            pushd _deps
            for /f %%i in ('cd') do set deps_path=%%i
            popd
          )
          @echo on
          cmake -B "source\_build" ^
            -S "source" ^
            -DBUILD_TESTS=${{ inputs.run_tests && 'ON' || 'OFF' }} ^
            -DCMAKE_PREFIX_PATH=%deps_path% ^
            ${{ inputs.configure_options }} ^
            -DENABLE_WARNING_AS_ERROR=ON
          if %errorlevel% neq 0 exit /b %errorlevel%
      - name: Build (Linux)
        if: runner.os == 'Linux'
        run: |
          cat <<'EOL' > build.sh
          #!/bin/bash
          set -o errexit
          cmake --build "source/_build" --verbose --parallel $(nproc)
          pushd "source/_build"
          cpack .
          popd
          EOL
          chmod +x build.sh
          docker run --rm -v $(pwd):/tmp -w /tmp vpl_build:rhel ./build.sh
      - name: Build (Windows)
        if: runner.os == 'Windows'
        shell: cmd
        run: |
          echo on
          cmake --build "source\_build" ^
            --config ${{ inputs.build_type || 'Release' }} ^
            --verbose ^
            --parallel %NUMBER_OF_PROCESSORS%
          if %errorlevel% neq 0 exit /b %errorlevel%
          cmake --build "source\_build" ^
            --config ${{ inputs.build_type || 'Release' }} ^
            --target package
          if %errorlevel% neq 0 exit /b %errorlevel%
      - name: Upload build
        uses: actions/upload-artifact@v4
        if: (success() || failure()) && ! inputs.no_artifacts
        with:
          name: >
            ${{ inputs.artifact_name
                || format('{0}-{1}-build',
                          inputs.os || 'Linux',
                          inputs.build_type || 'Release') }}
          # path: source/_build/*.zip
          path: source/_build/*-all.zip
      - name: Test (Linux)
        if: runner.os == 'Linux' && inputs.run_tests
        run: |
          cat <<'EOL' > test.sh
          #!/bin/bash
          set -o errexit
          ctest --test-dir "source/_build" \
            -C ${{ inputs.build_type || 'Release' }} \
            --output-on-failure \
            -E .*-test \
            --output-junit Testing/linux.xml
          EOL
          chmod +x test.sh
          docker run --rm -v $(pwd):/tmp -w /tmp vpl_build:rhel ./test.sh
      - name: Test (Windows)
        if: runner.os == 'Windows' && inputs.run_tests
        shell: cmd
        run: |
          echo on
          ctest --test-dir "source\_build" ^
            -C ${{ inputs.build_type || 'Release' }} ^
            --output-on-failure ^
            -E .*-test ^
            --output-junit Testing/windows.xml
          if %errorlevel% neq 0 exit /b %errorlevel%
      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: >
          (success() || failure())
          && inputs.run_tests
          && ! inputs.no_artifacts
        with:
          name: >
            ${{ format('{0}-utests', inputs.artifact_name ) }}
          path: source/_build/Testing/*.xml
      - name: Cleanup workspace (Linux)
        if: always() && runner.os == 'Linux'
        run: sudo rm -rf ..?* .[!.]* *
      - name: Cleanup workspace (Windows)
        if: always() && runner.os == 'Windows'
        run: Remove-Item -Recurse -Force .\*
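Because cmake.yml above declares a workflow_dispatch trigger alongside workflow_call, it can also be kicked off ad hoc with the GitHub CLI. A sketch (the ref and input values are illustrative; field names mirror the input declarations above):

    gh workflow run cmake.yml \
        --ref main \
        -f os=Linux \
        -f build_type=Release \
        -f run_tests=true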
==> libvpl-tools-1.3.0/.github/workflows/coverity.yml <==

---
name: Coverity Scan

permissions: read-all

on:
  workflow_dispatch:
    inputs:
      output_prefix:
        description: 'Prefix to add to output artifacts'
        required: false
        default: ''
        type: string
      os:
        description: 'Operating system'
        required: false
        default: 'linux'
        type: string
      extra_opts:
        description: 'Extra Coverity options'
        required: false
        default: ''
        type: string
      dependent_artifact:
        description: 'Name of artifact to treat as dependency'
        required: false
        default: ''
        type: string
      configure_options:
        description: 'Extra options for CMake configure stage'
        required: false
        default: ''
        type: string
  workflow_call:
    inputs:
      output_prefix:
        description: 'Prefix to add to output artifacts'
        required: false
        default: ''
        type: string
      os:
        description: 'Operating system'
        required: false
        default: 'linux'
        type: string
      extra_opts:
        description: 'Extra Coverity options'
        required: false
        default: ''
        type: string
      dependent_artifact:
        description: 'Name of artifact to treat as dependency'
        required: false
        default: ''
        type: string
      configure_options:
        description: 'Extra options for CMake configure stage'
        required: false
        default: ''
        type: string

jobs:
  scan:
    runs-on:
      - self-hosted
      - "${{ inputs.os }}"
      - "${{ inputs.os == 'Windows' && 'Coverity' || 'docker' }}"
    steps:
      - name: Cleanup workspace (Linux)
        if: always() && runner.os == 'Linux'
        run: sudo rm -rf ..?* .[!.]* *
      - name: Cleanup workspace (Windows)
        if: always() && runner.os == 'Windows'
        run: Remove-Item -Recurse -Force .\*
      - name: Checkout PR branch
        uses: actions/checkout@v4
        with:
          path: source
      - name: Download dependent artifact
        if: inputs.dependent_artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.dependent_artifact }}
          path: deps
      - name: Extract dependent artifact (Linux)
        if: inputs.dependent_artifact && runner.os == 'Linux'
        run: unzip deps/*.zip -d _deps
      - name: Extract dependent artifact (Windows)
        if: inputs.dependent_artifact && runner.os == 'Windows'
        run: Expand-Archive -Force -Path deps\*.zip -DestinationPath _deps
      - name: Install dependencies (Windows)
        if: always() && runner.os == 'Windows'
        shell: cmd
        run: |
          echo on
          call source\script\bootstrap.bat
          if %errorlevel% neq 0 exit /b %errorlevel%
      - name: Build vpl_build:ubuntu Docker image (Linux)
        if: success() && runner.os == 'Linux'
        run: |
          docker build "source/script" \
            -f "source/script/Dockerfile.ubuntu.build" \
            -t vpl_build:ubuntu
      - name: Build Docker image (Linux)
        if: success() && runner.os == 'Linux'
        run: |
          docker build "source/.github/workflows/coverity" \
            -f "source/.github/workflows/coverity/Dockerfile.ubuntu.coverity" \
            -t vpl_coverity:ubuntu \
            --build-arg \
              "COV_ANALYSIS_LINUX_URL=${{ vars.COV_ANALYSIS_LINUX_URL }}" \
            --build-arg \
              "COV_REPORTS_LINUX_URL=${{ vars.COV_REPORTS_LINUX_URL }}" \
            --build-arg "COV_LICENSE_URL=${{ vars.COV_LICENSE_URL }}"
      - name: Run Scan (Linux)
        if: success() && runner.os == 'Linux'
        run: |
          cat > source/auth_key.txt <
          cat > action.sh <
      - name: Summarize (Linux)
        if: failure() && runner.os == 'Linux'
        run: |
          echo '```' >> $GITHUB_STEP_SUMMARY
          cat source/_reports/text_report.txt >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
      - name: Summarize (Windows)
        if: failure() && runner.os == 'Windows'
        run: |
          get-content source/_reports/text_report.txt
          echo '```' >> $env:GITHUB_STEP_SUMMARY
          get-content source/_reports/text_report.txt `
            | Out-File -encoding ascii $env:GITHUB_STEP_SUMMARY
          echo '```' >> $env:GITHUB_STEP_SUMMARY
      - name: Record Artifacts
        uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: ${{ inputs.os }}-${{ inputs.output_prefix }}coverity-scan
          path: source/_reports/*
      - name: Cleanup workspace (Linux)
        if: always() && runner.os == 'Linux'
        run: sudo rm -rf ..?* .[!.]* *
      - name: Cleanup workspace (Windows)
        if: always() && runner.os == 'Windows'
        run: Remove-Item -Recurse -Force .\*

==> libvpl-tools-1.3.0/.github/workflows/coverity/Dockerfile.ubuntu.coverity <==

# ==============================================================================
# Copyright (C) Intel Corporation
#
# SPDX-License-Identifier: MIT
# ==============================================================================
ARG DOCKER_REGISTRY
FROM ${DOCKER_REGISTRY}maven:3-jdk-11 AS coverity_install
SHELL ["/bin/bash", "-xo", "pipefail", "-c"]

ARG COV_ANALYSIS_LINUX_URL
ARG COV_REPORTS_LINUX_URL
ARG COV_LICENSE_URL

RUN mkdir /opt/coverity \
    && curl --silent --show-error -o /tmp/cov-analysis-linux64.sh \
       -k ${COV_ANALYSIS_LINUX_URL} \
    && curl --silent --show-error -o /tmp/cov-reports-linux64.sh \
       -k ${COV_REPORTS_LINUX_URL} \
    && curl --silent --show-error -o /opt/coverity/license.dat \
       -k ${COV_LICENSE_URL} \
    && chmod 777 /tmp/*.sh \
    && /tmp/cov-reports-linux64.sh -q \
       --installation.dir=/opt/coverity/reports/ \
    && /tmp/cov-analysis-linux64.sh -q \
       --installation.dir=/opt/coverity/analysis/ \
       --license.agreement=agree \
       --license.region=0 \
       --license.type.choice=0 \
       --license.cov.path=/opt/coverity/license.dat \
       --component.sdk=false \
       --component.skip.documentation=true

ARG DOCKER_REGISTRY
HEALTHCHECK CMD ls /opt/coverity || exit 1

FROM ${DOCKER_REGISTRY}vpl_build:ubuntu
WORKDIR /setup
COPY packages.txt packages.txt

ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update \
    && xargs -a packages.txt apt-get install -y --no-install-recommends \
    && rm -rf /var/lib/apt/lists/*

ENV PATH="/opt/coverity/analysis/bin:/opt/coverity/reports/bin:${PATH}"
COPY --from=coverity_install /opt/coverity /opt/coverity

HEALTHCHECK CMD gcc --version || exit 1
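The second stage of Dockerfile.ubuntu.coverity above builds FROM ${DOCKER_REGISTRY}vpl_build:ubuntu, so the scan image can only be built after that base image exists locally; this is why coverity.yml builds the two images in order. A local reproduction might look like the following, assuming the three COV_* URLs are provided by the environment and the commands run from a checkout root:

    # 1) Base image with the project's build toolchain.
    docker build script -f script/Dockerfile.ubuntu.build -t vpl_build:ubuntu
    # 2) Coverity layer on top; the three URLs are site-specific secrets.
    docker build .github/workflows/coverity \
        -f .github/workflows/coverity/Dockerfile.ubuntu.coverity \
        -t vpl_coverity:ubuntu \
        --build-arg "COV_ANALYSIS_LINUX_URL=$COV_ANALYSIS_LINUX_URL" \
        --build-arg "COV_REPORTS_LINUX_URL=$COV_REPORTS_LINUX_URL" \
        --build-arg "COV_LICENSE_URL=$COV_LICENSE_URL"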
==> libvpl-tools-1.3.0/.github/workflows/coverity/cmdline.py <==

############################################################################
# Copyright (C) Intel Corporation
#
# SPDX-License-Identifier: MIT
############################################################################
"""commandline like tools"""
import shutil
import os
import sys
from contextlib import contextmanager
import time
import subprocess  # nosec

VERBOSE = False
if 'VERBOSE' in os.environ:
    if os.environ['VERBOSE'] not in ['']:
        VERBOSE = True


def _resolve_path(path):
    """resolve a path name from either a string, or a list of sub-paths"""
    if isinstance(path, list):
        return os.path.join(*path)
    return path


def _escape_cmd_arg(arg):
    """quote/escape an argument for a command line call so that it can be
    safely used even if it has special characters"""
    if ' ' in arg or '"' in arg:
        return '"' + arg.replace('"', '""') + '"'
    return arg


def log(message):
    """Log activity"""
    if VERBOSE:
        print(f">> {message}", file=sys.stderr)
        sys.stderr.flush()


@contextmanager
def pushd(dst):
    """change working directory"""
    cur_dir = os.getcwd()
    dest = os.path.join(cur_dir, _resolve_path(dst))
    os.chdir(dest)
    log(f'pushd {dest}')
    try:
        yield
    finally:
        log(f'popd -> {cur_dir}')
        os.chdir(cur_dir)


#pylint: disable=invalid-name
def rm(target):
    """delete a file or folder"""
    target = _resolve_path(target)
    if os.path.exists(target):
        # Delete sometimes fails if done immediately, timeout
        # is not great, but allows filesystem settings to stabilize.
        timeout = time.time() + 10
        while time.time() < timeout:
            try:
                if os.path.isfile(target):
                    log(f'rm {target}')
                    os.remove(target)
                    break
                if os.path.isdir(target):
                    log(f'rm -rf {target}')
                    shutil.rmtree(target)
                    break
            except PermissionError:
                time.sleep(1)


#pylint: disable=invalid-name
def md(target):
    """make a folder"""
    target = _resolve_path(target)
    if target and not os.path.exists(target):
        log(f'mkdir -p {target}')
        os.makedirs(target)


#pylint: disable=invalid-name
def cp(src, dest):
    """copy a file or folder"""
    src = _resolve_path(src)
    dest = _resolve_path(dest)
    if os.path.exists(src):
        rm(dest)
        md(os.path.dirname(src))
        if os.path.isfile(src):
            log(f'cp {src} {dest}')
            shutil.copyfile(src, dest)
        elif os.path.isdir(src):
            log(f'cp {src} {dest}')
            shutil.copytree(src, dest)
        else:
            raise RuntimeError("Cannot copy unknown file type")


def join_command(command):
    """Join a series of parameters into a command, escaping if needed"""
    return ' '.join([_escape_cmd_arg(argument) for argument in command])


def run_command(*args, no_throw=False, env=None):
    """Run a command"""
    if len(args) == 1:
        cmd = args[0]
    else:
        cmd = join_command(args)
    log(f'{cmd}')
    if os.name != 'nt':
        cmd = "exec bash -c '" + cmd + "'"
    with subprocess.Popen(cmd, shell=True, env=env) as proc:  # nosec
        proc.communicate()
        if not no_throw and proc.returncode != 0:
            raise RuntimeError("Error running command: " + cmd)
        return proc.returncode


def run_commands(*args, no_throw=False, env=None):
    """Run several commands"""
    commands = []
    for arg in args:
        if isinstance(arg, (str)):
            commands.append(arg)
        else:
            commands.append(join_command(arg))
    if os.name == 'nt':
        script_file = "temp.bat"
    else:
        script_file = "temp.sh"
    with open(script_file, "w", encoding="utf-8") as script:
        log('echo "')
        for cmd in commands:
            log(f'{cmd}')
            script.write(cmd + "\n")
        log(f'" > {script_file}')
    log(f'{script_file}')
    if os.name == 'nt':
        cmd = script_file
    else:
        cmd = "exec bash -c 'source " + script_file + "'"
    with subprocess.Popen(cmd, shell=True, env=env) as proc:  # nosec
        proc.communicate()
        rm(script_file)
        if not no_throw and proc.returncode != 0:
            raise RuntimeError("Error running: \n" + "\n".join(commands))
        return proc.returncode


def capture_command(*args, env=None):
    """Run a command and capture the output"""
    if len(args) == 1:
        cmd = args[0]
    else:
        cmd = join_command(args)
    log(f'{cmd}')
    if os.name != 'nt':
        cmd = "exec bash -c '" + cmd + "'"
    with subprocess.Popen(  # nosec
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            shell=True,
            env=env) as proc:
        result = proc.communicate()
        return (result[0], result[1], proc.returncode)


def capture_commands(*args, env=None):
    """Run several commands and capture the output"""
    commands = []
    for arg in args:
        if not arg:
            continue
        if isinstance(arg, (str)):
            commands.append(arg)
        else:
            commands.append(join_command(arg))
    script_file = None
    if os.name == 'nt':
        script_file = "temp.bat"
    else:
        script_file = "temp.sh"
    with open(script_file, "w", encoding="utf-8") as script:
        log('echo "')
        for cmd in commands:
            log(f'{cmd}')
            script.write(cmd + "\n")
        log(f'" > {script_file}')
    log(f'{script_file}')
    if os.name == 'nt':
        cmd = script_file
    else:
        cmd = "exec bash -c 'source " + script_file + "'"
    with subprocess.Popen(  # nosec
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            shell=True,
            env=env) as proc:
        result = proc.communicate()
    rm(script_file)
    return (result[0], result[1], proc.returncode)
==> libvpl-tools-1.3.0/.github/workflows/coverity/coverity.py <==

#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK
############################################################################
# Copyright (C) Intel Corporation
#
# SPDX-License-Identifier: MIT
############################################################################
"""
Run Coverity scan.

This tool requires that Coverity be available in the PATH for full
functionality.
"""

import argparse
from argparse import RawTextHelpFormatter
import os
import sys
import json
import xml.etree.ElementTree as ET  # nosec
from xml.dom import minidom  # nosec
from cmdline import run_command, md, capture_command


def pretty_print_xml(root):
    """Format xml for legibility"""
    serial_xml = ET.tostring(root)
    parsed_xml = minidom.parseString(serial_xml)  # nosec
    return parsed_xml.toprettyxml()


def summarize_server_state(issue, test_info):
    """Summarize state on server for an issue"""
    test_info["status"] = ""
    if "stateOnServer" in issue and issue["stateOnServer"]:
        state_on_server = issue["stateOnServer"]
        test_info["cid"] = state_on_server["cid"]
        test_info["stream"] = state_on_server["stream"]
        if "triage" in state_on_server:
            triage = state_on_server["triage"]
            test_info["status"] = triage["action"].lower()
            test_info["external_ref"] = triage["externalReference"]
    else:
        test_info["cid"] = ""
        test_info["stream"] = ""
        test_info["external_ref"] = ""


def summarize_checker(issue, test_info):
    """Summarize checker properties for an issue"""
    if "checkerProperties" in issue and issue["checkerProperties"]:
        checker_properties = issue["checkerProperties"]
        test_info["desc"] = checker_properties["subcategoryLongDescription"]
        test_info["effect"] = checker_properties["subcategoryLocalEffect"]
    else:
        test_info["desc"] = issue["checkerName"]
        test_info["effect"] = issue["type"]


def build_summary(test_info):
    """Generate a summary string based on test info"""
    summary = []
    if test_info.get("desc"):
        summary.append(test_info["desc"])
    if test_info.get("effect"):
        summary.append(test_info["effect"])
    if test_info.get("mergeKey"):
        summary.append(f"Merge Key: {test_info['mergeKey']}")
    if test_info.get("cid"):
        summary.append(f"CID: {test_info['cid']}")
    if test_info.get("external_ref"):
        summary.append(f"see: {test_info['external_ref']}")
    return '\n\n'.join(summary)


def build_message(test_info):
    """Generate a short message based on test info"""
    if test_info.get("external_ref"):
        return test_info.get("external_ref")
    return f"CID: {test_info['cid']}"


class ReportSummary:
    """Summary of coverity report"""

    def __init__(self, report, problem_statuses):
        self.problems = 0
        self.ignored = 0
        self.problem_statuses = problem_statuses
        self.suite = []
        self.summarize_report(report)

    def summarize_issue(self, issue):
        """Summarize a single issue"""
        test_info = {}
        self.suite.append(test_info)
        summarize_server_state(issue, test_info)
        test_info["ignore"] = test_info["status"] not in self.problem_statuses
        if test_info["ignore"]:
            self.ignored += 1
        else:
            self.problems += 1
        summarize_checker(issue, test_info)
        test_info['type'] = issue["checkerName"]
        test_info['file'] = issue["strippedMainEventFilePathname"]
        test_info['line'] = str(issue["mainEventLineNumber"])
        test_info['col'] = str(issue["mainEventColumnNumber"])
        test_info["name"] = ":".join(
            [test_info["file"], test_info["line"], test_info["col"]])
        test_info["mergeKey"] = issue["mergeKey"]
        test_info["summary"] = build_summary(test_info)
        test_info["message"] = build_message(test_info)

    # pylint: disable=too-many-branches
    def summarize_report(self, report):
        """summarize a report"""
        if "issues" in report and report["issues"]:
            for issue in report["issues"]:
                self.summarize_issue(issue)


def xunit_from_report(xunit, summary):
    """write an xunit report for the results"""
    testsuites = ET.Element('testsuites')
    testsuite = ET.SubElement(testsuites, "testsuite")
    testsuite.attrib["name"] = "coverity"
    suite_fail = 0
    for test in summary.suite:
        testcase = ET.SubElement(testsuite, "testcase")
        testcase.attrib["classname"] = test['type']
        testcase.attrib["name"] = test["name"]
        if test["ignore"]:
            testcase.attrib["status"] = "Skip"
            skipped = ET.SubElement(testcase, "skipped")
            skipped.text = test["summary"]
            skipped.attrib["message"] = test["message"]
        else:
            suite_fail += 1
            testcase.attrib["status"] = "Fail"
            failure = ET.SubElement(testcase, "failure")
            failure.attrib["type"] = test['type']
            failure.text = test["summary"]
            failure.attrib["message"] = test["message"]
    testsuite.attrib["tests"] = str(len(summary.suite))
    testsuite.attrib["failures"] = str(summary.problems)
    testsuites.attrib["tests"] = str(len(summary.suite))
    pretty_xunit_report = pretty_print_xml(testsuites)
    with open(xunit, "w", encoding="utf8") as xml_file:
        xml_file.write(pretty_xunit_report)


def read_command_line(cmd_line):
    """
    Read command line arguments
    """
    # Now read full arguments
    parser = argparse.ArgumentParser(description=globals()['__doc__'],
                                     formatter_class=RawTextHelpFormatter)
    parser.add_argument('--version',
                        action='version',
                        version='%(prog)s 0.2.0')
    parser.add_argument('--url',
                        action="store",
                        required=True,
                        help='Coverity Connect Server URL')
    parser.add_argument('--auth-key-file',
                        action="store",
                        default=None,
                        help='Authentication key file.')
    parser.add_argument('--user',
                        action="store",
                        default=None,
                        help='User name.')
    parser.add_argument('--email',
                        action="store",
                        required=True,
                        help='User email.')
    parser.add_argument('--password', default=None, help='User password.')
    parser.add_argument('--stream',
                        action="store",
                        required=True,
                        help='Coverity target stream')
    parser.add_argument('--project',
                        action="store",
                        required=True,
                        help='Coverity target project')
    parser.add_argument('--project-name',
                        action="store",
                        required=True,
                        help='Project Name')
    parser.add_argument('--bu',
                        action="store",
                        default="",
                        help='Business unit (for reports)')
    parser.add_argument(
        '--report',
        action="store_true",
        help='Force reporting, even if no new issues were found')
    # build options
    parser.add_argument('--dir',
                        dest='intermediate_dir',
                        action='store',
                        required=True,
                        help="Intermediate directory")
    parser.add_argument('--strip-path',
                        action='store',
                        required=True,
                        help="Path to remove from hits")
    parser.add_argument('--description',
                        action='store',
                        default="",
                        help="Snapshot description")
    parser.add_argument('--report-dir',
                        action='store',
                        required=True,
                        help="Report Directory")
    parser.add_argument('--code-version',
                        action='store',
                        required=True,
                        help="Code version")
    parser.add_argument('command',
                        help="Build command",
                        nargs=argparse.REMAINDER,
                        action='store')

    # parse arguments (will exit here on invalid args or help)
    args = parser.parse_args(args=cmd_line)

    # Resolve settings from arguments
    if args.auth_key_file:
        args.auth_key_file = os.path.abspath(args.auth_key_file)
    args.intermediate_dir = os.path.abspath(args.intermediate_dir)
    args.report_dir = os.path.abspath(args.report_dir)
    args.strip_path = os.path.abspath(args.strip_path)
    return args


def configure_coverity():
    """Configure Coverity"""
    if os.name == "nt":
        run_command("cov-configure", "--msvc")
    run_command("cov-configure", "--gcc")
    run_command("cov-configure", "--template", "--compiler", "cc",
                "--comptype", "gcc")
    run_command("cov-configure", "--template", "--compiler", "c++",
                "--comptype", "g++")


def build_under_coverity(intermediate_dir, command, strip_path):
    """Build project under coverity"""
    run_command("cov-build", "--dir", intermediate_dir, *command)
    # Analyze
    run_command("cov-analyze", "--dir", intermediate_dir, "--strip-path",
                strip_path, "--enable-constraint-fpp", "--ticker-mode",
                "none", "--disable-default", "--concurrency", "--security",
                "--rule", "--enable-fnptr", "--enable-virtual", "--enable",
                "SECURE_CODING")


# pylint: disable=too-many-arguments,too-many-locals,too-many-positional-arguments
def get_preview_report(intermediate_dir,
                       url,
                       stream,
                       code_version,
                       strip_path,
                       preview_report_v2_path,
                       full_report_v9_path,
                       active_report_v9_path,
                       full_html_report_path,
                       active_html_report_path,
                       text_output=None,
                       user=None,
                       password=None,
                       auth_key_file=None):
    """Generate preview reports (Pull from Coverity Connect, but don't push)"""
    env = os.environ.copy()
    if password is not None:
        env["COVERITY_PASSPHRASE"] = password
    if user is not None:
        env["COV_USER"] = user
    args = [
        "--dir",
        intermediate_dir,
        "--url",
        url,
        "--stream",
        stream,
        "--ticker-mode",
        "none",
        "--version",
        code_version,
        "--strip-path",
        strip_path,
        "--preview-report-v2",
        preview_report_v2_path,
    ]
    if auth_key_file:
        args.extend([
            "--auth-key-file",
            auth_key_file,
        ])
    run_command("cov-commit-defects", *args, env=env)
    run_command("cov-format-errors", "--dir", intermediate_dir,
                "--no-default-triage-filters", "--preview-report-v2",
                preview_report_v2_path, "--json-output-v9",
                full_report_v9_path)
    run_command("cov-format-errors", "--dir", intermediate_dir,
                "--preview-report-v2", preview_report_v2_path,
                "--json-output-v9", active_report_v9_path)
    md(full_html_report_path)
    run_command("cov-format-errors", "--dir", intermediate_dir,
                "--no-default-triage-filters", "--preview-report-v2",
                preview_report_v2_path, "--html-output",
                full_html_report_path)
    md(active_html_report_path)
    run_command("cov-format-errors", "--dir", intermediate_dir,
                "--preview-report-v2", preview_report_v2_path,
                "--html-output", active_html_report_path)
    if text_output:
        with open(text_output, 'w', encoding="utf8") as dest:
            dest.write(
                capture_command("cov-format-errors", "--dir",
                                intermediate_dir, "--preview-report-v2",
                                preview_report_v2_path,
                                "--triage-attribute-regex", "action",
                                "Undecided", "--text-output-style",
                                "multiline")[0])


# pylint: disable=too-many-arguments,too-many-positional-arguments
def publish_to_coverity_connect(intermediate_dir,
                                url,
                                stream,
                                code_version,
                                strip_path,
                                description,
                                snapshot_id_path,
                                user=None,
                                password=None,
                                auth_key_file=None):
    """Publish to Coverity Connect"""
    env = os.environ.copy()
    if password is not None:
        env["COVERITY_PASSPHRASE"] = password
    if user is not None:
        env["COV_USER"] = user
    args = [
        "--dir",
        intermediate_dir,
        "--url",
        url,
        "--stream",
        stream,
        "--ticker-mode",
        "none",
        "--version",
        code_version,
        "--strip-path",
        strip_path,
        "--description",
        description,
        "--snapshot-id-file",
        snapshot_id_path,
    ]
    if auth_key_file:
        args.extend([
"--auth-key-file", auth_key_file, ]) run_command("cov-commit-defects", *args, env=env) def write_reports(args, snapshot_id): """Write PDF reports""" cfg = f"""# This is an example configuration file for Coverity report generators. It # tells report generators how to generate reports. You can make and modify a # copy of it for use in configuring a report generator. # # Its initial sections apply to all report generators. Later sections, # marked with "#######" comments, are intended for specific report # generators. # # This file is In YAML format, version 1.2. See # https://yaml.org/spec/1.2/spec.html for documentation on the format. # If you simply modify a copy of this file you probably do not need to # know details about YAML, however, be aware that structure is # determined by indentation, i.e., the number of spaces at the beginning # of the line. Multi-line text should be placed inside double quotes (""). # # The YAML constructs used here are "mappings" or key: value pairs. Some # mappings' values are file pathnames. Pathnames may use a slash or # backslash, whichever is appropriate for the platform, as a separator. # (Note that according to YAML's quoting rules, backslashes are not # special unless they are in a character sequence enclosed in double # quotes.) A relative pathname here is interpreted as relative to the # directory containing this configuration file. If the configuration did # not come from a file (e.g., was read on standard input), then the # pathname would be relative to the report generator process's working # directory. # ################## Sections that apply to all reports ############# # Describes information needed in interpreting the rest of the config. version: # The version of this file's schema. # Mandatory field schema-version: 6 # Describes settings needed for connecting to Coverity Connect. connection: # The URL of the Coverity Connect instance. # Mandatory field url: {args.url} # Coverity Connect username. Password or other authentication key # is passed in to the application from the command line. # Mandatory field username: {args.user} # Path to an optional file containing additional CA certificates for # use in establishing a secure HTTPS connection. These must be in PEM # format. ssl-ca-certs: # The name of the Coverity Connect project. # Mandatory field project: "{args.project}" # Describes the fields in the title page of the report. title-page: # Name of your company # Mandatory field company-name: "Intel" # Name of the software development project. May be distinct from the # Coverity Connect project name. # Mandatory field project-name: "{args.project_name}" # e.g. project-version: v.1.2.3 # Mandatory field project-version: {args.code_version} # Optional path to a logo file for your company. Valid image types # are bmp, gif, jpg, and png. The maximum allowed image size is 210 # pixels wide by 70 pixels high # Note that backslash characters in a path must be doubled # e.g. logo: C:\\logo\\ourlogo.jpg # e.g. logo: /var/logo/ourlogo.png logo: # Name of your division, group, team or other organizational unit. # Mandatory field organizational-unit-name: "IPAS" # Organizational unit term (e.g., division, group, team). # Mandatory field organizational-unit-term: "{args.bu}" # Name of the entity for which the report was prepared. # This is also known as project-contact-name for # CIR, CVSS, PCIDSS, MobileOwasp, and Owasp2017 # Mandatory field prepared-for: "{args.user}" # Project contact email address. 
    # CIR, CVSS, PCIDSS, MobileOwasp, and Owasp2017
    # Mandatory field
    project-contact-email: "{args.email}"

    # Name of the entity that prepared the report.
    # Mandatory field
    prepared-by: "{args.user}"

    # Locale of the report. Valid values are
    #    en_US (English)
    #    ja_JP (Japanese)
    #    ko_KR (Korean)
    #    zh_CN (Simplified Chinese)
    # Default is en_US
    locale: en_US

# Some reports display information about individual issues. These reports
# bound the number of issues displayed in order to control the size of the
# report. This bound is called the issue cutoff count. It is used for CVSS,
# Security, PCIDSS, MobileOwasp, Owasp2017 and DISA-STIG reports.
# Default is 200. Maximum is 10000 for report.
issue-cutoff-count: 4000

# Used for retrieving the defects of a specific snapshot id, instead of using
# the latest snapshot id of all the streams associated with the project
# (default behavior).
# It is not supported for CIR report.
snapshot-id: {snapshot_id}

# The most recent snapshot of each stream in the project whose date is less
# than or equal to the given date will be used to collect the defects instead
# of the latest snapshot.
# Snapshot-id will get the highest priority if both snapshot-id and
# snapshot-date are present.
# Snapshot date should be in MM/DD/YYYY format
# It is not supported for CIR report.
#snapshot-date: 03/29/2022

# An optional comma-separated list of Coverity Connect issue kinds
# if issue kinds are listed here, the report will include issues only for
# the listed issue kinds.
# Possible values for Issue Kind :
#    Quality
#    Security
# example: Quality
# example: Quality, Security
# Default: Include issues for both Quality and Security
issue-kind: SECURITY

# Components specification
# An optional comma-separated list of Coverity Connect component names,
# including component map name.
# If components are listed here, the report will include data only for
# the listed components.
# example: Default.lib,Default.src
components:

################## CERT report #############
cert-report:
    # Used for CERT config for target level. Valid values are
    #    F  ==> Fully Compliant
    #    L2 ==> L2 Compliant
    #    L1 ==> L1 Compliant
    # Default is F
    target-level: L1

################## Synopsys Software Integrity Report #############
ssir-report:
    # analysis date should be in MM/DD/YYYY format
    # Mandatory field
    analysis-date:

    # Optional legal text to display in the report
    # e.g:
    # "This is the first line of multiline legal text
    # and this is the second line."
    # legal-text: ""

################## Coverity Integrity Report #############
cir-report:
    # Project-related fields
    # The project-description defaults to its description in Coverity
    # Connect, if any.
    # e.g. project-details: Some details for this project
    # project-description: A short description of the project
    project-description: ""
    project-details: ""

    # Target integrity level.
    #    1 ==> < 1 defect per thousand lines of code
    #    2 ==> < .1 defects per thousand lines of code
    #    3 ==> < .01 defects per thousand lines of code, and other requirements
    # Default is 1
    target-integrity-level:

    # Name of the highest severity value.
    # Default is "Major".
    high-severity-name:

    # Name of the unspecified severity value.
    # Default is "Unspecified".
    unspecified-severity-name:

    # Trial flag. Use "true" if page 3 of the report should not be generated.
    # Page 3 contains severity data which is not relevant for projects that
    # do not use severity.
    # Default is false
    trial:

    # LOC multiplier for the number of lines of code that have been
    # inspected.
# Default is 1 loc-multiplier: # Whether to include Low impact defects for calculating the defect density # Possible values can be true, false # Default is false include-low-impact: ################## Coverity Security Report ############# security-report: # There are four Assurance Levels, representing Security Scores of greater # than or equal to 60, 70, 80, and 90. When choosing the Assurance Level, # consider the potential for damage to life, property, or reputation. # An application with high damage potential should have a # high Assurance Level. # Default is 90 without plugin # if plugin yaml is available in the environment, values will come from plugin # and the highest score will be the default assurance-level-score: 90 # A level indicating the minimum acceptable score for the report to # be considered passing. # Possible values can be (AL1, AL2, AL3, AL4) without plugin # and default will be AL1, if plugin yaml is available in the environment, values will come from plugin # and the highest score value will be the default assurance-level: AL1 # The name of the set of severity mappings used to determine the score # of each issue. See the documentation for a description of the severity # mapping. The first three mappings are built-in. "Custom" indicates # that the mapping identified by "custom-severity-mapping" and # "severity-mapping-description" should be used. # Valid values: # Carrier Grade # Web application # Desktop application # Custom # Default : Carrier Grade, if plugin yaml is available in the environment, values will come from plugin # and the first severity-mapping will be the default severity-mapping: Carrier grade # Optional descriptive text for custom severity mapping severity-mapping-description: "" # If severity-mapping is Custom then set the below severity map custom-severity-mapping: # Possible values are # very high # high # medium # low # very low # informational # Default is very high modify-data: Very High read-data: Very High dos-unreliable-execution: Very High dos-resource-consumption: Very High execute-unauthorized-code: Very High gain-privileges: Very High bypass-protection-mechanism: Very High hide-activities: Very High disa-stig: # DISA-STIG version # default version: V4 version: V5 """ with open("_covreport.yml", "w", encoding="utf8") as file: file.write(cfg) # Special environment with Coverity user information env = os.environ.copy() if args.password: env["COVERITY_PASSPHRASE"] = args.password if args.user: env["COV_USER"] = args.user report_path = os.path.join(args.report_dir, "cvss_report.pdf") arguments = [ "_covreport.yml", "--output", report_path, "--report", ] if args.password: arguments.extend([ "--password", "env:COVERITY_PASSPHRASE", ]) run_command("cov-generate-cvss-report", *arguments, env=env) report_path = os.path.join(args.report_dir, "security_report.pdf") arguments = [ "_covreport.yml", "--output", report_path, ] if args.password: arguments.extend([ "--password", "env:COVERITY_PASSPHRASE", ]) run_command("cov-generate-security-report", *arguments, env=env) def run(args): """main entry point """ # Set up analysis environment configure_coverity() md(args.intermediate_dir) build_under_coverity(args.intermediate_dir, args.command, args.strip_path) # Gather data md(os.path.join(args.report_dir, "json")) preview_report_v2_path = os.path.join(args.report_dir, "json", "preview_report_v2.json") json_report_path = os.path.join(args.report_dir, "json", "errors_v9_full.json") get_preview_report(args.intermediate_dir, args.url, args.stream, 
args.code_version, args.strip_path, preview_report_v2_path, json_report_path, os.path.join(args.report_dir, "json", "errors_v9_active.json"), os.path.join(args.report_dir, "html_full"), os.path.join(args.report_dir, "html_active"), os.path.join(args.report_dir, "text_report.txt"), user=args.user, password=args.password, auth_key_file=args.auth_key_file) error_count = 0 with open(json_report_path, "r", encoding="utf8") as json_file: report = json.load(json_file) summary = ReportSummary(report, ['undecided']) error_count = summary.problems xunit_from_report(os.path.join(args.report_dir, "xunit.xml"), summary) snapshot_id = None if args.report or error_count > 0: publish_to_coverity_connect(args.intermediate_dir, args.url, args.stream, args.code_version, args.strip_path, args.description, "_snapshot_id.txt", user=args.user, password=args.password, auth_key_file=args.auth_key_file) with open("_snapshot_id.txt", "r", encoding="utf8") as snapshot_id_file: snapshot_id = snapshot_id_file.read().strip() snapshot_info = {} if snapshot_id is not None: snapshot_info["id"] = snapshot_id snapshot_info["stream"] = args.stream snapshot_info["version"] = args.code_version with open(os.path.join(args.report_dir, "json", "info.json"), "w", encoding="utf8") as info_file: json.dump(snapshot_info, info_file, indent=2) if snapshot_id is not None: write_reports(args, snapshot_id) return error_count # pylint: disable=bare-except if __name__ == '__main__': sys.exit(run(read_command_line(None))) libvpl-tools-1.3.0/.github/workflows/coverity/packages.txt000066400000000000000000000000131473010523400237120ustar00rootroot00000000000000fontconfig libvpl-tools-1.3.0/.github/workflows/daily-validation.yml000066400000000000000000000222221473010523400235120ustar00rootroot00000000000000--- name: Daily permissions: read-all on: # Run on user request workflow_dispatch: inputs: upload_sdl: description: 'Trigger SDL Upload' required: false default: false type: boolean # Run on schedule schedule: # daily at 9:00 UTC (2:00 MST) - cron: '0 9 * * *' concurrency: # Cancel any existing jobs related to the target branch group: nightly-ci-${{ github.ref || github.run_id }} cancel-in-progress: true jobs: lint: if: true uses: ./.github/workflows/lint.yml scorecard: if: true uses: ./.github/workflows/scorecard.yml hadolint: if: true uses: ./.github/workflows/hadolint.yml with: output_prefix: tools- trivy: if: true uses: ./.github/workflows/trivy.yml with: output_prefix: tools- ip-leak-scan: if: true name: IP Leak Scan uses: ./.github/workflows/ipldt.yml secrets: inherit with: output_prefix: tools- source-malware-scan: if: true uses: ./.github/workflows/mcafee.yml secrets: inherit with: output_prefix: tools- # This job configures variables that are useful for other jobs. Other jobs # that depend on this one can access the variables via # needs.setup-variables.outputs. 
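# For example, the build jobs below consume these outputs with expressions
# like: ref: ${{ needs.setup-variables.outputs.lib_ref }}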
setup-variables: if: true uses: ./.github/workflows/setup-variables.yml secrets: inherit linux-build: if: true needs: setup-variables uses: ./.github/workflows/cmake.yml with: os: linux build_type: release artifact_name: linux-release-build run_tests: false no_artifacts: false repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} windows-build: if: true needs: setup-variables uses: ./.github/workflows/cmake.yml with: os: windows build_type: release artifact_name: windows-release-build run_tests: false no_artifacts: false repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} coverity: if: true needs: [linux-build, windows-build] strategy: fail-fast: false matrix: os: - linux - windows uses: ./.github/workflows/coverity.yml secrets: inherit with: os: ${{ matrix.os }} output_prefix: tools- dependent_artifact: ${{ matrix.os }}-release-build extra_opts: --report linux-tools-build: if: true needs: [linux-build] uses: ./.github/workflows/cmake.yml with: os: linux build_type: release artifact_name: linux-tools-build run_tests: true no_artifacts: false dependent_artifact: linux-release-build windows-tools-build: if: true needs: [windows-build] uses: ./.github/workflows/cmake.yml with: os: windows build_type: release artifact_name: windows-tools-build run_tests: true no_artifacts: false dependent_artifact: windows-release-build windows-malware-scan: if: true needs: [windows-tools-build] uses: ./.github/workflows/mcafee.yml secrets: inherit with: artifact_name: windows-tools-build output_prefix: tools- linux-malware-scan: if: true needs: [linux-tools-build] uses: ./.github/workflows/mcafee.yml secrets: inherit with: artifact_name: linux-tools-build output_prefix: tools- windows-sscb: if: true needs: [windows-tools-build] uses: ./.github/workflows/sscb.yml with: os: windows artifact_name: windows-tools-build output_prefix: tools- linux-sscb: if: true needs: [linux-tools-build] uses: ./.github/workflows/sscb.yml with: os: linux artifact_name: linux-tools-build output_prefix: tools- bdba: if: true needs: - linux-tools-build - windows-tools-build uses: ./.github/workflows/bdba.yml with: output_prefix: tools- version: ${{ github.ref_name }} pattern: "*-tools-build" secrets: inherit sdl: if: ${{ github.event.inputs.upload_sdl == 'true' }} needs: - linux-build - windows-build - summary - setup-variables uses: ./.github/workflows/sdl.yml with: SUMMARY_ARTIFACT: tools-release-summary LABEL: ${{ needs.setup-variables.outputs.tools_version }} SDLE_PROJECT: ${{vars.SDLE_ID}} SDLE_USER: ${{vars.SDLE_API_USER}} output_prefix: tools- secrets: SDLE_API_KEY: ${{ secrets.SDLE_API_KEY }} windows-acceptance: if: true needs: [windows-build, windows-tools-build, setup-variables] strategy: fail-fast: true matrix: gpu: - gen12.5 config: - release os: - windows uses: ./.github/workflows/acceptance.yml secrets: inherit with: os: ${{ matrix.os }} build_type: ${{ matrix.config }} lib_artifact: ${{ matrix.os }}-${{ matrix.config }}-build tools_artifact: windows-tools-build gpu: ${{ matrix.gpu }} distro_family: windows distro_version: 11 test_ref: ${{ needs.setup-variables.outputs.test_ref }} linux-acceptance: if: true needs: [linux-build, linux-tools-build, setup-variables] strategy: fail-fast: true matrix: gpu: - gen12.5 distro: - family: ubuntu version: 22.04 config: - release os: - linux uses: ./.github/workflows/acceptance.yml secrets: inherit with: os: ${{ matrix.os }} 
build_type: ${{ matrix.config }} lib_artifact: ${{ matrix.os }}-${{ matrix.config }}-build tools_artifact: linux-tools-build gpu: ${{ matrix.gpu }} distro_family: ${{ matrix.distro.family }} distro_version: ${{ matrix.distro.version }} test_ref: ${{ needs.setup-variables.outputs.test_ref }} ref-build: if: true needs: [setup-variables, linux-build, windows-build] strategy: fail-fast: true matrix: os: - windows - linux uses: ./.github/workflows/cmake.yml with: os: ${{ matrix.os }} build_type: release artifact_name: ${{ matrix.os }}-ref-build run_tests: false no_artifacts: false ref: ${{ needs.setup-variables.outputs.last_release_ref }} dependent_artifact: ${{ matrix.os }}-release-build diff-report: if: true needs: [linux-tools-build, windows-tools-build, ref-build] strategy: fail-fast: false matrix: os: [windows, linux] uses: ./.github/workflows/diff.yml with: report_name: ${{ matrix.os }}-tools-diff-report left: ${{ matrix.os }}-ref-build right: ${{ matrix.os }}-tools-build distro-tests: if: true needs: [linux-build, linux-tools-build, setup-variables] strategy: fail-fast: true matrix: distro: - family: rhel version: 8.6 - family: sles version: 15.4 gpu: - gen12.5 os: - linux config: - release uses: ./.github/workflows/acceptance.yml secrets: inherit with: os: ${{ matrix.os }} build_type: ${{ matrix.config }} lib_artifact: ${{ matrix.os }}-${{ matrix.config }}-build tools_artifact: linux-tools-build gpu: ${{ matrix.gpu }} distro_family: ${{ matrix.distro.family }} distro_version: ${{ matrix.distro.version }} test_ref: ${{ needs.setup-variables.outputs.test_ref }} experimental-off: if: '! github.event.pull_request.draft' needs: setup-variables uses: ./.github/workflows/cmake.yml strategy: fail-fast: false matrix: os: - windows - linux with: os: ${{ matrix.os }} build_type: release artifact_name: ${{ matrix.os }}-experimental-off-build run_tests: false no_artifacts: false configure_options: >- -DBUILD_EXPERIMENTAL=OFF repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} tools-experimental-off: if: '! 
github.event.pull_request.draft' needs: [experimental-off] uses: ./.github/workflows/cmake.yml strategy: fail-fast: false matrix: os: - windows - linux with: os: ${{ matrix.os }} build_type: release artifact_name: ${{ matrix.os }}-tools-experimental-off-build run_tests: false no_artifacts: true configure_options: >- -DBUILD_EXPERIMENTAL=OFF dependent_artifact: ${{ matrix.os }}-experimental-off-build summary: if: "always()" needs: - hadolint - trivy - ip-leak-scan - source-malware-scan - coverity - windows-malware-scan - linux-malware-scan - windows-sscb - linux-sscb - bdba - diff-report - windows-acceptance - linux-acceptance - distro-tests - tools-experimental-off uses: ./.github/workflows/summary.yml with: output_prefix: tools- libvpl-tools-1.3.0/.github/workflows/diff.yml000066400000000000000000000042741473010523400211770ustar00rootroot00000000000000--- name: Diff report permissions: read-all on: workflow_call: inputs: report_name: description: 'name of artifact to store the diff' required: true type: string left: description: 'name of artifact for left side of compare' required: true type: string right: description: 'name of artifact for right side of compare' required: true type: string jobs: report: runs-on: [self-hosted, linux] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Download left artifact uses: actions/download-artifact@v4 with: name: ${{ inputs.left }} path: left - name: Extract package run: unzip left/*.zip -d ${{ inputs.left }} - name: Download right artifact uses: actions/download-artifact@v4 with: name: ${{ inputs.right }} path: right - name: Extract package run: unzip right/*.zip -d ${{ inputs.right }} - name: Checkout PR branch uses: actions/checkout@v4 with: path: source - name: Run Diff run: | mkdir report python3 source/.github/workflows/diff/bom_diff.py \ "${{ inputs.left }}" \ "${{ inputs.right }}" \ --mode All \ --title "Bom Report (Full)" \ --output report/FULL_BOM.html python3 source/.github/workflows/diff/bom_diff.py \ "${{ inputs.left }}" \ "${{ inputs.right }}" \ --mode Diff \ --title "Bom Report (Diff)" \ --output report/DIFF_BOM.html python3 source/.github/workflows/diff/bom_diff.py \ "${{ inputs.left }}" \ "${{ inputs.right }}" \ --mode Orphan \ --title "Bom Report (Orphan)" \ --output report/ORPHAN_BOM.html - name: Upload artifact uses: actions/upload-artifact@v4 if: (success() || failure()) && inputs.report_name with: name: ${{ inputs.report_name }} path: ./report/ - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/diff/000077500000000000000000000000001473010523400204455ustar00rootroot00000000000000libvpl-tools-1.3.0/.github/workflows/diff/bom_diff.py000066400000000000000000000324401473010523400225670ustar00rootroot00000000000000#!/usr/bin/env python3 ############################################################################ # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT ############################################################################ """Generate BOM difference report""" import argparse import os import sys import datetime import contextlib import filecmp # pylint: disable=too-many-instance-attributes # pylint: disable=consider-using-dict-items class DiffInfo: """Diff information for a single tree node""" def log(self, msg): """Print a log message for this node""" print(" " * self.depth, end='') print(msg) # pylint: disable=too-many-arguments,too-many-positional-arguments def __init__(self, left, right, path='', name='', depth=0): self._cmp = None
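# Cached comparison state: _cmp memoizes the result of cmp(); _has_diff and
# _has_orphan record whether this node or any descendant differs or is an orphan.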
self._has_diff = False self._has_orphan = False self.depth = depth self.name = name self._root = {'left': left, 'right': right} self.rel_path = os.path.join(path, name) self._path = { 'left': os.path.join(left, self.rel_path), 'right': os.path.join(right, self.rel_path) } self.children = {} for side in self._path: self._extend(self._path[side]) def flags(self, side): """Get the flag info for one side of the node""" if side not in self._path: raise IndexError() if self.exists(side): return oct(os.stat(self._path[side]).st_mode)[2:] return '' def path(self, side): """Get the filesystem path for one side of the node""" if side not in self._path: raise IndexError() return self._path[side] def file_size(self, side): """Get the size of one side of the node""" if side not in self._path: raise IndexError() if self.exists(side): return os.stat(self._path[side]).st_size return '' def hasorphan(self): """Check if node is an orphan or has orphan children""" self.cmp() return self._has_orphan def hasdiff(self): """Check if node is a diff or has diff children""" self.cmp() return self._has_diff def exists(self, side): """Check if one side of the node exists in the filesystem""" if side not in self._path: raise IndexError() return os.path.exists(self._path[side]) def isfile(self, side): """Check if one side of the node is a file""" if side not in self._path: raise IndexError() return os.path.isfile(self._path[side]) def isdir(self, side): """Check if one side of the node is a folder""" if side not in self._path: raise IndexError() return os.path.isdir(self._path[side]) def invalidate(self): """Invalidate cached comparison info""" self._cmp = None for child in self.children: self.children[child].invalidate() # pylint: disable=too-many-branches, too-many-statements def cmp(self, check_flags=False): """Get comparison info for this node; implies child nodes are checked as well.
This information is cached, so multiple calls do not incur increased cost""" if self._cmp: return self._cmp has_left = os.path.exists(self._path['left']) has_right = os.path.exists(self._path['right']) self._has_diff = False self._has_orphan = False if has_left and has_right: if self.isfile('left') and self.isfile('right'): if filecmp.cmp(self._path['left'], self._path['right'], False): if check_flags: if self.flags('left') == self.flags('right'): self._cmp = 'match' else: self._has_diff = True self._cmp = 'mismatch' else: self._cmp = 'match' else: self._has_diff = True self._cmp = 'mismatch' elif self.isdir('left') and self.isdir('right'): self._cmp = 'match' for child in self.children: child_cmp = self.children[child].cmp() if child_cmp != 'match': self._has_diff = True self._cmp = 'mismatch' if self.children[child].hasorphan(): self._has_orphan = True if self.children[child].hasdiff(): self._has_diff = True if check_flags and self._cmp == 'match': if self.flags('left') == self.flags('right'): self._cmp = 'match' else: self._has_diff = True self._cmp = 'mismatch' else: self._cmp = 'mismatch' self._has_diff = True for child in self.children: child_cmp = self.children[child].cmp() if self.children[child].hasorphan(): self._has_orphan = True if self.children[child].hasdiff(): self._has_diff = True elif has_left: self._has_diff = True self._has_orphan = True self._cmp = 'orphan-left' elif has_right: self._has_diff = True self._has_orphan = True self._cmp = 'orphan-right' else: self._cmp = 'missing' return self._cmp def _extend(self, child_path): """Add any items in the given path as children""" if os.path.isdir(child_path): for child in os.scandir(child_path): if child.name in self.children: continue self.children[child.name] = DiffInfo(self._root['left'], self._root['right'], self.rel_path, child.name, self.depth + 1) @contextlib.contextmanager def open_output(filename=None): """Open the main output stream, which may be stdout""" if filename is None or filename == '-': output = sys.stdout yield output else: with open(filename, 'w', encoding="utf8") as output: yield output # pylint: disable=too-many-branches, too-many-statements def print_row(root, mode, indent=0): """Print one row of the diff report""" relation = root.cmp() result = '' if mode == 'All': pass elif mode == 'Diff' and root.hasdiff(): pass elif mode == 'Orphan' and root.hasorphan(): pass else: return '' result += '' if relation == 'match': result += '\n' elif relation == 'mismatch': result += '' elif relation == 'orphan-left': result += '' elif relation == 'orphan-right': result += '' elif relation == 'missing': result += '' else: result += '' if root.exists('left'): if root.isdir('left'): sym = '📂' else: sym = '🗎' result += f'{"  " * indent}{sym} {root.name}' result += '' result += f'{root.file_size("left")}' result += f'{root.flags("left")}' if relation == 'match': sym = '=' result += f'{sym}' elif relation == 'mismatch': sym = '≠' result += f'{sym}' elif relation == 'orphan-left': sym = '' result += f'{sym}' elif relation == 'orphan-right': sym = '' result += f'{sym}' elif relation == 'missing': sym = '🦨' result += f'{sym}' else: sym = relation result += f'{sym}' if relation == 'match': result += '' elif relation == 'mismatch': result += '' elif relation == 'orphan-left': result += '' elif relation == 'orphan-right': result += '' elif relation == 'missing': result += '' else: result += '' if root.exists('right'): if root.isdir('right'): sym = '📂' else: sym = '🗎' result += f'{"  " * indent}{sym} {root.name}' result += ''
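# Mirror the left-hand columns for the right-hand tree: size and flags cells.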
result += f'{root.file_size("right")}' result += f'{root.flags("right")}' result += '' return result def print_tree(root, mode, indent=0): """Print root item and all children for the report""" result = '' if indent: result += print_row(root, mode, indent) for child in sorted(root.children): result += print_tree(root.children[child], mode, indent + 1) return result def write_report(root, title, mode): """Print report Supported modes are: All: Include all nodes Diff: Include only nodes with differences Orphan: Include only orphan nodes """ now = datetime.datetime.now() result = '' if mode == 'All': mode_name = 'All' elif mode == 'Diff': mode_name = 'Differences' elif mode == 'Orphan': mode_name = 'Orphans' else: mode_name = mode result += """ """ result += f"""{title} {title}
Produced: {now.strftime("%Y-%m-%d %H:%M:%S")}
   
Mode:  {mode_name}  
Left base folder: {os.path.abspath(root.path('left'))} 
Right base folder: {os.path.abspath(root.path('right'))} 
""" result += print_tree(root, mode, 0) result += """
Name Size Flags   Name Size Flags

""" return result def main(): """Main entrypoint""" parser = argparse.ArgumentParser( description=globals()['__doc__'], formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('left', action='store') parser.add_argument('right', action='store') parser.add_argument('--mode', '-m', default="All", action='store', choices=['All', 'Diff', 'Orphan']) parser.add_argument('--output', '-o', default=None, action='store') parser.add_argument('--title', '-t', default="BOM Diff", action='store') args = parser.parse_args() root = DiffInfo(args.left, args.right) root.cmp() report = write_report(root, args.title, args.mode) with open_output(args.output) as output: output.write(report) if __name__ == "__main__": main() libvpl-tools-1.3.0/.github/workflows/hadolint.yml000066400000000000000000000047441473010523400220730ustar00rootroot00000000000000--- name: Hadolint permissions: read-all on: workflow_call: inputs: output_prefix: description: 'Prefix to add to output artifacts' required: false default: '' type: string jobs: scan: runs-on: [self-hosted, linux, docker] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Checkout PR branch uses: actions/checkout@v4 with: path: source - name: Pull docker image run: docker pull hadolint/hadolint - name: Lint run: | mkdir artifact echo "Hadolint Report" \ > artifact/hadolint.txt walk_dir () { shopt -s nullglob dotglob for pathname in "$1"/*; do retVal=0 if [ -d "$pathname" ]; then walk_dir "$pathname" || retVal=$? if [ $retVal -ne 0 ]; then RC=$retVal fi else case "$pathname" in *Dockerfile*|*dockerfile*) echo "Checking $pathname" echo "" >> artifact/hadolint.txt echo " $pathname" \ >> artifact/hadolint.txt echo "----------" \ >> artifact/hadolint.txt docker run --rm \ -i --attach stderr --attach stdout \ -v $(pwd)/source:/source \ -w /source \ hadolint/hadolint \ < $pathname 2>&1 \ >> artifact/hadolint.txt \ || retVal=$? 
if [ $retVal -ne 0 ]; then RC=$retVal fi esac fi done return $RC } walk_dir "$(pwd)/source" - name: Summarize if: (failure()) run: | echo '```' >> $GITHUB_STEP_SUMMARY cat artifact/hadolint.txt \ >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY - name: Report if: (success() || failure()) run: | cat artifact/hadolint.txt - name: Record Artifacts uses: actions/upload-artifact@v4 if: (success() || failure()) with: name: ${{ inputs.output_prefix }}hadolint path: artifact/* - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/ipldt.yml000066400000000000000000000060471473010523400214030ustar00rootroot00000000000000--- name: IP Leak Scan permissions: read-all on: workflow_dispatch: # allow direct call to support testing updates to disposition DB inputs: db_ref: description: 'The branch, tag or SHA to get DB from' default: ipldt type: string output_prefix: description: 'Prefix to add to output artifacts' required: false default: '' type: string workflow_call: inputs: db_ref: description: 'The branch, tag or SHA to get DB from' required: false type: string output_prefix: description: 'Prefix to add to output artifacts' required: false default: '' type: string jobs: scan: runs-on: [self-hosted, Linux, docker] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Checkout PR branch uses: actions/checkout@v4 with: path: source - name: Build Docker image run: > docker build "source/.github/workflows/ipldt" -f "source/.github/workflows/ipldt/Dockerfile.ubuntu.ipldt" --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) -t vpl_ipldt:ubuntu --build-arg "IPLDB_TOOL_URL=${{ vars.IPLDB_TOOL_URL }}" - name: Checkout Dispositions uses: actions/checkout@v4 with: path: db ref: ${{ inputs.db_ref || 'ipldt' }} - name: Package Source run: | pushd source git archive HEAD -o ../source.zip popd - name: Scan source in container continue-on-error: false run: | cat <<'EOL' > action.sh #!/bin/bash set -x set +o errexit set -o pipefail /opt/ipldt3_lin_intel64/ipldt3_lin_intel64 \ -i source.zip \ -c 37 \ --usedb db/ipldt_results.ip.db \ --usedb db/ipldt_results.ipevt.db \ --usedb db/ipldt_results.tm.db \ -s db/stringfile.yaml.0 \ --db-rel-path \ --gendb _logs/ip-leak-scan/hits-linux.db \ --r-overwrite \ --r _logs/ip-leak-scan \ | tee _logs/ipldt.txt ret=$? 
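# Because pipefail is set, $ret holds the scanner's exit status, not tee's.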
set +o pipefail exit $ret EOL chmod +x action.sh mkdir -p _logs/ip-leak-scan docker run --rm -v $PWD:/working -w /working \ vpl_ipldt:ubuntu ./action.sh mv _logs/ipldt.txt _logs/ip-leak-scan/ipldt_results.txt - name: Report if: success() || failure() run: | echo '```' >> $GITHUB_STEP_SUMMARY cat _logs/ip-leak-scan/ipldt_results.txt >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY - name: Record Artifacts uses: actions/upload-artifact@v4 if: success() || failure() with: name: ${{ inputs.output_prefix }}ip-leak-scan path: _logs/ip-leak-scan - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/ipldt/000077500000000000000000000000001473010523400206515ustar00rootroot00000000000000libvpl-tools-1.3.0/.github/workflows/ipldt/Dockerfile.ubuntu.ipldt000066400000000000000000000017761473010523400253120ustar00rootroot00000000000000# ============================================================================== # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ============================================================================== ARG DOCKER_REGISTRY FROM ${DOCKER_REGISTRY}ubuntu:22.04 WORKDIR /setup COPY packages.txt packages.txt ARG DEBIAN_FRONTEND=noninteractive ARG IPLDB_TOOL_URL RUN apt-get update \ && xargs -a packages.txt apt-get install -y --no-install-recommends \ && rm -rf /var/lib/apt/lists/* \ && mkdir -p /tmp \ && mkdir -p /opt \ && curl -s -o /tmp/ipldt3_lin_intel64.tgz -k "${IPLDB_TOOL_URL}" \ && tar -xzvf /tmp/ipldt3_lin_intel64.tgz -C /opt \ && ls /opt \ && ls /opt/ipldt3_lin_intel64 \ && rm -rf /tmp/ipldt3_lin_intel64.tgz # Use non-root user ARG GROUP_ID=10000 ARG USER_ID=10001 RUN addgroup --gid ${GROUP_ID} --system appgroup \ && adduser --uid ${USER_ID} --system --gid ${GROUP_ID} appuser USER appuser HEALTHCHECK CMD /opt/ipldt3_lin_intel64/ipldt3_lin_intel64 --v || exit 1 libvpl-tools-1.3.0/.github/workflows/ipldt/packages.txt000066400000000000000000000000051473010523400231630ustar00rootroot00000000000000curl libvpl-tools-1.3.0/.github/workflows/lint.yml000066400000000000000000000025071473010523400212320ustar00rootroot00000000000000--- name: Lint permissions: read-all on: workflow_call: workflow_dispatch: jobs: lint: name: Lint runs-on: [self-hosted, linux, docker] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Checkout PR branch and all history uses: actions/checkout@v4 with: path: source fetch-depth: 0 - name: Build Docker image run: > docker build "source/.github/workflows/lint" -f "source/.github/workflows/lint/Dockerfile.ubuntu.lint" --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) -t vpl_lint:ubuntu - name: Lint source in container run: | cat <<'EOL' > lint.sh #!/bin/bash set -x set -o pipefail source/script/test lint | tee lint.log ret=$? 
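# As in the other scan scripts, pipefail ensures $ret reflects the lint
# pipeline's real status rather than tee's.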
set +o pipefail exit $ret EOL chmod +x lint.sh docker run --rm -v $(pwd):/tmp/work -w /tmp/work \ vpl_lint:ubuntu ./lint.sh - name: Report if: success() || failure() run: | echo '```' >> $GITHUB_STEP_SUMMARY cat lint.log >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/lint/000077500000000000000000000000001473010523400205035ustar00rootroot00000000000000libvpl-tools-1.3.0/.github/workflows/lint/Dockerfile.ubuntu.lint000066400000000000000000000016051473010523400247650ustar00rootroot00000000000000# ============================================================================== # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ============================================================================== ARG DOCKER_REGISTRY FROM ${DOCKER_REGISTRY}ubuntu:22.04 WORKDIR /setup COPY requirements.txt requirements.txt COPY packages.txt packages.txt ARG DEBIAN_FRONTEND=noninteractive ENV PIP_ROOT_USER_ACTION=ignore RUN apt-get update \ && xargs -a packages.txt apt-get install -y --no-install-recommends \ && rm -rf /var/lib/apt/lists/* \ && pip3 install --no-cache-dir -U -r requirements.txt # Use non-root user ARG GROUP_ID=10000 ARG USER_ID=10001 RUN addgroup --gid ${GROUP_ID} --system appgroup \ && adduser --uid ${USER_ID} --system --gid ${GROUP_ID} appuser USER appuser RUN git config --global safe.directory '*' HEALTHCHECK CMD pre-commit --version || exit 1 libvpl-tools-1.3.0/.github/workflows/lint/packages.txt000066400000000000000000000000201473010523400230120ustar00rootroot00000000000000python3-pip git libvpl-tools-1.3.0/.github/workflows/lint/requirements.txt000066400000000000000000000000401473010523400237610ustar00rootroot00000000000000virtualenv==v20.24.5 pre-commit libvpl-tools-1.3.0/.github/workflows/mcafee.yml000066400000000000000000000056131473010523400215050ustar00rootroot00000000000000--- name: Malware scan permissions: read-all on: workflow_call: inputs: artifact_name: description: 'Artifact to test' type: string caas: description: 'CaaS Image' type: string write_test_file: description: 'Write EICAR test file (see https://www.eicar.org/)' type: boolean output_prefix: description: 'Prefix to add to output artifacts' required: false default: '' type: string jobs: scan: runs-on: [self-hosted, linux, docker] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * # get files to scan - name: Checkout dispatcher source if: success() && !inputs.artifact_name uses: actions/checkout@v4 with: path: product - name: Download package if: success() && inputs.artifact_name uses: actions/download-artifact@v4 with: name: ${{ inputs.artifact_name }} path: product - name: Extract package if: success() && inputs.artifact_name run: | if compgen -G "product/*.zip" > /dev/null; then unzip product/*.zip -d product fi # Write test file if requested - name: Write EICAR test file if: success() && inputs.write_test_file run: > echo 'X5O!P%@AP[4\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*' > product/eicar-com.com # CaaS based testing - name: Pull CaaS docker image if: success() && inputs.caas run: | docker pull ${{ inputs.caas }} - name: Run Test using CaaS image if: success() && inputs.caas run: > docker run -v $(realpath product):/scanme --rm ${{ inputs.caas }} >> report.txt # Local image based testing - name: Checkout av scanning tool if: success() && !inputs.caas uses: actions/checkout@v4 with: repository: ${{ vars.AV_TOOL_REPO }} token: ${{ secrets.TEST_REPO_TOKEN }} 
path: av-scanning ref: master - name: Create docker image if: success() && !inputs.caas run: | pushd av-scanning sed -i 's|FROM.*ubuntu:latest|FROM public.ecr.aws/lts/ubuntu:22.04|' \ Dockerfile docker build -t mcafee:latest . popd - name: Run Test if: success() && !inputs.caas run: | docker run --rm -v $(realpath product):/scanme \ mcafee:latest >> report.txt # Publish - name: Upload test results uses: actions/upload-artifact@v4 if: success() || failure() with: name: ${{ inputs.artifact_name || 'source' }}-malware-scan path: report.txt - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/pr-draft.yml000066400000000000000000000074731473010523400220120ustar00rootroot00000000000000--- name: Smoke permissions: read-all on: # Run on pull requests pull_request: # Run on user request workflow_dispatch: concurrency: # Cancel any existing jobs related to the target branch group: draft-ci-${{ github.ref || github.run_id }} cancel-in-progress: true jobs: lint: if: 'github.event.pull_request.draft' uses: ./.github/workflows/lint.yml hadolint: if: 'github.event.pull_request.draft' uses: ./.github/workflows/hadolint.yml with: output_prefix: tools- trivy: if: 'github.event.pull_request.draft' uses: ./.github/workflows/trivy.yml with: output_prefix: tools- # This job configures variables that are useful for other jobs. Other jobs # that depend on this one can access the variables via # needs.setup-variables.outputs. setup-variables: if: 'github.event.pull_request.draft' uses: ./.github/workflows/setup-variables.yml secrets: inherit linux-build: if: 'github.event.pull_request.draft' needs: setup-variables uses: ./.github/workflows/cmake.yml with: os: linux build_type: release artifact_name: linux-release-build run_tests: false no_artifacts: false repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} windows-build: if: 'github.event.pull_request.draft' needs: setup-variables uses: ./.github/workflows/cmake.yml with: os: windows build_type: release artifact_name: windows-release-build run_tests: false no_artifacts: false repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} linux-tools-build: if: 'github.event.pull_request.draft' needs: [linux-build] uses: ./.github/workflows/cmake.yml with: os: linux build_type: release artifact_name: linux-tools-build run_tests: true no_artifacts: false dependent_artifact: linux-release-build windows-tools-build: if: 'github.event.pull_request.draft' needs: [windows-build] uses: ./.github/workflows/cmake.yml with: os: windows build_type: release artifact_name: windows-tools-build run_tests: true no_artifacts: false dependent_artifact: windows-release-build windows-acceptance: if: 'github.event.pull_request.draft' needs: [windows-build, windows-tools-build, setup-variables] strategy: fail-fast: true matrix: gpu: - gen12.5 config: - release os: - windows uses: ./.github/workflows/acceptance.yml secrets: inherit with: os: ${{ matrix.os }} build_type: ${{ matrix.config }} lib_artifact: ${{ matrix.os }}-${{ matrix.config }}-build tools_artifact: windows-tools-build gpu: ${{ matrix.gpu }} distro_family: windows distro_version: 11 test_ref: ${{ needs.setup-variables.outputs.test_ref }} linux-acceptance: if: 'github.event.pull_request.draft' needs: [linux-build, linux-tools-build, setup-variables] strategy: fail-fast: true matrix: gpu: - gen12.5 distro: - family: ubuntu version: 22.04 
config: - release os: - linux uses: ./.github/workflows/acceptance.yml secrets: inherit with: os: ${{ matrix.os }} build_type: ${{ matrix.config }} lib_artifact: ${{ matrix.os }}-${{ matrix.config }}-build tools_artifact: linux-tools-build gpu: ${{ matrix.gpu }} distro_family: ${{ matrix.distro.family }} distro_version: ${{ matrix.distro.version }} test_ref: ${{ needs.setup-variables.outputs.test_ref }} libvpl-tools-1.3.0/.github/workflows/pr-ready.yml000066400000000000000000000161341473010523400220100ustar00rootroot00000000000000--- name: Check permissions: read-all on: # Run on pull requests, including when readiness state is changed pull_request: types: ['opened', 'synchronize', 'reopened', 'ready_for_review'] # Run on user request workflow_dispatch: concurrency: # Cancel any existing jobs related to the target branch group: ready-ci-${{ github.ref || github.run_id }} cancel-in-progress: true jobs: lint: if: '! github.event.pull_request.draft' uses: ./.github/workflows/lint.yml scorecard: if: true uses: ./.github/workflows/scorecard.yml hadolint: if: '! github.event.pull_request.draft' uses: ./.github/workflows/hadolint.yml with: output_prefix: tools- trivy: if: '! github.event.pull_request.draft' uses: ./.github/workflows/trivy.yml with: output_prefix: tools- ip-leak-scan: if: '! github.event.pull_request.draft' name: IP Leak Scan uses: ./.github/workflows/ipldt.yml secrets: inherit with: output_prefix: tools- # This job configures variables that are useful for other jobs. Other jobs # that depend on this one can access the variables via # needs.setup-variables.outputs. setup-variables: if: '! github.event.pull_request.draft' uses: ./.github/workflows/setup-variables.yml secrets: inherit linux-build: if: '! github.event.pull_request.draft' needs: setup-variables uses: ./.github/workflows/cmake.yml with: os: linux build_type: release artifact_name: linux-release-build run_tests: false no_artifacts: false repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} windows-build: if: '! github.event.pull_request.draft' needs: setup-variables uses: ./.github/workflows/cmake.yml with: os: windows build_type: release artifact_name: windows-release-build run_tests: false no_artifacts: false repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} coverity: if: '! github.event.pull_request.draft' needs: [linux-build, windows-build] strategy: fail-fast: false matrix: os: - linux - windows uses: ./.github/workflows/coverity.yml secrets: inherit with: os: ${{ matrix.os }} output_prefix: tools- dependent_artifact: ${{ matrix.os }}-release-build linux-tools-build: if: '! github.event.pull_request.draft' needs: [linux-build] uses: ./.github/workflows/cmake.yml with: os: linux build_type: release artifact_name: linux-tools-build run_tests: true no_artifacts: false dependent_artifact: linux-release-build windows-tools-build: if: '! github.event.pull_request.draft' needs: [windows-build] uses: ./.github/workflows/cmake.yml with: os: windows build_type: release artifact_name: windows-tools-build run_tests: true no_artifacts: false dependent_artifact: windows-release-build windows-sscb: if: '! github.event.pull_request.draft' needs: [windows-tools-build] uses: ./.github/workflows/sscb.yml with: os: windows artifact_name: windows-tools-build output_prefix: tools- linux-sscb: if: '! 
github.event.pull_request.draft' needs: [linux-tools-build] uses: ./.github/workflows/sscb.yml with: os: linux artifact_name: linux-tools-build output_prefix: tools- bdba: if: '! github.event.pull_request.draft' needs: - linux-tools-build - windows-tools-build uses: ./.github/workflows/bdba.yml with: output_prefix: tools- version: ${{ github.ref_name }} pattern: "*-tools-build" secrets: inherit windows-acceptance: if: '! github.event.pull_request.draft' needs: [windows-build, windows-tools-build, setup-variables] strategy: fail-fast: true matrix: gpu: - gen12.5 config: - release os: - windows uses: ./.github/workflows/acceptance.yml secrets: inherit with: os: ${{ matrix.os }} build_type: ${{ matrix.config }} lib_artifact: ${{ matrix.os }}-${{ matrix.config }}-build tools_artifact: windows-tools-build gpu: ${{ matrix.gpu }} distro_family: windows distro_version: 11 test_ref: ${{ needs.setup-variables.outputs.test_ref }} linux-acceptance: if: '! github.event.pull_request.draft' needs: [linux-build, linux-tools-build, setup-variables] strategy: fail-fast: true matrix: gpu: - gen12.5 distro: - family: ubuntu version: 22.04 config: - release os: - linux uses: ./.github/workflows/acceptance.yml secrets: inherit with: os: ${{ matrix.os }} build_type: ${{ matrix.config }} lib_artifact: ${{ matrix.os }}-${{ matrix.config }}-build tools_artifact: linux-tools-build gpu: ${{ matrix.gpu }} distro_family: ${{ matrix.distro.family }} distro_version: ${{ matrix.distro.version }} test_ref: ${{ needs.setup-variables.outputs.test_ref }} experimental-off: if: '! github.event.pull_request.draft' needs: setup-variables uses: ./.github/workflows/cmake.yml strategy: fail-fast: false matrix: os: - windows - linux with: os: ${{ matrix.os }} build_type: release artifact_name: ${{ matrix.os }}-experimental-off-build run_tests: false no_artifacts: false configure_options: >- -DBUILD_EXPERIMENTAL=OFF repository: ${{ vars.DISP_REPO }} ref: ${{ needs.setup-variables.outputs.lib_ref }} secrets: token: ${{ secrets.DISP_REPO_TOKEN }} tools-experimental-off: if: '! github.event.pull_request.draft' needs: [experimental-off] uses: ./.github/workflows/cmake.yml strategy: fail-fast: false matrix: os: - windows - linux with: os: ${{ matrix.os }} build_type: release artifact_name: ${{ matrix.os }}-tools-experimental-off-build run_tests: false no_artifacts: true configure_options: >- -DBUILD_EXPERIMENTAL=OFF dependent_artifact: ${{ matrix.os }}-experimental-off-build required: if: '! github.event.pull_request.draft' needs: - lint - scorecard - hadolint - trivy - ip-leak-scan - coverity - windows-sscb - linux-sscb - bdba - windows-acceptance - linux-acceptance - tools-experimental-off runs-on: [self-hosted] steps: - name: Required Checks run: cd . 
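# The 'required' job above is an aggregation point: branch protection can gate
# on this single status check, and its 'cd .' step is a deliberate no-op that
# succeeds once every job listed in 'needs' has passed (a reading of the
# structure, not documented behavior).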
summary: if: "always()" needs: - hadolint - trivy - ip-leak-scan - coverity - windows-sscb - linux-sscb - bdba - windows-acceptance - linux-acceptance uses: ./.github/workflows/summary.yml with: output_prefix: tools- libvpl-tools-1.3.0/.github/workflows/scorecard.yml000066400000000000000000000027611473010523400222330ustar00rootroot00000000000000--- name: OSSF Scorecard permissions: read-all on: workflow_call: workflow_dispatch: jobs: scorecard: name: Scorecard runs-on: [self-hosted, linux, docker] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Checkout PR branch uses: actions/checkout@v4 with: path: source - name: Pull Docker image run: > docker pull gcr.io/openssf/scorecard:stable - name: Perform required checks run: > docker run --rm -v $(pwd):/tmp/work -w /tmp/work gcr.io/openssf/scorecard:stable --checks=Token-Permissions,Dangerous-Workflow,Binary-Artifacts --show-details --verbosity warn --local /tmp/work/source > scorecard.txt - name: Generate full report run: > docker run --rm -v $(pwd):/tmp/work -w /tmp/work gcr.io/openssf/scorecard:stable --local /tmp/work/source --format json > scorecard.json - name: Check run: > python3 source/.github/workflows/scorecard/check.py scorecard.json --config source/.github/workflows/scorecard/config.yml - name: Report if: success() || failure() run: | echo '```' >> $GITHUB_STEP_SUMMARY cat scorecard.txt >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/scorecard/000077500000000000000000000000001473010523400215025ustar00rootroot00000000000000libvpl-tools-1.3.0/.github/workflows/scorecard/check.py000066400000000000000000000033461473010523400231370ustar00rootroot00000000000000#!/usr/bin/env python """Check to see if results from scorecard exceed minimum required values.""" import argparse import json import sys import yaml def get_options(): """Parse command line.""" description = __doc__.split('\n', maxsplit=1)[0] parser = argparse.ArgumentParser(description=description) parser.add_argument('scorecard', metavar='FILE', help='JSON file containing scorecard') parser.add_argument('--config', metavar='FILE', help='YAML file with required minimum scores') return parser.parse_args() def get_scores(scorecard): """Extract checks from scorecard as key value pairs""" result = {} for check in scorecard['checks']: result[check['name']] = check['score'] return result def main(scorecard, config): """Compare scorecard scores with expectations from config. Return number of checks that fail to meet expectations. 
""" num_fails = 0 checks = config.get('Checks') scores = get_scores(scorecard) for check, minval in checks.items(): score = scores.get(check, -1) if score < minval: num_fails += 1 print(f"{check:23}: {score:2}/{minval:2}" f"{' ':6} {'Passed' if score >= minval else 'Failed'}") return num_fails if __name__ == "__main__": options = get_options() with open(options.scorecard, 'r', encoding="utf-8") as json_file: score_dict = json.load(json_file) config_dict = {'Checks': {}} if options.config: with open(options.config, 'r', encoding="utf-8") as yaml_file: config_dict = yaml.safe_load(yaml_file) sys.exit(main(score_dict, config_dict)) libvpl-tools-1.3.0/.github/workflows/scorecard/config.yml000066400000000000000000000006111473010523400234700ustar00rootroot00000000000000--- Checks: Binary-Artifacts: 10 Branch-Protection: -1 CI-Tests: -1 CII-Best-Practices: -1 Code-Review: -1 Contributors: -1 Dangerous-Workflow: 10 Dependency-Update-Tool: -1 Fuzzing: -1 License: -1 Maintained: -1 Packaging: -1 Pinned-Dependencies: -1 SAST: -1 Security-Policy: -1 Signed-Releases: -1 Token-Permissions: 10 Vulnerabilities: -1 Webhooks: -1 libvpl-tools-1.3.0/.github/workflows/sdl.yml000066400000000000000000000102031473010523400210360ustar00rootroot00000000000000--- name: Upload SDL Evidence permissions: read-all on: workflow_dispatch: inputs: summary_artifact: description: 'Summary_Artifact Zip File Name' type: string required: true label: description: 'SDL-E label (tag) for uploads' type: string required: true sdle_project: description: 'SDL-E Project ID' type: string required: true sdle_user: description: 'SDL-E User ID' type: string required: true output_prefix: description: 'Prefix to add to output artifacts' required: true default: '' type: string workflow_call: inputs: summary_artifact: description: 'Summary_Artifact Zip File Name' type: string required: true label: description: 'SDL-E label (tag) for uploads' type: string required: true sdle_project: description: 'SDL-E Project ID' type: string required: true sdle_user: description: 'SDL-E User ID' type: string required: true output_prefix: description: 'Prefix to add to output artifacts' required: true default: '' type: string secrets: SDLE_API_KEY: description: 'SDL-E Api Key for User' required: true jobs: upload_files: runs-on: [self-hosted, linux] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Checkout Repository uses: actions/checkout@v4 with: path: source - name: Download Release Summary uses: actions/download-artifact@v4 with: name: tools-release-summary path: artifacts - name: Move artifacts to target directory run: | mv artifacts/* source/.github/workflows/sdl/ ls -al source/.github/workflows/sdl/sdl/tools - name: Build Docker image run: > docker build "source/.github/workflows/sdl" -f "source/.github/workflows/sdl/Dockerfile.ubuntu.sdl" -t vpl_sdl:ubuntu - name: Upload SDL evidence run: | # note: quotes around 'EOL' prevent bash variable # expansion while writing file. 
cat <<'EOL' > upload.sh #!/bin/bash ls source/.github/workflows/sdl FILE_PATH="source/.github/workflows/sdl/sdl/tools" function publish() { TASK="$1" EVIDENCE="$2" python3 source/.github/workflows/sdl/evidence_upload.py \ --api_key "${{ secrets.SDLE_API_KEY }}" \ --user_id ${{ inputs.sdle_user }} \ --project_id ${{ inputs.sdle_project }} \ --task_id ${TASK} \ --file_paths ${FILE_PATH}/${EVIDENCE} \ --label ${{ inputs.label }} \ --output_prefix ${{ inputs.output_prefix }} } publish CT7 CT7-KnownVulnerabilities/vulns.csv publish CT7 CT7-KnownVulnerabilities/results.pdf publish CT36 CT36-RegisterComponents/components.csv publish CT37 CT37-MalwareScan/source-report.txt publish CT37 CT37-MalwareScan/windows-report.txt publish CT39 CT39-StaticAnalysis/linux-coverity.json publish CT39 CT39-StaticAnalysis/linux-cvss_report.pdf publish CT39 CT39-StaticAnalysis/linux-security_report.pdf publish CT39 CT39-StaticAnalysis/windows-coverity.json publish CT39 CT39-StaticAnalysis/windows-cvss_report.pdf publish CT39 CT39-StaticAnalysis/windows-security_report.pdf publish CT40 CT40-SecurityValidation/linux.csv publish CT40 CT40-SecurityValidation/windows.csv publish CT151 CT151-CompilerFlags/SSCB_SCAN_results-Windows.html publish CT151 CT151-CompilerFlags/SSCB_SCAN_results-Linux.html publish CT247 CT247-Trivy/trivy-report.csv EOL chmod a+x upload.sh ls -l pwd docker run --rm -v $(pwd):/tmp/work -w /tmp/work \ vpl_sdl:ubuntu ./upload.sh - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/sdl/000077500000000000000000000000001473010523400203175ustar00rootroot00000000000000libvpl-tools-1.3.0/.github/workflows/sdl/Dockerfile.ubuntu.sdl000066400000000000000000000016031473010523400244130ustar00rootroot00000000000000# ============================================================================== # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ============================================================================== ARG DOCKER_REGISTRY FROM ${DOCKER_REGISTRY}ubuntu:22.04 WORKDIR /setup COPY requirements.txt requirements.txt COPY packages.txt packages.txt ARG DEBIAN_FRONTEND=noninteractive ENV PIP_ROOT_USER_ACTION=ignore RUN apt-get update \ && xargs -a packages.txt apt-get install -y --no-install-recommends \ && rm -rf /var/lib/apt/lists/* \ && pip3 install --no-cache-dir -U -r requirements.txt \ && git config --global safe.directory '*' # Use non-root user ARG GROUP_ID=10000 ARG USER_ID=10001 RUN addgroup --gid ${GROUP_ID} --system appgroup \ && adduser --uid ${USER_ID} --system --gid ${GROUP_ID} appuser USER appuser HEALTHCHECK CMD python3 --version || exit 1 libvpl-tools-1.3.0/.github/workflows/sdl/evidence_upload.py000066400000000000000000000115011473010523400240150ustar00rootroot00000000000000""" Module Description: This script provides a function to upload files for SDL evidence. Usage: python evidence_upload.py --api_key --user_id --project_id --task_id --file_paths [--label ] """ import os import argparse from urllib.parse import quote, quote_plus import sys import requests import urllib3 urllib3.disable_warnings() def upload_file(api_key, user_id, project_id, task_id, file_path, labels, output_prefix): # pylint: disable=too-many-locals # pylint: disable=too-many-arguments # pylint: disable=too-many-positional-arguments """ Upload a file for a specific task. Parameters: api_key (str): API key for authentication. user_id (str): User ID. task_id (str): Task ID. project_id (int): Project ID. file_path (str): Path to the file to be uploaded. 
labels (list): List of labels or tags for the file upload. output_prefix (str): Prefix for output messages. """ # URL encode user-provided values user_id_encoded = quote(user_id) project_id_encoded = quote_plus(str(project_id)) task_id_encoded = quote_plus(task_id) # Build URL base_url = "https://sdl-e.app.intel.com/uploader/v1/evidence/uploads/documents/creators" upload_url = (f"{base_url}/{user_id_encoded}" f"?projectId={project_id_encoded}&taskId={task_id_encoded}") if labels: for label in labels: if label: upload_url = f"{upload_url}&label={quote_plus(label)}" # Set headers headers = { "apikey": api_key, } # Prepare files for upload in multipart/form-data format try: file_name_with_prefix = f"{output_prefix}{os.path.basename(file_path)}" with open(file_path, 'rb') as file: files = {'file': (file_name_with_prefix, file)} except OSError as exception: print(f"Failed to open file {file_path}: {exception}") sys.exit(1) try: # Make the request response = requests.post(upload_url, headers=headers, files=files, verify=False) print(f"Response Content: {response.text}") # Check for success if response.status_code == 200: response_text = response.text if 'Documents were uploaded correctly.' in response_text: print(f"File for Task {task_id} uploaded successfully!") else: print( f"Failed to upload file for Task {task_id}. Response: {response_text}" ) sys.exit(response.status_code) else: print( f"Failed to upload file for Task {task_id}. " f"Status Code: {response.status_code}, Response: {response.text}" ) sys.exit(response.status_code) except requests.RequestException as exception: print(f"Request failed for Task {task_id}: {exception}") sys.exit(1) def main(): """ Main function for uploading files. Command-line Arguments: --api_key (str): API key for authentication. --user_id (str): User ID. --project_id (int): Project ID. --task_id (str): Task ID. --file_paths (list): List of file paths. --label (list, optional): Label or tags for this file upload. --output_prefix (str): Prefix for output messages. 
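Illustrative invocation (all values hypothetical): python evidence_upload.py --api_key KEY --user_id jdoe --project_id 123 --task_id CT39 --file_paths report.pdf --label v1.3.0 --output_prefix tools-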
""" # Create argument parser parser = argparse.ArgumentParser(description="Upload files.") # Add arguments parser.add_argument("--api_key", required=True, help="API key for authentication") parser.add_argument("--user_id", required=True, help="User ID") parser.add_argument("--project_id", required=True, type=int, help="Project ID") parser.add_argument("--task_id", required=True, help="Task ID") parser.add_argument("--file_paths", required=True, nargs="+", help="List of file paths") parser.add_argument("--label", required=False, nargs="*", help="Label or tags for this file upload") parser.add_argument("--output_prefix", required=True, help="Prefix for output messages") # Parse command-line arguments args = parser.parse_args() # Upload files for file_path in args.file_paths: upload_file(args.api_key, args.user_id, args.project_id, args.task_id, file_path, args.label, args.output_prefix) sys.exit(0) if __name__ == "__main__": main() libvpl-tools-1.3.0/.github/workflows/sdl/packages.txt000066400000000000000000000000451473010523400226350ustar00rootroot00000000000000git python3 python3-pip python3-venv libvpl-tools-1.3.0/.github/workflows/sdl/requirements.txt000066400000000000000000000000101473010523400235720ustar00rootroot00000000000000requestslibvpl-tools-1.3.0/.github/workflows/setup-variables.yml000066400000000000000000000126341473010523400233740ustar00rootroot00000000000000--- permissions: read-all on: workflow_call: # Map the workflow outputs to job outputs outputs: last_release_ref: description: "Ref to last release" value: ${{ jobs.configure.outputs.last_release_ref }} test_ref: description: "Ref in test repo to be used" value: ${{ jobs.configure.outputs.test_ref }} lib_ref: description: "Ref in lib repo to be used" value: ${{ jobs.configure.outputs.lib_ref }} tools_version: description: "tools version in tool repo to be used" value: ${{ jobs.configure.outputs.tools_version }} # This workflow configures variables that are useful for other jobs. Other # jobs that depend on this one can access the variables via # needs..outputs. jobs: configure: runs-on: [self-hosted, linux] outputs: last_release_ref: ${{ env.last_release_ref }} test_ref: ${{ env.test_ref }} lib_ref: ${{ env.lib_ref }} tools_version: ${{env.tools_version}} env: last_release_ref: '' test_ref: '' lib_ref: '' tools_version: '' steps: - name: Cleanup workspace (Linux) if: always() && runner.os == 'Linux' run: sudo rm -rf ..?* .[!.]* * # Get ref of last release. - name: Checkout PR branch and all history uses: actions/checkout@v4 with: path: source fetch-depth: 0 ref: '${{ github.event.pull_request.head.sha }}' - name: Get tools version run: | cd source # Extract the version from version.txt and store it in a variable echo "tools_version=$(cat version.txt)" >> $GITHUB_ENV - name: Get ref of last release id: run run: | cd source echo "last_release_ref=$(git describe --abbrev=0 --tags --match=v*)" \ >> $GITHUB_ENV # Get ref of test to be used. 
If this is a pull request prefer a branch # of the same name as the branch being merged into otherwise try to use a # branch of the same name otherwise use main - name: Checkout tests from base_ref if: github.base_ref id: check-tests-from-base_ref uses: actions/checkout@v4 continue-on-error: true with: repository: ${{ vars.TEST_REPO }} token: ${{ secrets.TEST_REPO_TOKEN }} path: tests fetch-depth: 0 ref: ${{ github.base_ref }} - name: Use tests from base_ref if: steps.check-tests-from-base_ref.outcome == 'success' id: use-tests-from-base_ref run: | echo "test_ref=${{ github.base_ref }}" >> $GITHUB_ENV - name: Checkout tests from ref_name if: steps.check-tests-from-base_ref.outcome != 'success' id: check-tests-from-ref_name uses: actions/checkout@v4 continue-on-error: true with: repository: ${{ vars.TEST_REPO }} token: ${{ secrets.TEST_REPO_TOKEN }} path: tests fetch-depth: 0 ref: ${{ github.ref_name }} - name: Use tests from ref_name if: steps.check-tests-from-ref_name.outcome == 'success' id: use-tests-from-ref_name run: | echo "test_ref=${{ github.ref_name }}" >> $GITHUB_ENV - name: Use tests from default if: > steps.check-tests-from-base_ref.outcome != 'success' && steps.check-tests-from-ref_name.outcome != 'success' run: | echo "test_ref=main" >> $GITHUB_ENV # Get ref of lib to be used. If this is a pull request prefer a branch # of the same name as the branch being merged into otherwise try to use a # branch of the same name otherwise use main - name: Checkout lib from base_ref if: github.base_ref id: check-lib-from-base_ref uses: actions/checkout@v4 continue-on-error: true with: repository: ${{ vars.DISP_REPO }} token: ${{ secrets.DISP_REPO_TOKEN }} path: lib fetch-depth: 0 ref: ${{ github.base_ref }} - name: Use lib from base_ref if: steps.check-lib-from-base_ref.outcome == 'success' id: use-lib-from-base_ref run: | echo "lib_ref=${{ github.base_ref }}" >> $GITHUB_ENV - name: Checkout lib from ref_name if: steps.check-lib-from-base_ref.outcome != 'success' id: check-lib-from-ref_name uses: actions/checkout@v4 continue-on-error: true with: repository: ${{ vars.DISP_REPO }} token: ${{ secrets.DISP_REPO_TOKEN }} path: lib fetch-depth: 0 ref: ${{ github.ref_name }} - name: Use lib from ref_name if: steps.check-lib-from-ref_name.outcome == 'success' id: use-lib-from-ref_name run: | echo "lib_ref=${{ github.ref_name }}" >> $GITHUB_ENV - name: Use lib from default if: > steps.check-lib-from-base_ref.outcome != 'success' && steps.check-lib-from-ref_name.outcome != 'success' run: | echo "lib_ref=main" >> $GITHUB_ENV - name: Report if: always() run: | echo "last_release_ref=${{ env.last_release_ref }}" echo "test_ref=${{ env.test_ref }}" echo "lib_ref=${{ env.lib_ref }}" - name: Cleanup workspace (Linux) if: always() && runner.os == 'Linux' run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/sscb.yml000066400000000000000000000106021473010523400212110ustar00rootroot00000000000000--- name: Security Scan for Compiled Binaries permissions: read-all on: workflow_call: inputs: output_prefix: description: 'Prefix to add to output artifacts' required: false default: '' type: string os: description: 'Operating system' required: true type: string artifact_name: description: 'Artifact to scan' required: true type: string jobs: scan: runs-on: [self-hosted, "${{ inputs.os }}"] steps: - name: Cleanup workspace (Linux) if: always() && runner.os == 'Linux' run: sudo rm -rf ..?* .[!.]* * - name: Cleanup workspace (Windows) if: always() && runner.os == 'Windows' run: Remove-Item -Recurse -Force 
.\* - name: Download package uses: actions/download-artifact@v4 with: name: ${{ inputs.artifact_name }} path: package - name: Extract package (Linux) if: success() && runner.os == 'Linux' run: unzip package/*.zip -d _install - name: Extract package (Windows) if: success() && runner.os == 'Windows' run: Expand-Archive -Force -Path package\*.zip -DestinationPath _install - name: Checkout PR branch uses: actions/checkout@v4 with: path: source ref: ${{ github.event.pull_request.head.sha }} - name: Build Docker image (Linux) if: success() && runner.os == 'Linux' run: > docker build "source/.github/workflows/sscb" -f "source/.github/workflows/sscb/Dockerfile.ubuntu.sscb" --build-arg USER_ID=$(id -u) --build-arg GROUP_ID=$(id -g) -t vpl_sscb:ubuntu --build-arg "SSCB_TOOL_URL=${{ vars.COMPILER_SETTINGS_TOOL_URL }}" - name: Run SSCB Scan (Linux) if: success() && runner.os == 'Linux' run: | cat >action.sh <= 3 else 'Unknown' # Function to determine if an issue should be included based on various criteria def issues_included(issue_key, severity, ignored_issues, ignored_severities, ignored_file, file_name): """ Determines if an issue should be included based on various criteria. Args: issue_key (str): The issue key. severity (str): The severity of the issue. ignored_issues (list): A list of issue keys to ignore. ignored_severities (list): A list of severities to ignore. ignored_file (list): A list of file names to ignore. file_name (str): The file name of the issue. Returns: bool: True if the issue should be included, False otherwise. """ return (issue_key not in ignored_issues and severity not in ignored_severities and file_name not in ignored_file) # Main function to extract issues from JSON data def get_issues(data, ignored_issues, ignored_severities, ignored_file): """ Extract issues from the parsed JSON data, excluding specific criteria. Args: data (dict): The parsed data from JSON. ignored_issues (list): A list of issue keys to ignore. ignored_severities (list): A list of severities to ignore. ignored_file (list): A list of file names to ignore. Returns: list of dict: A list of issue dictionaries. """ issues = [] for file_key, issue_details in data.items(): if not file_key.startswith('File'): continue full_file_name = issue_details.get('File Name', 'Unknown') file_name = extract_file_name(full_file_name) for key, value in issue_details.items(): if not (isinstance(value, str) and '[' in value and ']' in value): continue start = value.find('[') + 1 end = value.find(']') severity_with_info = value[start:end].strip().split(' ')[0] if issues_included(key, severity_with_info, ignored_issues, ignored_severities, ignored_file, file_name): issues.append({ "issue_name": key, "severity": severity_with_info, "file": file_name }) return issues def pass_or_fail(issues): """ Determine the overall pass/fail status based on the list of issues. Args: issues (list of dict): A list of issues. Returns: str: "Pass" if there are no issues, otherwise "Fail". """ # Determine if the result is pass or fail based on remaining issues return "Pass" if not issues else "Fail" def main(): """ Main part of the script. Parses command-line arguments to obtain JSON and YAML file names, loads data from these files, processes it to filter out ignored issues, and prints a summary of the analysis including the overall status and details of each issue. 
""" # Create the parser parser = argparse.ArgumentParser() # Add arguments for the JSON and YAML file names parser.add_argument('json_file') parser.add_argument('yaml_file') # Parse the command-line arguments args = parser.parse_args() # Use the parsed arguments to get the file names json_data = load_json(args.json_file) ignore_config = load_yaml(args.yaml_file) # Retrieve ignored issues from YAML configuration ignored_issues_list, ignored_severities_list, ignored_file_list = get_ignored_issues( ignore_config) # Get the list of issues while excluding the ignored ones issues_list = get_issues(json_data, ignored_issues_list, ignored_severities_list, ignored_file_list) # Determine the overall pass or fail status result = pass_or_fail(issues_list) print("Status:", result) # Output the overall status and counts of each severity level print("High severity count:", high_count(issues_list)) print("Medium severity count:", medium_count(issues_list)) print("Low severity count:", low_count(issues_list)) print("Info severity count:", info_count(issues_list)) # Output the details of each issue not ignored for issue in issues_list: print(f"Issue Name: {issue['issue_name']}; " f"Severity: {issue['severity']}; " f"File: {issue['file']}") if __name__ == "__main__": main() libvpl-tools-1.3.0/.github/workflows/summary.yml000066400000000000000000000144401473010523400217600ustar00rootroot00000000000000--- name: Release Summary permissions: read-all on: workflow_call: inputs: output_prefix: description: 'Prefix to add to output artifacts' required: false default: '' type: string env: BDBA: ${{ inputs.output_prefix }}bdba-scan HADOLINT: ${{ inputs.output_prefix }}hadolint IPLEAKS: ${{ inputs.output_prefix }}ip-leak-scan TRIVY: ${{ inputs.output_prefix }}trivy COVERITY_L: linux-${{ inputs.output_prefix }}coverity-scan COVERITY_W: windows-${{ inputs.output_prefix }}coverity-scan DIFF_L: linux-${{ inputs.output_prefix }}diff-report DIFF_W: windows-${{ inputs.output_prefix }}diff-report AV_S: source-malware-scan AV_L: linux-tools-build-malware-scan AV_W: windows-tools-build-malware-scan UTEST_L: linux-tools-build-utests UTEST_W: windows-tools-build-utests SSCB_L: linux-${{ inputs.output_prefix }}sscb SSCB_W: windows-${{ inputs.output_prefix }}sscb RHEL86: rhel8.6-release-gen12.5-acceptance SLES154: sles15.4-release-gen12.5-acceptance U2204: ubuntu22.04-release-gen12.5-acceptance WIN11: windows11-release-gen12.5-acceptance jobs: report: runs-on: [self-hosted, Linux, docker] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Download All Artifacts uses: actions/download-artifact@v4 with: path: artifacts - name: Checkout PR branch uses: actions/checkout@v4 with: path: source ref: ${{ github.event.pull_request.head.sha }} - name: BOM artifacts if: > !cancelled() run: | . source/.github/workflows/summary/tools.sh export source_root=artifacts export dest_root=summary/bom/tools copy_all_artifacts "$DIFF_L" Linux copy_all_artifacts "$DIFF_W" Windows - name: SDL artifacts if: > !cancelled() run: | . 
source/.github/workflows/summary/tools.sh export source_root=artifacts export dest_root=summary/sdl/tools copy_artifact "$BDBA" vulns.csv CT7-KnownVulnerabilities copy_artifact "$BDBA" results.pdf CT7-KnownVulnerabilities copy_artifact "$BDBA" components.csv CT36-RegisterComponents copy_artifact "$AV_S" report.txt CT37-MalwareScan source-report.txt copy_artifact "$AV_L" report.txt CT37-MalwareScan linux-report.txt copy_artifact "$AV_W" report.txt CT37-MalwareScan windows-report.txt copy_artifact "$COVERITY_L/json" errors_v9_full.json \ CT39-StaticAnalysis linux-coverity.json copy_artifact "$COVERITY_L" cvss_report.pdf \ CT39-StaticAnalysis linux-cvss_report.pdf copy_artifact "$COVERITY_L" security_report.pdf \ CT39-StaticAnalysis linux-security_report.pdf copy_artifact "$COVERITY_W/json" errors_v9_full.json \ CT39-StaticAnalysis windows-coverity.json copy_artifact "$COVERITY_W" cvss_report.pdf \ CT39-StaticAnalysis windows-cvss_report.pdf copy_artifact "$COVERITY_W" security_report.pdf \ CT39-StaticAnalysis windows-security_report.pdf copy_artifact "$TRIVY" trivy-report.csv CT247-Trivy copy_all_artifacts $SSCB_L CT151-CompilerFlags copy_all_artifacts $SSCB_W CT151-CompilerFlags - name: SWLC artifacts run: | . source/.github/workflows/summary/tools.sh export source_root=artifacts export dest_root=summary/swlc/tools copy_all_artifacts "$IPLEAKS" ip_leaks - name: Quality artifacts if: > !cancelled() run: | . source/.github/workflows/summary/tools.sh export source_root=artifacts export dest_root=summary/quality copy_artifact "$UTEST_L" linux.xml unit/tools linux.xml copy_artifact "$UTEST_W" windows.xml unit/tools windows.xml copy_all_artifacts "$U2204" acceptance/Ubuntu22.04/gen12.5 copy_all_artifacts "$WIN11" acceptance/Win11/gen12.5 copy_all_artifacts "$SLES154" acceptance/SLES15.4/gen12.5 copy_all_artifacts "$RHEL86" acceptance/RHEL8.6/gen12.5 copy_artifact linux-performance summary.csv \ performance linux-performance-summary.csv copy_artifact linux-performance summary.md \ performance linux-performance-summary.md - name: Build Docker image run: > docker build ${{ inputs.docker_opts }} -f "source/.github/workflows/summary/Dockerfile.ubuntu.summary" -t vpl_summary:ubuntu "source/.github/workflows/summary" - name: Evaluate Results run: | if [ -d summary/quality ] then docker run --rm -v $(pwd):/tmp/work -w /tmp/work \ vpl_summary:ubuntu \ python3 \ "/tmp/work/source/.github/workflows/summary/summarize_testing.py" \ /tmp/work/summary/quality fi - name: Report security related tests run: | if [ -d summary/quality/unit/tools ] then mkdir -p -v summary/sdl/tools/CT40-SecurityValidation docker run --rm -v $(pwd):/tmp/work -w /tmp/work \ vpl_summary:ubuntu \ python3 \ "/tmp/work/source/.github/workflows/summary/filter_xunit.py" \ "/tmp/work/summary/quality/unit/tools/linux.xml" \ -o "/tmp/work/summary/sdl/tools/CT40-SecurityValidation/linux.csv" \ -i Double Null Unsupported Invalid docker run --rm -v $(pwd):/tmp/work -w /tmp/work \ vpl_summary:ubuntu \ python3 \ "/tmp/work/source/.github/workflows/summary/filter_xunit.py" \ "/tmp/work/summary/quality/unit/tools/windows.xml" \ -o \ "/tmp/work/summary/sdl/tools/CT40-SecurityValidation/windows.csv" \ -i Double Null Unsupported Invalid fi - name: Upload Summary if: success() || failure() uses: actions/upload-artifact@v4 with: name: ${{ inputs.output_prefix }}release-summary path: summary/* - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * 
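A note on the "Report security related tests" step above: it drives filter_xunit.py, whose source follows below. A minimal local equivalent (paths hypothetical, assuming the dependencies from requirements.txt are installed) would be:

    python3 source/.github/workflows/summary/filter_xunit.py \
        summary/quality/unit/tools/linux.xml \
        -o summary/sdl/tools/CT40-SecurityValidation/linux.csv \
        -i Double Null Unsupported Invalid

This keeps only test cases whose names contain one of the -i substrings and writes name,result rows to the output CSV.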
libvpl-tools-1.3.0/.github/workflows/summary/000077500000000000000000000000001473010523400212325ustar00rootroot00000000000000libvpl-tools-1.3.0/.github/workflows/summary/Dockerfile.ubuntu.summary000066400000000000000000000013111473010523400262350ustar00rootroot00000000000000# ============================================================================== # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ============================================================================== ARG DOCKER_REGISTRY FROM ${DOCKER_REGISTRY}ubuntu:22.04 WORKDIR /setup COPY requirements.txt requirements.txt COPY packages.txt packages.txt ARG DEBIAN_FRONTEND=noninteractive ENV PIP_ROOT_USER_ACTION=ignore RUN apt-get update \ && xargs -a packages.txt apt-get install -y --no-install-recommends \ && rm -rf /var/lib/apt/lists/* \ && pip3 install --no-cache-dir -U -r requirements.txt \ && git config --global safe.directory '*' HEALTHCHECK CMD python3 --version || exit 1 libvpl-tools-1.3.0/.github/workflows/summary/filter_xunit.py000066400000000000000000000062671473010523400243330ustar00rootroot00000000000000#!/usr/bin/env python3 ############################################################################ # Copyright (C) 2020 Intel Corporation # # SPDX-License-Identifier: MIT ############################################################################ """ Post-process xUnit tests. """ import argparse import os import csv import defusedxml.ElementTree as DET ## Alternate get testcase ... name +if skipped failure or error def read_command_line(cmd_line): """Read command line arguments""" parser = argparse.ArgumentParser( description=globals()['__doc__'], formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('xunit_path', metavar="FILE", help="xUnit file to process", type=os.path.abspath, action='store') parser.add_argument('-i', dest='include_list', nargs='+', metavar="substring", help="list of test name substrings to include", default=["Double", "Null", "Unsupported", "Invalid"]) parser.add_argument('-o', dest='csv_path', metavar="csvfile", help="path to the generated output file", default="out.csv") args = parser.parse_args(args=cmd_line) return args def read_xunit(xunit_path): """Read data from xunit file into dictionary""" print("filename:", xunit_path) tree = DET.parse(xunit_path) root = tree.getroot() data = {} for test_case in root.findall("testcase"): name = "" if "classname" in test_case.attrib: classname = test_case.attrib["classname"].split(".") if classname: name += ".".join(classname) + "." 
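# e.g. classname "suite.sub" plus test name "case1" produces the key "suite.sub.case1" (illustrative values)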
name += test_case.attrib['name'] failure = test_case.find("failure") if failure is not None: data[name] = "failure" continue error = test_case.find("error") if error is not None: data[name] = "error" continue skip = test_case.find("skipped") if skip is not None: data[name] = "skipped" continue data[name] = "pass" return data def filter_xunit(data, include_list): """Filter xunit data""" filtered_data = { k: v for k, v in data.items() if any(substring in k for substring in include_list) } return filtered_data def write_csv(data, csv_file): """Write data to CSV file""" with open(csv_file, 'w', newline='', encoding="utf-8") as csvfile: csvwriter = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL) for k, result in data.items(): csvwriter.writerow([k, result]) def run(args): """main entry point""" raw_data = read_xunit(args.xunit_path) filtered_data = filter_xunit(raw_data, args.include_list) write_csv(filtered_data, args.csv_path) if __name__ == '__main__': run(read_command_line(None)) libvpl-tools-1.3.0/.github/workflows/summary/packages.txt000066400000000000000000000000451473010523400235500ustar00rootroot00000000000000git python3 python3-pip python3-venv libvpl-tools-1.3.0/.github/workflows/summary/requirements.txt000066400000000000000000000000251473010523400245130ustar00rootroot00000000000000defusedxml xlsxwriterlibvpl-tools-1.3.0/.github/workflows/summary/summarize_testing.py000066400000000000000000001161641473010523400253660ustar00rootroot00000000000000#!/usr/bin/env python3 ############################################################################ # Copyright (C) 2020 Intel Corporation # # SPDX-License-Identifier: MIT ############################################################################ """ Summarize validation results. The current version of this script expects to be able to use standard Operating System ways of traversing a Release artifact tree. This can be accomplished by mounting the sharepoint folder in OneDrive, or by downloading a copy of the folder. Supported test formats (auto-detected): XML (*.xml): Robot Framework XML Format jUnit XML Format (xUnit) CSV (*.csv): Teamcity Test Result CSV Grid Report CSV General CSV Delimiter may be: ',' (comma), ';' (semi-colon), or '\\t' (tab) (optional) Test Suite Column may be: 'suite', 'suite name', or 'test suite' Test Name Column may be: 'test', 'test name', 'test case', or 'case' Result Column may be: 'status', or 'result' Test name is: "." 
if Test Suite is set, otherwise "" Passing Results are (case insensitive): 'pass', 'passed', or 'ok' Failing Results are (case insensitive): 'fail', 'failed', or 'error' Skipped Results are (case insensitive): 'skip', 'skipped', 'ignore', or 'ignored' The defect database file is formatted as json in the form: { "Package Name" : { "Jira ID" : [ "Test Name", ] }, } for example: { "msdk_compatibility" : { "VPL7-1762" : [ "hevce_10b_420_i010_reset.30", "hevce_8b_420_iyuv_reset.2", "hevce_8b_420_iyuv_reset.30" ] }, "operating systems" : { "VPL7-3005" : [ "oneVPL-cpu.Pipeline.1toN.Fused.Decvpp.H264.to I420 & BGRA system memory" ], "VPL7-3072" : [ "oneVPL-gen.Decode.H264.Handles corrupted data", "oneVPL-gen.Decode.H264.I420" ] } } """ # pylint: disable=too-many-lines import argparse from argparse import RawTextHelpFormatter from contextlib import contextmanager from glob import glob import os import sys import csv import json from xml.etree.ElementTree import SubElement # nosec from xml.etree.ElementTree import Element # nosec import xlsxwriter from defusedxml import minidom import defusedxml.ElementTree as DET SCRIPT_PATH = os.path.realpath( os.path.join(os.getcwd(), os.path.dirname(__file__))) def pretty_print_xml(root): """Format xml for legibility""" serial_xml = DET.tostring(root) parsed_xml = minidom.parseString(serial_xml) return parsed_xml.toprettyxml() @contextmanager def pushd(dest): """change working directory""" cur_dir = os.getcwd() os.chdir(dest) try: yield finally: os.chdir(cur_dir) INDENT_LEVEL = 0 INDENT = "" # pylint: disable=global-statement @contextmanager def log_scope(): """Add a scope level to the logging stack""" global INDENT_LEVEL global INDENT INDENT_LEVEL += 1 INDENT = "\t" * INDENT_LEVEL yield INDENT_LEVEL -= 1 INDENT = "\t" * INDENT_LEVEL def log(msg: str): """Log a message, indenting all lines based on the log scope""" for line in msg.splitlines(): print(f"{INDENT}{line}") def log_summary(name, summary): """Log the summary of a test suite""" unique = [ summary.unique_attempt_count, summary.unique_run_count, summary.unique_pass_count, summary.unique_fail_count, summary.unique_skip_count ] unique = [str(value) for value in unique] total = [ summary.total_attempt_count, summary.total_run_count, summary.total_pass_count, summary.total_fail_count, summary.total_skip_count ] total = [str(value) for value in total] if name is not None: log(name) log(" Attempt\tRun\tPass\tFail\tSkip") log(" Total:\t" + ("\t".join(total))) log("Unique:\t" + ("\t".join(unique))) class TestResult: """Results of running a test (possibly more than once)""" def __init__(self, name): self.classname = None self.name = name self.runs = 0 self.passes = 0 self.fails = 0 self.skips = 0 self._dirty = True def clean(self): """Clear "dirty" flag for summary caching""" self._dirty = False @property def is_dirty(self): """Check "dirty" flag for summary caching""" return self._dirty def squash(self): """Rewrite result to look like a single test execution""" if self.fails: self.runs = 1 self.passes = 0 self.fails = 1 self.skips = 0 elif self.passes: self.runs = 1 self.passes = 1 self.fails = 0 self.skips = 0 elif self.skips: self.runs = 0 self.passes = 0 self.fails = 0 self.skips = 1 self._dirty = True def fill_xunit(self, suite_xml, test_class=None): """Add test result to an XML element in xUnit format""" for _ in range(self.passes): testcase = SubElement(suite_xml, "testcase") testcase.attrib["name"] = str(self.name) testcase.attrib["status"] = "Pass" if test_class is not None: testcase.attrib["classname"] 
= str(test_class) for _ in range(self.fails): testcase = SubElement(suite_xml, "testcase") testcase.attrib["name"] = str(self.name) if test_class is not None: testcase.attrib["classname"] = str(test_class) testcase.attrib["status"] = "Fail" failure = SubElement(testcase, "failure") failure.attrib["type"] = "Test Failed" for _ in range(self.skips): testcase = SubElement(suite_xml, "testcase") if test_class is not None: testcase.attrib["classname"] = str(test_class) testcase.attrib["status"] = "Skip" testcase.attrib["name"] = str(self.name) skipped = SubElement(testcase, "skipped") skipped.text = "Skipped" @property def pass_count(self): """Number of passes recorded""" return self.passes @property def fail_count(self): """Number of fails recorded""" return self.fails @property def skip_count(self): """Number of skips recorded""" return self.skips @property def run_count(self): """Number of runs recorded""" return self.runs @property def attempt_count(self): """Number of attempts recorded""" return self.runs + self.skips def update(self, other): """Add results from another test result""" self.runs += other.runs self.passes += other.passes self.fails += other.fails self.skips += other.skips self._dirty = True def add_pass(self): """Record a pass""" self.passes += 1 self.runs += 1 self._dirty = True return self def add_skip(self): """Record a skip""" self.skips += 1 self._dirty = True return self def add_fail(self): """Record a fail""" self.fails += 1 self.runs += 1 self._dirty = True return self @property def unique_pass_count(self): """Report if all recorded results should be rolled up as a pass Reported as 1 or 0 to simplify accumulator logic higher in the stack """ if self.runs > 0 and self.fails == 0: return 1 return 0 @property def unique_fail_count(self): """Report if all recorded results should be rolled up as a fail Reported as 1 or 0 to simplify accumulator logic higher in the stack """ if self.fails > 0: return 1 return 0 @property def unique_skip_count(self): """Report if all recorded results should be rolled up as a skip Reported as 1 or 0 to simplify accumulator logic higher in the stack """ if self.skips > 0 and self.runs == 0: return 1 return 0 @property def unique_run_count(self): """Report if all recorded results should be rolled up as a run Reported as 1 or 0 to simplify accumulator logic higher in the stack """ if self.runs > 0: return 1 return 0 @property def unique_attempt_count(self): """Report if all recorded results should be rolled up as an attempt Reported as 1 or 0 to simplify accumulator logic higher in the stack """ if self.skips > 0 or self.runs > 0: return 1 return 0 class TestContainer: """A collection of test results""" def __init__(self): self.tests = {} self._dirty = True def clean(self): """Clear "dirty" flag for summary caching""" self._dirty = False for test in self.tests.values(): test.clean() @property def is_dirty(self): """Check "dirty" flag for summary caching""" if self._dirty: return True for test in self.tests.values(): if test.is_dirty: return True return False def _get_test(self, name): """find or create a test by name""" if name not in self.tests: self._dirty = True test = TestResult(name) self.tests[name] = test return self.tests[name] @property def unique_pass_count(self): """Report number of cases where for all tests with the same name all tests are passing""" count = 0 for test in self.tests.values(): count += test.unique_pass_count return count @property def unique_fail_count(self): """Report number of cases where for all tests with the 
same name at least one test is failing""" count = 0 for test in self.tests.values(): count += test.unique_fail_count return count @property def unique_skip_count(self): """Report number of cases where for all tests with the same name all tests are skipped""" count = 0 for test in self.tests.values(): count += test.unique_skip_count return count @property def unique_run_count(self): """Report number of cases where for all tests with the same name not all tests are skipped""" count = 0 for test in self.tests.values(): count += test.unique_run_count return count @property def unique_attempt_count(self): """Report number of cases where for all tests with the same name all tests have been run or skipped.""" count = 0 for test in self.tests.values(): count += test.unique_attempt_count return count @property def total_pass_count(self): """Report the total count of passes recorded for all tests""" count = 0 for test in self.tests.values(): count += test.pass_count return count @property def total_fail_count(self): """Report the total count of fails recorded for all tests""" count = 0 for test in self.tests.values(): count += test.fail_count return count @property def total_skip_count(self): """Report the total count of skips recorded for all tests""" count = 0 for test in self.tests.values(): count += test.skip_count return count @property def total_run_count(self): """Report the total count of runs recorded for all tests""" count = 0 for test in self.tests.values(): count += test.run_count return count @property def total_attempt_count(self): """Report the total count of attempts recorded for all tests""" count = 0 for test in self.tests.values(): count += test.attempt_count return count @property def test_count(self): """Report the total count of tests recorded""" count = len(self.tests) return count def add_pass(self, name): """record a test passing""" return self._get_test(name).add_pass() def add_skip(self, name): """record a test skipped""" return self._get_test(name).add_skip() def add_fail(self, name): """record a test failing""" return self._get_test(name).add_fail() def update(self, other): """Add results from another test collection""" for name, test_result in other.tests.items(): result = self._get_test(name) result.update(test_result) class CIDictReader(csv.DictReader): """A Case insensitive DictReader. All names will be casefolded (aggressively lowercased)""" @property def fieldnames(self): """Access field names""" return [ field.strip().casefold() for field in csv.DictReader.fieldnames.fget(self) ] # pylint: disable=no-member def next(self): """Iterate""" return CIDictReader(csv.DictReader.next(self)) class TestSuite(TestContainer): """A collection of test results grouped together to represent a higher order result """ def __init__(self, name, location): super().__init__() self.name = name self.location = location def composite_result(self): """merge all suites under this object into one suite (degenerate case)""" return self def squash(self, location): """Rewrite to represent a single execution of each test""" self.location = location for test in self.tests.values(): test.squash() self._dirty = True def print_log(self): """Print summary to the log""" log_summary(self.name, self) def _read_robot_test(self, element, parent_name=None): """Read in results from a Robot Framework Test""" tests_found = 0 test_name = element.attrib["name"] if parent_name: suite_name = parent_name + "." 
+ test_name else: suite_name = test_name for status in element.findall("./status"): status_name = status.attrib["status"].lower() if status_name == "pass": tests_found += 1 self.add_pass(suite_name) elif status_name == "skip": tests_found += 1 self.add_skip(suite_name) else: tests_found += 1 self.add_fail(suite_name) return tests_found def _read_robot_suite(self, element, parent_name=None): """Read in results from a Robot Framework Suite""" tests_found = 0 suite_name = element.attrib["name"] if parent_name: suite_name = parent_name + "." + suite_name for test in element.findall("./test"): tests_found += self._read_robot_test(test, suite_name) for suite in element.findall("./suite"): tests_found += self._read_robot_suite(suite, suite_name) return tests_found def _read_robot_result(self, root): """Read in results from a Robot Framework xml file root element""" tests_found = 0 for test in root.findall("./test"): tests_found += self._read_robot_test(test) for suite in root.findall("./suite"): tests_found += self._read_robot_suite(suite) return tests_found # pylint: disable=too-many-branches def _read_cdash_test(self, element): """Read in results from a CDash test report""" tests_found = 1 test_status = element.attrib["Status"].lower() test_name = None if not test_name: name_el = element.find("Name") if name_el is not None: test_name = name_el.text if not test_name: name_el = element.find("FullName") if name_el is not None: test_name = name_el.text if not test_name: name_el = element.find("FullCommandLine") if name_el is not None and name_el.text: test_name = name_el.text if not test_name: test_name = "unnamed" if test_status == "passed": self.add_pass(test_name) elif test_status == "failed": self.add_fail(test_name) elif test_status == "timeout": self.add_fail(test_name) elif test_status == "other_fault": self.add_fail(test_name) elif test_status == "notrun": self.add_skip(test_name) elif test_status == "disabled": self.add_skip(test_name) else: self.add_fail(test_name) return tests_found def _read_cdash_result(self, root): """Read in results from a CDash test report root element""" tests_found = 0 for test in root.findall("./Testing/Test"): tests_found += self._read_cdash_test(test) return tests_found def _read_xunit_result(self, root): """Read in results from a JUnit xml file""" tests_found = 0 for test_case in root.findall("testcase"): name = "" if "classname" in test_case.attrib: classname = test_case.attrib["classname"].split(".") if classname: name += ".".join(classname) + "." name += test_case.attrib['name'] failure = test_case.find("failure") if failure is not None: tests_found += 1 self.add_fail(name) continue error = test_case.find("error") if error is not None: tests_found += 1 self.add_fail(name) continue skip = test_case.find("skipped") if skip is not None: tests_found += 1 self.add_skip(name) continue tests_found += 1 self.add_pass(name) return tests_found def _read_csv_result(self, result: str): """Read in results from a TeamCity test format csv file""" tests_found = 0 with open(result, newline='', encoding="utf-8") as csv_file: try: dialect = csv.Sniffer().sniff(csv_file.readline(), delimiters=';,\t') except: print(f"Error reading {result}", file=sys.stderr) raise csv_file.seek(0) reader = CIDictReader(csv_file, dialect=dialect) for row in reader: name = "" for suite_col in ['suite', 'suite name', 'test suite']: if suite_col in row: name += row[suite_col] break name += "." 
if name else "" for test_col in ['test', 'test name', 'test case', 'case']: if test_col in row: name += row[test_col] break status = None for status_col in ['status', 'result']: if status_col in row: status = row[status_col].casefold() break if name and status: if status in ['pass', 'passed', 'ok']: tests_found += 1 self.add_pass(name) elif status in ['fail', 'failed', 'error']: tests_found += 1 self.add_fail(name) elif status in ['skip', 'skipped', 'ignore', 'ignored']: tests_found += 1 self.add_skip(name) return tests_found def read_result(self, result: str, result_format: str): """Read in results from a file""" tests_found = 0 if result_format == "xml": tree = DET.parse(result) root = tree.getroot() if root.tag == "robot": tests_found = self._read_robot_result(root) elif root.tag in ["testsuite", "testsuites", "testcase"]: tests_found = self._read_xunit_result(root) elif root.tag in ["Site", "NewDataSet"]: tests_found = self._read_cdash_result(root) elif result_format == "csv": tests_found = self._read_csv_result(result) return tests_found def fill_xunit(self, suites_xml, package=None): """Write out suite as JUnit format XML""" testsuite = SubElement(suites_xml, "testsuite") if package is not None: testsuite.attrib["package"] = str(package) if self.name is not None: testsuite.attrib["name"] = str(self.name) if package is None: testsuite.attrib["name"] = str(self.name) else: testsuite.attrib["name"] = str(package) + "/" + self.name elif package is not None: testsuite.attrib["name"] = str(package) summary = self testsuite.attrib["tests"] = str(summary.total_run_count) testsuite.attrib["failures"] = str(summary.total_fail_count) testsuite.attrib["skipped"] = str(summary.total_skip_count) for test in self.tests.values(): test.fill_xunit(testsuite, test_class=package) class TestPackage: """Intermediate level Container for test information. 
Collects Suites, that hold Results""" def __init__(self, name): self.name = name self.suites = [] self._composite = None self._dirty = True def clean(self): """Clear "dirty" flag for summary caching""" self._dirty = False for suite in self.suites: suite.clean() @property def is_dirty(self): """Check "dirty" flag for summary caching""" if self._dirty: return True for suite in self.suites: if suite.is_dirty: return True return False def composite_result(self): """merge all suites under this object into one suite""" if self.is_dirty: self._composite = TestSuite(self.name, None) for suite in self.suites: self._composite.update(suite) return self._composite def add_suite(self, suite: TestSuite): """Add a suite under this package""" self._dirty = True self.suites.append(suite) return suite def print_log(self): """Print summary to the log""" log_summary(self.name, self.composite_result()) with log_scope(): for suite in self.suites: suite.print_log() def scan_test_results(self, source: str, suite_name: str, suite_per_file=False): """Read through all files in a folder to find test results""" test_result_files = [] with pushd(source): test_result_files.extend([('xml', os.path.abspath(match)) for match in glob("*.xml")]) test_result_files.extend([('csv', os.path.abspath(match)) for match in glob("*.csv")]) count = 0 if not suite_per_file: suite = TestSuite(suite_name, source) for result_format, result_file in test_result_files: tests_found = None if suite_per_file: suite = TestSuite( suite_name + "/" + os.path.basename(result_file), result_file) tests_found = suite.read_result(result_file, result_format) if tests_found: count += 1 if suite_per_file: suite.squash(suite.location) self.add_suite(suite) if not suite_per_file: if count > 0: suite.squash(source) self.add_suite(suite) return count def scan_tests(self, folder_path: str, suite_name: str = None, suite_per_file=False): """Scan folder for test results.""" with log_scope(): self.scan_test_results(folder_path, suite_name, suite_per_file=suite_per_file) for folder in os.scandir(folder_path): if folder.is_dir(): if suite_name: sub_suite = f'{suite_name}/{folder.name}' else: sub_suite = folder.name self.scan_tests(folder.path, sub_suite, suite_per_file=suite_per_file) def fill_xunit(self, suites_xml): """Write out package as JUnit format XML""" for suite in self.suites: suite.fill_xunit(suites_xml, package=self.name) class TestReport: """Top level container for test information. 
Collects Packages, that hold Suites, that hold Results""" def __init__(self): self.packages = [] self._composite = None self._dirty = True def clean(self): """Clear "dirty" flag for summary caching""" self._dirty = False for package in self.packages: package.clean() @property def is_dirty(self): """Check "dirty" flag for summary caching""" if self._dirty: return True for package in self.packages: if package.is_dirty: return True return False def composite_result(self): """merge all suites under this object into one suite""" if self.is_dirty: self._composite = TestSuite("Report", None) for package in self.packages: self._composite.update(package.composite_result()) return self._composite def add_package(self, name): """Add a package to the report""" result = TestPackage(name) self.packages.append(result) return result def print_log(self): """Print summary to the log""" log_summary(None, self.composite_result()) with log_scope(): for package in self.packages: package.print_log() def scan_tests(self, folder_path: str, suite_per_file=False): """Scan a folder for test results""" for folder in os.scandir(folder_path): if not folder.is_dir(): continue test_suite = self.add_package(folder.name) test_suite.scan_tests(folder.path, suite_per_file=suite_per_file) def as_xunit(self): """Return the report represented as JUnit format XML""" suites_xml = Element('testsuites') summary = self.composite_result() suites_xml.attrib["tests"] = str(summary.total_run_count) suites_xml.attrib["failures"] = str(summary.total_fail_count) for package in self.packages: package.fill_xunit(suites_xml) return suites_xml def get_suite_defects(summary, package_defects): """Get the set of defects that apply to a given suite""" result = set() for test_name in summary.tests: if summary.tests[test_name].fail_count > 0: found = False for defect, tests in package_defects.items(): if test_name in tests: found = True result.add(defect) if not found: result.add("unknown") return result # pylint: disable=too-many-locals,too-many-nested-blocks,too-many-branches def write_excel_package_page(workbook: xlsxwriter.Workbook, package: TestPackage, report_all_tests=False, defects=None): """Write an Excel tab with package details""" if defects is None: defects = {} suite_header_fmt = workbook.add_format() suite_header_fmt.set_rotation(90) suite_header_fmt.set_bold(True) header_fmt = workbook.add_format() header_fmt.set_bold(True) summary = package.composite_result() sheet = workbook.add_worksheet() sheet.name = package.name sheet.set_column("A:A", 9) sheet.write_string("A1", "Jira", cell_format=header_fmt) sheet.set_column("B:B", 70) sheet.write_string("B1", "Test", cell_format=header_fmt) f_row = 0 f_col = 2 for suite in package.suites: suite_name = suite.name if suite_name is None: suite_name = "" sheet.set_column(f_col, f_col, 2.8) sheet.write_string(f_row, f_col, suite_name, cell_format=suite_header_fmt) f_col += 1 width = f_col f_row += 1 for test_name in summary.tests: if report_all_tests or summary.tests[test_name].fail_count > 0: f_col = 0 jiras = set() for jira, tests in defects.items(): if test_name in tests: jiras.add(jira) sheet.write_string(f_row, f_col, " ".join(sorted(jiras))) f_col += 1 sheet.write(f_row, f_col, test_name) f_col += 1 for suite in package.suites: if test_name in suite.tests: if report_all_tests: if suite.tests[test_name].unique_fail_count: sheet.write(f_row, f_col, "Fail") elif suite.tests[test_name].unique_skip_count: sheet.write(f_row, f_col, "Skip") elif suite.tests[test_name].unique_pass_count: 
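# note: the unique_* properties roll repeated runs of one test up to a single Fail/Skip/Pass verdict, so exactly one of these branches applies per test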
sheet.write(f_row, f_col, "Pass") elif test_name in suite.tests and suite.tests[ test_name].fail_count > 0: sheet.write(f_row, f_col, "X") f_col += 1 f_row += 1 sheet.autofilter(0, 0, f_row - 1, width - 1) def write_excel_dashboard_structure(workbook, dashboard): """Write summary dashboard high level page structure""" col_break_fmt = workbook.add_format() col_break_fmt.bg_color = '#F2F2F2' col_break_fmt.set_left(1) col_break_fmt.set_right(1) header_break_fmt = workbook.add_format() header_break_fmt.set_bold(True) header_break_fmt.set_bottom(1) header_col_break_fmt = workbook.add_format() header_col_break_fmt.bg_color = '#F2F2F2' header_col_break_fmt.set_left(1) header_col_break_fmt.set_right(1) header_col_break_fmt.set_bottom(1) shared_header_fmt = workbook.add_format() shared_header_fmt.set_align('center') shared_header_fmt.set_bold(True) bold_fmt = workbook.add_format() bold_fmt.set_bold(True) dashboard.set_column("A:A", 6, cell_format=bold_fmt) dashboard.set_column("B:C", 6.5) dashboard.set_column("D:D", 1.9, cell_format=col_break_fmt) dashboard.set_column("E:E", 20.15) dashboard.set_column("F:M", 4.45) dashboard.set_column("N:N", 1.9, cell_format=col_break_fmt) dashboard.set_column("O:O", 48.25) dashboard.set_column("P:S", 4.45) dashboard.set_row(1, None, cell_format=header_break_fmt) dashboard.write_blank("D2", None, cell_format=header_col_break_fmt) dashboard.write_blank("N2", None, cell_format=header_col_break_fmt) for cells, header in [ ("A1:C1", "Summary"), ("F1:I1", "Total"), ("J1:M1", "Unique"), ]: dashboard.merge_range(cells, header, cell_format=shared_header_fmt) for col, header in [ ("B", "Count"), ("C", "Rate"), ("E", "Category"), ("F", "Tests"), ("G", "Pass"), ("H", "Fail"), ("I", "Skip"), ("J", "Tests"), ("K", "Pass"), ("L", "Fail"), ("M", "Skip"), ("O", "Suite"), ("P", "Tests"), ("Q", "Pass"), ("R", "Fail"), ("S", "Skip"), ("T", "Jiras"), ]: dashboard.write_string(f"{col}2", header) def write_excel_dashboard_summary(workbook, dashboard): """Write top level summary portion of dashboard""" footnote_fmt = workbook.add_format() footnote_fmt.set_font_size(8) footnote_fmt.set_align('left') footnote_fmt.set_align('top') rate_field_fmt = workbook.add_format() rate_field_fmt.set_num_format("0.00%") dashboard.write("A3", "Total") dashboard.write("A4", "Tests") dashboard.write_formula("B4", "=SUM(F:F)") dashboard.write("A5", "Pass*") dashboard.write_formula("B5", "=SUM(G:G)") dashboard.write_formula("C5", "=B5/SUM(B5:B6)", cell_format=rate_field_fmt) dashboard.write("A6", "Fail*") dashboard.write_formula("B6", "=SUM(H:H)") dashboard.write_formula("C6", "=B6/SUM(B5:B6)", cell_format=rate_field_fmt) dashboard.write("A7", "Skip") dashboard.write_formula("B7", "=SUM(I:I)") dashboard.write_formula("C7", "=B7/B4", cell_format=rate_field_fmt) dashboard.merge_range("A8:C8", "*Rate excludes skipped", cell_format=footnote_fmt) dashboard.write("A10", "Unique") dashboard.write("A11", "Tests") dashboard.write_formula("B11", "=SUM(J:J)") dashboard.write("A12", "Pass") dashboard.write_formula("B12", "=SUM(K:K)") dashboard.write_formula("C12", "=B12/SUM(B12:B13)", cell_format=rate_field_fmt) dashboard.write("A13", "Fail") dashboard.write_formula("B13", "=SUM(L:L)") dashboard.write_formula("C13", "=B13/SUM(B12:B13)", cell_format=rate_field_fmt) dashboard.write("A14", "Skip") dashboard.write_formula("B14", "=SUM(M:M)") dashboard.write_formula("C14", "=B14/B11", cell_format=rate_field_fmt) def write_excel_package_summary(workbook, dashboard, package, row): """Write summary for one package on 
dashboard""" # workbook is passed to allow access to book wide information like formats del workbook summary = package.composite_result() dashboard.write(f"E{row}", package.name) dashboard.write(f"F{row}", summary.total_attempt_count) dashboard.write(f"G{row}", summary.total_pass_count) dashboard.write(f"H{row}", summary.total_fail_count) dashboard.write(f"I{row}", summary.total_skip_count) dashboard.write(f"J{row}", summary.unique_attempt_count) dashboard.write(f"K{row}", summary.unique_pass_count) dashboard.write(f"L{row}", summary.unique_fail_count) dashboard.write(f"M{row}", summary.unique_skip_count) # pylint: disable=too-many-arguments,too-many-positional-arguments def write_excel_suite_summary(workbook, dashboard, suite, row, target_folder, package_defects): """Write summary for one suite on dashboard""" # workbook is passed to allow access to book wide information like formats del workbook summary = suite.composite_result() if suite.location is not None: location = os.path.relpath(suite.location, target_folder) location.replace("\\", "/") dashboard.write_url(f"O{row}", f"external:{location}", string=suite.name) else: dashboard.write(f"O{row}", suite.name) dashboard.write(f"P{row}", summary.total_attempt_count) dashboard.write(f"Q{row}", summary.total_pass_count) dashboard.write(f"R{row}", summary.total_fail_count) dashboard.write(f"S{row}", summary.total_skip_count) dashboard.write(f"T{row}", ",".join(get_suite_defects(summary, package_defects))) def write_excel(report: TestReport, target, report_all_tests=False, defects=None): """Write report as an Excel file""" if defects is None: defects = {} target = os.path.abspath(target) target_folder = os.path.dirname(target) workbook = xlsxwriter.Workbook(target) dash = workbook.add_worksheet() dash.name = "Dashboard" write_excel_dashboard_structure(workbook, dash) write_excel_dashboard_summary(workbook, dash) summary = report.composite_result() row = 3 for package in report.packages: package_defects = defects.get(package.name, {}) write_excel_package_summary(workbook, dash, package, row) row += 1 for suite in package.suites: write_excel_suite_summary(workbook, dash, suite, row, target_folder, package_defects) row += 1 if summary.total_fail_count > 0: write_excel_package_page(workbook, package, report_all_tests=report_all_tests, defects=package_defects) workbook.close() def read_command_line(cmd_line): """ Read command line arguments """ # Now read full arguments parser = argparse.ArgumentParser(description=globals()['__doc__'], formatter_class=RawTextHelpFormatter) parser.add_argument('source', help="Validation data dir", type=os.path.abspath, action='store') parser.add_argument('--defects', help="Database of known defects", type=os.path.abspath) parser.add_argument('--suite-per-file', help="Treat each result file as a separate suite", action='store_true') parser.add_argument('--report-all-tests', help="List all tests in summary report", action='store_true') # parse arguments (will exit here on invalid args or help) args = parser.parse_args(args=cmd_line) args.dest = args.source return args def load_defects(db_path): """Load defect database""" if not db_path or not os.path.isfile(db_path): return {} with open(db_path, newline='', encoding="utf-8") as db_file: json_data = json.load(db_file) return json_data def run(args): """main entry point""" validation_summary_path = os.path.join(args.dest, "validation-summary.xlsx") xunit_path = os.path.join(args.dest, "validation-summary-xunit.xml") report = TestReport() report.scan_tests(args.source, 
suite_per_file=args.suite_per_file) report.print_log() defects = load_defects(args.defects) if os.path.isfile(validation_summary_path): os.remove(validation_summary_path) write_excel(report, validation_summary_path, report_all_tests=args.report_all_tests, defects=defects) with open(xunit_path, "w", encoding="utf8") as xml_file: xunit_report = report.as_xunit() pretty_xunit_report = pretty_print_xml(xunit_report) xml_file.write(pretty_xunit_report) if __name__ == '__main__': run(read_command_line(None)) libvpl-tools-1.3.0/.github/workflows/summary/tools.sh000066400000000000000000000025261473010523400227330ustar00rootroot00000000000000#!/bin/bash ############################################################################### # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT ############################################################################### # Utility functions for artifact handling function copy_artifact() { source_dir="$source_root/$1" source_file="$2" dest_dir="$dest_root/$3" dest_file="${4:-$source_file}" if [ -f "$source_dir/$source_file" ]; then if [ ! -d "$dest_dir" ]; then mkdir -p "$dest_dir" fi cp "$source_dir/$source_file" "$dest_dir/$dest_file" fi } function copy_all_artifacts() { source_dir="$source_root/$1" dest_dir="$dest_root/$2" if [ -d "$source_dir" ]; then if [ ! -d "$dest_dir" ]; then mkdir -p "$dest_dir" fi cp -r "$source_dir/." "$dest_dir/" fi } function copy_glob_artifact() { local prev_shopt=$(shopt -p nullglob) shopt -s nullglob source_glob="$source_root/$1" dest_dir="$2" dest_file="$3" saved_IFS="$IFS" IFS= for f in $source_glob do f_rel="$(realpath -s --relative-to="$source_root" "$f")" f_dir="$(dirname "$f_rel")" f_name="$(basename "$f_rel")" copy_artifact "$f_rel" "$f_name" "$dest_dir" "$dest_file" done IFS="$saved_IFS" ${prev_shopt} }libvpl-tools-1.3.0/.github/workflows/trivy.yml000066400000000000000000000073571473010523400214470ustar00rootroot00000000000000--- name: Trivy permissions: read-all on: workflow_call: inputs: output_prefix: description: 'Prefix to add to output artifacts' required: false default: '' type: string jobs: scan: runs-on: [self-hosted, linux, docker] steps: - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * - name: Checkout PR branch uses: actions/checkout@v4 with: path: source - name: Pull docker image run: docker pull aquasec/trivy:0.51.1 - name: Create output location run: | mkdir artifact echo "Trivy Report" > artifact/trivy.txt - name: Scan run: | docker run \ -v /var/run/docker.sock:/var/run/docker.sock \ -v $HOME/Library/Caches:/root/.cache/ \ -v $(pwd):/work \ -w /work \ --attach stderr --attach stdout \ aquasec/trivy:0.51.1 \ fs \ --exit-code 1 \ --list-all-pkgs \ . >> artifact/trivy.txt - name: Scan for SDL Evidence if: (success() || failure()) run: | docker run \ -v /var/run/docker.sock:/var/run/docker.sock \ -v $HOME/Library/Caches:/root/.cache/ \ -v $(pwd):/work \ -w /work \ --attach stderr --attach stdout \ aquasec/trivy:0.51.1 \ fs \ --exit-code 1 \ --list-all-pkgs \ --format template \ --template "@/work/source/.github/workflows/trivy/csv.tpl" \ --output artifact/trivy-report.csv \ . 
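# Illustrative local equivalent of the template-driven scan above (not an
# original workflow step; assumes Docker is available and a checkout under
# ./source):
#   docker run --rm -v "$(pwd)":/work -w /work aquasec/trivy:0.51.1 \
#     fs --list-all-pkgs --format template \
#     --template "@/work/source/.github/workflows/trivy/csv.tpl" \
#     --output trivy-report.csv .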
- name: Lint Dockerfiles for SDL Evidence if: (success() || failure()) run: | docker run \ -v /var/run/docker.sock:/var/run/docker.sock \ -v $HOME/Library/Caches:/root/.cache/ \ -v $(pwd):/work \ -w /work \ --attach stderr --attach stdout \ aquasec/trivy:0.51.1 \ --ignorefile source/.trivyignore.yaml \ --format table --output artifact/ct248-report.txt \ config source docker run \ -v /var/run/docker.sock:/var/run/docker.sock \ -v $HOME/Library/Caches:/root/.cache/ \ -v $(pwd):/work \ -w /work \ --attach stderr --attach stdout \ aquasec/trivy:0.51.1 \ --ignorefile source/.trivyignore.yaml \ --format json --output artifact/ct248-report.json \ --exit-code 1 \ config source - name: Scan for SPDX for SBOM if: (success() || failure()) run: | docker run \ -v /var/run/docker.sock:/var/run/docker.sock \ -v $HOME/Library/Caches:/root/.cache/ \ -v $(pwd):/work \ -w /work \ --attach stderr --attach stdout \ aquasec/trivy:0.51.1 \ fs \ --exit-code 1 \ --list-all-pkgs \ --format spdx-json \ --output artifact/trivy-spdx.json \ source - name: Summarize if: (failure()) run: | echo '```' >> $GITHUB_STEP_SUMMARY cat artifact/ct248-report.txt >> $GITHUB_STEP_SUMMARY cat artifact/trivy.txt >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY - name: Report if: (success() || failure()) run: | cat artifact/trivy.txt - name: Record Artifacts uses: actions/upload-artifact@v4 if: (success() || failure()) with: name: ${{ inputs.output_prefix }}trivy path: artifact/* - name: Cleanup workspace run: sudo rm -rf ..?* .[!.]* * libvpl-tools-1.3.0/.github/workflows/trivy/000077500000000000000000000000001473010523400207125ustar00rootroot00000000000000libvpl-tools-1.3.0/.github/workflows/trivy/csv.tpl000066400000000000000000000015121473010523400222250ustar00rootroot00000000000000{{ range . }} Trivy Vulnerability Scan Results ({{- .Target -}}) VulnerabilityID,Severity,CVSS Score,Title,Library,Vulnerable Version,Fixed Version,Information URL,Triage Information {{ range .Vulnerabilities }} {{- .VulnerabilityID }}, {{- .Severity }}, {{- range $key, $value := .CVSS }} {{- if (eq $key "nvd") }} {{- .V3Score -}} {{- end }} {{- end }}, {{- quote .Title }}, {{- quote .PkgName }}, {{- quote .InstalledVersion }}, {{- quote .FixedVersion }}, {{- .PrimaryURL }} {{ else -}} No vulnerabilities found at this time. {{ end }} Trivy Dependency Scan Results ({{ .Target }}) ID,Name,Version,Notes {{ range .Packages -}} {{- quote .ID }}, {{- quote .Name }}, {{- quote .Version }} {{ else -}} No dependencies found at this time. {{ end }} {{ end }}libvpl-tools-1.3.0/.gitignore000066400000000000000000000002121473010523400161230ustar00rootroot00000000000000# build dirs _*/ ~*/ /build # auto-generated files *.pyc *.pyo *.pyd # ignore Editor files *.code-workspace # sample output files out.*libvpl-tools-1.3.0/.gitlint000066400000000000000000000100071473010523400156110ustar00rootroot00000000000000# Edit this file as you like. # # All these sections are optional. Each section with the exception of [general] # represents one rule and each key in it is an option for that specific rule. # # Rules and sections can be referenced by their full name or by id. For example # section "[body-max-line-length]" could be written as "[B1]". Full section # names are used in here for clarity. 
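# The custom rules enabled below ([title-is-capitalized],
# [title-is-imperative]) are implemented in the module named by extra-path.
# For reference, a user-defined gitlint rule takes this shape (a minimal
# sketch; the class body here is hypothetical, not the project's actual rule):
#   from gitlint.rules import CommitRule, RuleViolation
#   class TitleIsCapitalized(CommitRule):
#       name = "title-is-capitalized"
#       id = "UC1"
#       def validate(self, commit):
#           title = commit.message.title
#           if title and not title[0].isupper():
#               return [RuleViolation(self.id, "Title is not capitalized", title)]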
# [general] # Ignore certain rules, this example uses both full name and id # B6 body-is-missing # T5 title-must-not-contain-word ignore=B6, T5 # verbosity should be a value between 1 and 3, the commandline -v flags take # precedence over this # verbosity = 2 # By default gitlint will ignore merge, revert, fixup and squash commits. # ignore-merge-commits=true # ignore-revert-commits=true # ignore-fixup-commits=true # ignore-squash-commits=true # Ignore any data send to gitlint via stdin # ignore-stdin=true # Fetch additional meta-data from the local repository when manually passing a # commit message to gitlint via stdin or --commit-msg. Disabled by default. # staged=true # Enable debug mode (prints more output). Disabled by default. # debug=true # Enable community contributed rules # See http://jorisroovers.github.io/gitlint/contrib_rules for details # contrib=contrib-title-conventional-commits,CC1 # Set the extra-path where gitlint will search for user defined rules # See http://jorisroovers.github.io/gitlint/user_defined_rules for details extra-path=script/gitlint/custom_gitlint_rules.py [title-is-capitalized] [title-is-imperative] # This is an example of how to configure the "title-max-length" rule and # set the line-length it enforces to 80 [title-max-length] line-length=50 # [title-must-not-contain-word] # Comma-separated list of words that should not occur in the title. Matching is # case insensitive. It's fine if the keyword occurs as part of a larger word (so # "WIPING" will not cause a violation, but "WIP: my title" will. # words=wip [title-match-regex] # python like regex (https://docs.python.org/2/library/re.html) that the # commit-msg title must be matched to. # Note that the regex can contradict with other rules if not used correctly # (e.g. title-must-not-contain-word). # regex=^[A-Z].+ [body-max-line-length] line-length=72 [body-min-length] min-length=0 # [body-is-missing] # Whether to ignore this rule on merge commits (which typically only have a # title) # default = True # ignore-merge-commits=false # [body-changed-file-mention] # List of files that need to be explicitly mentioned in the body when they are # changed This is useful for when developers often erroneously edit certain # files or git submodules. By specifying this rule, developers can only change # the file when they explicitly reference it in the commit message. # files=gitlint/rules.py,README.md # [author-valid-email] # python like regex (https://docs.python.org/2/library/re.html) that the commit # author email address should be matched to For example, use the following regex # if you only want to allow email addresses from foo.com # regex=[^@]+@foo.com # [ignore-by-title] # Ignore certain rules for commits of which the title matches a regex # E.g. Match commit titles that start with "Release" # regex=^Release(.*) # Ignore certain rules, you can reference them by their id or by their full name # Use 'all' to ignore all rules # ignore=T1,body-min-length # [ignore-by-body] # Ignore certain rules for commits of which the body has a line that matches a # regex E.g. Match bodies that have a line that that contain "release" # regex=(.*)release(.*) # # Ignore certain rules, you can reference them by their id or by their full name # Use 'all' to ignore all rules # ignore=T1,body-min-length # This is a contrib rule - a community contributed rule. These are disabled by # default. You need to explicitly enable them one-by-one by adding them to the # "contrib" option under [general] section above. 
# [contrib-title-conventional-commits] # Specify allowed commit types. For details see: # https://www.conventionalcommits.org/ # types = bugfix,user-story,epiclibvpl-tools-1.3.0/.hadolint.yaml000066400000000000000000000002171473010523400167040ustar00rootroot00000000000000--- ignored: - DL3006 - DL3008 - DL3013 - DL3016 - DL3018 - DL3028 trustedRegistries: - docker.io - gcr.io - "*.redhat.com" libvpl-tools-1.3.0/.pre-commit-config.yaml000066400000000000000000000032151473010523400204220ustar00rootroot00000000000000# pre-commit configuration file # You may automatically run pre-commit when committing # by installing hooks as follows: # pre-commit install # pre-commit install --hook-type commit-msg --- fail_fast: false default_language_version: python: python3 exclude: 'ext/' repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.5.0 hooks: - id: check-yaml - repo: https://github.com/google/yapf rev: v0.40.2 hooks: - id: yapf - repo: https://github.com/PyCQA/pylint rev: v3.3.1 hooks: - id: pylint args: [ "--disable=import-error,no-name-in-module,duplicate-code", ] - repo: https://github.com/iconmaster5326/cmake-format-pre-commit-hook rev: v0.6.9 hooks: - id: cmake-format - repo: https://github.com/pre-commit/mirrors-clang-format rev: v14.0.6 hooks: - id: clang-format types_or: [c++, c] - repo: https://github.com/cpplint/cpplint rev: 1.5.5 hooks: - id: cpplint files: \.(h|hh|hpp|hxx|h|c|cc|cpp|cxx|c)$ - repo: https://github.com/jorisroovers/gitlint rev: v0.19.1 hooks: - id: gitlint - id: gitlint-ci # hook for CI environments args: [--commits, "origin/main..HEAD"] - repo: https://github.com/PyCQA/bandit rev: 1.7.7 hooks: - id: bandit args: ["-c", "bandit.yaml"] - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.23.2 hooks: - id: check-github-actions - id: check-github-workflows - repo: https://github.com/adrienverge/yamllint.git rev: v1.33.0 hooks: - id: yamllint args: [--strict] libvpl-tools-1.3.0/.pylintrc000066400000000000000000000524501473010523400160130ustar00rootroot00000000000000[MAIN] # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Clear in-memory caches upon conclusion of linting. Useful if running pylint # in a server-like mode. clear-cache-post-run=no # Load and enable all available extensions. Use --list-extensions to see a list # all available extensions. #enable-all-extensions= # In error mode, messages with a category besides ERROR or FATAL are # suppressed, and no reports are done by default. Error mode is compatible with # disabling specific errors. #errors-only= # Always return a 0 (non-error) status code, even if lint errors are found. # This is primarily useful in continuous integration scripts. #exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-allow-list= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) extension-pkg-whitelist= # Return non-zero exit code if any of these messages/categories are detected, # even if score is above --fail-under value. 
Syntax same as enable. Messages # specified are enabled, while categories only check already-enabled messages. fail-on= # Specify a score threshold under which the program will exit with error. fail-under=10 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. #from-stdin= # Files or directories to be skipped. They should be base names, not paths. ignore=CVS # Add files or directories matching the regular expressions patterns to the # ignore-list. The regex matches against paths and can be in Posix or Windows # format. Because '\\' represents the directory delimiter on Windows systems, # it can't be used as an escape character. ignore-paths= # Files or directories matching the regular expression patterns are skipped. # The regex matches against base names, not paths. The default value ignores # Emacs file locks ignore-patterns=^\.# # List of module names for which member attributes should not be checked and # will not be imported (useful for modules/projects where namespaces are # manipulated during runtime and thus existing member attributes cannot be # deduced by static analysis). It supports qualified module names, as well as # Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use, and will cap the count on Windows to # avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes # Resolve imports to .pyi stubs if available. May reduce no-member messages and # increase not-an-iterable messages. prefer-stubs=no # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. py-version=3.12 # Discover python modules and packages in the file system subtree. recursive=no # Add paths to the list of the source roots. Supports globbing patterns. The # source root is an absolute path or a path relative to the current working # directory used to determine a package namespace for modules located under the # source root. source-roots= # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # In verbose mode, extra non-checker-related info will be displayed. #verbose= [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- # naming-style. If left empty, argument names will be checked with the set # naming style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= # Bad variable names which should always be refused, separated by a comma. 
bad-names=foo, bar, baz, toto, tutu, tata # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused bad-names-rgxs= # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. If left empty, class attribute names will be checked # with the set naming style. #class-attribute-rgx= # Naming style matching correct class constant names. class-const-naming-style=UPPER_CASE # Regular expression matching correct class constant names. Overrides class- # const-naming-style. If left empty, class constant names will be checked with # the set naming style. #class-const-rgx= # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- # style. If left empty, class names will be checked with the set naming style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- # style. If left empty, constant names will be checked with the set naming # style. #const-rgx= # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 # Naming style matching correct function names. function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- # naming-style. If left empty, function names will be checked with the set # naming style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. good-names=i, j, k, ex, Run, _ # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted good-names-rgxs= # Include a hint for the correct naming format with invalid-name. include-naming-hint=no # Naming style matching correct inline iteration names. inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. If left empty, inline iteration names will be checked # with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty # Regular expression matching correct type alias names. If left empty, type # alias names will be checked with the set naming style. #typealias-rgx= # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. 
#typevar-rgx= # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- # naming-style. If left empty, variable names will be checked with the set # naming style. #variable-rgx= [CLASSES] # Warn about protected attribute access inside special methods check-protected-access-in-special-methods=no # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp, asyncSetUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs [DESIGN] # List of regular expressions of class ancestor names to ignore when counting # public methods (see R0903) exclude-too-few-public-methods= # List of qualified class names to ignore when counting class parents (see # R0901) ignored-parents= # Maximum number of arguments for function / method. max-args=5 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branches for function / method body. max-branches=12 # Maximum number of locals for function / method body. max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of positional arguments for function / method. max-positional-arguments=5 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of return / yield for function / method body. max-returns=6 # Maximum number of statements in function / method body. max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [EXCEPTIONS] # Exceptions that will emit a warning when caught. overgeneral-exceptions=builtins.BaseException,builtins.Exception [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )?<?https?://\S+>?$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Maximum number of characters on a single line. max-line-length=100 # Maximum number of lines in a module. max-module-lines=1000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [IMPORTS] # List of modules that can be imported at any level, not just the top level # one. allow-any-import-level= # Allow explicit reexports by alias from a package __init__. allow-reexport-from-package=no # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Deprecated modules which should not be used, separated by a comma. deprecated-modules= # Output a graph (.gv or any supported image format) of external dependencies # to the given file (report RP0402 must not be disabled). ext-import-graph= # Output a graph (.gv or any supported image format) of all (i.e.
internal and # external) dependencies to the given file (report RP0402 must not be # disabled). import-graph= # Output a graph (.gv or any supported image format) of internal dependencies # to the given file (report RP0402 must not be disabled). int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant # Couples of modules and preferred modules, separated by a comma. preferred-modules= [LOGGING] # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules=logging [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, # UNDEFINED. confidence=HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to # disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". disable=raw-checker-failed, bad-inline-option, locally-disabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, use-symbolic-message-instead, use-implicit-booleaness-not-comparison-to-string, use-implicit-booleaness-not-comparison-to-zero # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable= [METHOD_ARGS] # List of qualified names (i.e., library.method) which require a timeout # parameter e.g. 'requests.api.get,requests.api.post' timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO # Regular expression of note tags to take in consideration. notes-rgx= [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=sys.exit,argparse.parse_error # Let 'consider-using-join' be raised when the separator to join on would be # non-empty (resulting in expected fixes of the type: ``"- " + " - # ".join(items)``) suggest-join-with-non-empty-separator=yes [REPORTS] # Python expression which should return a score less than or equal to 10. 
You # have access to the variables 'fatal', 'error', 'warning', 'refactor', # 'convention', and 'info' which contain the number of messages in each # category, as well as 'statement' which is the total number of statements # analyzed. This score is used by the global evaluation report (RP0004). evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. msg-template= # Set the output format. Available formats are: text, parseable, colorized, # json2 (improved json format), json (old json format) and msvs (visual # studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= # Tells whether to display a full report or only the messages. reports=no # Activate the evaluation score. score=yes [SIMILARITIES] # Comments are removed from the similarity computation ignore-comments=yes # Docstrings are removed from the similarity computation ignore-docstrings=yes # Imports are removed from the similarity computation ignore-imports=yes # Signatures are removed from the similarity computation ignore-signatures=yes # Minimum lines number of a similarity. min-similarity-lines=4 [SPELLING] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 # Spelling dictionary name. No available dictionaries : You need to install # both the python package and the system dependency for enchant to work. spelling-dict= # List of comma separated words that should be considered directives if they # appear at the beginning of a comment and should not be checked. spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to the private dictionary (see the # --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no [STRING] # This flag controls whether inconsistent-quotes generates a warning when the # character used as a quote delimiter is used inconsistently within a module. check-quote-consistency=no # This flag controls whether the implicit-str-concat should generate a warning # on implicit string concatenation in sequences defined over several lines. check-str-concat-over-line-jumps=no [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. 
ignore-on-opaque-inference=yes # List of symbolic message names to ignore for Mixin members. ignored-checks-for-mixins=no-member, not-async-context-manager, not-context-manager, attribute-defined-outside-init # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 # Regex pattern to define which classes are considered mixins. mixin-class-rgx=.*[Mm]ixin # List of decorators that change the signature of a decorated function. signature-mutators= [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid defining new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of names allowed to shadow builtins allowed-redefined-builtins= # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io libvpl-tools-1.3.0/.style.yapf000066400000000000000000000252571473010523400162520ustar00rootroot00000000000000[style] # Align closing bracket with visual indentation. align_closing_bracket_with_visual_indent=True # Allow dictionary keys to exist on multiple lines. For example: # # x = { # ('this is the first element of a tuple', # 'this is the second element of a tuple'): # value, # } allow_multiline_dictionary_keys=False # Allow lambdas to be formatted on more than one line. allow_multiline_lambdas=False # Allow splitting before a default / named assignment in an argument list. allow_split_before_default_or_named_assigns=True # Allow splits before the dictionary value. allow_split_before_dict_value=True # Let spacing indicate operator precedence. For example: # # a = 1 * 2 + 3 / 4 # b = 1 / 2 - 3 * 4 # c = (1 + 2) * (3 - 4) # d = (1 - 2) / (3 + 4) # e = 1 * 2 - 3 # f = 1 + 2 + 3 + 4 # # will be formatted as follows to indicate precedence: # # a = 1*2 + 3/4 # b = 1/2 - 3*4 # c = (1+2) * (3-4) # d = (1-2) / (3+4) # e = 1*2 - 3 # f = 1 + 2 + 3 + 4 # arithmetic_precedence_indication=False # Number of blank lines surrounding top-level function and class # definitions. blank_lines_around_top_level_definition=2 # Insert a blank line before a class-level docstring. blank_line_before_class_docstring=False # Insert a blank line before a module docstring. 
blank_line_before_module_docstring=False # Insert a blank line before a 'def' or 'class' immediately nested # within another 'def' or 'class'. For example: # # class Foo: # # <------ this blank line # def method(): # ... blank_line_before_nested_class_or_def=False # Do not split consecutive brackets. Only relevant when # dedent_closing_brackets is set. For example: # # call_func_that_takes_a_dict( # { # 'key1': 'value1', # 'key2': 'value2', # } # ) # # would reformat to: # # call_func_that_takes_a_dict({ # 'key1': 'value1', # 'key2': 'value2', # }) coalesce_brackets=False # The column limit. column_limit=79 # The style for continuation alignment. Possible values are: # # - SPACE: Use spaces for continuation alignment. This is default behavior. # - FIXED: Use fixed number (CONTINUATION_INDENT_WIDTH) of columns # (ie: CONTINUATION_INDENT_WIDTH/INDENT_WIDTH tabs) for continuation # alignment. # - VALIGN-RIGHT: Vertically align continuation lines with indent # characters. Slightly right (one more indent character) if cannot # vertically align continuation lines with indent characters. # # Options FIXED and VALIGN-RIGHT are only available when USE_TABS is # enabled. continuation_align_style=SPACE # Indent width used for line continuations. continuation_indent_width=4 # Put closing brackets on a separate line, dedented, if the bracketed # expression can't fit in a single line. Applies to all kinds of brackets, # including function definitions and calls. For example: # # config = { # 'key1': 'value1', # 'key2': 'value2', # } # <--- this bracket is dedented and on a separate line # # time_series = self.remote_client.query_entity_counters( # entity='dev3246.region1', # key='dns.query_latency_tcp', # transform=Transformation.AVERAGE(window=timedelta(seconds=60)), # start_ts=now()-timedelta(days=3), # end_ts=now(), # ) # <--- this bracket is dedented and on a separate line dedent_closing_brackets=False # Disable the heuristic which places each list element on a separate line # if the list is comma-terminated. disable_ending_comma_heuristic=False # Place each dictionary entry onto its own line. each_dict_entry_on_separate_line=True # The regex for an i18n comment. The presence of this comment stops # reformatting of that line, because the comments are required to be # next to the string they translate. i18n_comment= # The i18n function call names. The presence of this function stops # reformatting on that line, because the string it has cannot be moved # away from the i18n comment. i18n_function_call= # Indent blank lines. indent_blank_lines=False # Put closing brackets on a separate line, indented, if the bracketed # expression can't fit in a single line. Applies to all kinds of brackets, # including function definitions and calls. For example: # # config = { # 'key1': 'value1', # 'key2': 'value2', # } # <--- this bracket is indented and on a separate line # # time_series = self.remote_client.query_entity_counters( # entity='dev3246.region1', # key='dns.query_latency_tcp', # transform=Transformation.AVERAGE(window=timedelta(seconds=60)), # start_ts=now()-timedelta(days=3), # end_ts=now(), # ) # <--- this bracket is indented and on a separate line indent_closing_brackets=False # Indent the dictionary value if it cannot fit on the same line as the # dictionary key. For example: # # config = { # 'key1': # 'value1', # 'key2': value1 + # value2, # } indent_dictionary_value=False # The number of columns to use for indentation. indent_width=4 # Join short lines into one line.
E.g., single line 'if' statements. join_multiple_lines=True # Do not include spaces around selected binary operators. For example: # # 1 + 2 * 3 - 4 / 5 # # will be formatted as follows when configured with "*,/": # # 1 + 2*3 - 4/5 no_spaces_around_selected_binary_operators= # Use spaces around default or named assigns. spaces_around_default_or_named_assign=False # Use spaces around the power operator. spaces_around_power_operator=False # The number of spaces required before a trailing comment. # This can be a single value (representing the number of spaces # before each trailing comment) or list of values (representing # alignment column values; trailing comments within a block will # be aligned to the first column value that is greater than the maximum # line length within the block). For example: # # With spaces_before_comment=5: # # 1 + 1 # Adding values # # will be formatted as: # # 1 + 1 # Adding values <-- 5 spaces between the end of the statement and comment # # With spaces_before_comment=15, 20: # # 1 + 1 # Adding values # two + two # More adding # # longer_statement # This is a longer statement # short # This is a shorter statement # # a_very_long_statement_that_extends_beyond_the_final_column # Comment # short # This is a shorter statement # # will be formatted as: # # 1 + 1 # Adding values <-- end of line comments in block aligned to col 15 # two + two # More adding # # longer_statement # This is a longer statement <-- end of line comments in block aligned to col 20 # short # This is a shorter statement # # a_very_long_statement_that_extends_beyond_the_final_column # Comment <-- the end of line comments are aligned based on the line length # short # This is a shorter statement # spaces_before_comment=2 # Insert a space between the ending comma and closing bracket of a list, # etc. space_between_ending_comma_and_closing_bracket=True # Split before arguments split_all_comma_separated_values=False # Split before arguments, but do not split all subexpressions recursively # (unless needed). split_all_top_level_comma_separated_values=False # Split before arguments if the argument list is terminated by a # comma. split_arguments_when_comma_terminated=False # Set to True to prefer splitting before '+', '-', '*', '/', '//', or '@' # rather than after. split_before_arithmetic_operator=False # Set to True to prefer splitting before '&', '|' or '^' rather than # after. split_before_bitwise_operator=True # Split before the closing bracket if a list or dict literal doesn't fit on # a single line. split_before_closing_bracket=True # Split before a dictionary or set generator (comp_for). For example, note # the split before the 'for': # # foo = { # variable: 'Hello world, have a nice day!' # for variable in bar if variable != 42 # } split_before_dict_set_generator=True # Split before the '.' if we need to split a longer expression: # # foo = ('This is a really long string: {}, {}, {}, {}'.format(a, b, c, d)) # # would reformat to something like: # # foo = ('This is a really long string: {}, {}, {}, {}' # .format(a, b, c, d)) split_before_dot=False # Split after the opening paren which surrounds an expression if it doesn't # fit on a single line. split_before_expression_after_opening_paren=False # If an argument / parameter list is going to be split, then split before # the first argument. split_before_first_argument=False # Set to True to prefer splitting before 'and' or 'or' rather than # after. split_before_logical_operator=True # Split named assignments onto individual lines. 
split_before_named_assigns=True # Set to True to split list comprehensions and generators that have # non-trivial expressions and multiple clauses before each of these # clauses. For example: # # result = [ # a_long_var + 100 for a_long_var in xrange(1000) # if a_long_var % 10] # # would reformat to something like: # # result = [ # a_long_var + 100 # for a_long_var in xrange(1000) # if a_long_var % 10] split_complex_comprehension=False # The penalty for splitting right after the opening bracket. split_penalty_after_opening_bracket=300 # The penalty for splitting the line after a unary operator. split_penalty_after_unary_operator=10000 # The penalty of splitting the line around the '+', '-', '*', '/', '//', # ``%``, and '@' operators. split_penalty_arithmetic_operator=300 # The penalty for splitting right before an if expression. split_penalty_before_if_expr=0 # The penalty of splitting the line around the '&', '|', and '^' # operators. split_penalty_bitwise_operator=300 # The penalty for splitting a list comprehension or generator # expression. split_penalty_comprehension=80 # The penalty for characters over the column limit. split_penalty_excess_character=7000 # The penalty incurred by adding a line split to the unwrapped line. The # more line splits added the higher the penalty. split_penalty_for_added_line_split=30 # The penalty of splitting a list of "import as" names. For example: # # from a_very_long_or_indented_module_name_yada_yad import (long_argument_1, # long_argument_2, # long_argument_3) # # would reformat to something like: # # from a_very_long_or_indented_module_name_yada_yad import ( # long_argument_1, long_argument_2, long_argument_3) split_penalty_import_names=0 # The penalty of splitting the line around the 'and' and 'or' # operators. split_penalty_logical_operator=300 # Use the Tab character for indentation. use_tabs=False libvpl-tools-1.3.0/.yamllint.yaml000066400000000000000000000001331473010523400167300ustar00rootroot00000000000000--- extends: default rules: comments-indentation: disable truthy: {check-keys: false} libvpl-tools-1.3.0/CHANGELOG.md000066400000000000000000000034571473010523400157620ustar00rootroot00000000000000# Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Intel® Video Processing Library (Intel® VPL) tools provide access to hardware accelerated video decode, encode, and frame processing capabilities on Intel® GPUs from the command line. ## [Unreleased] ## [1.3.0] - 2024-12-13 ### Added - Screen content coding tools for AV1 to `sample_encode` - GTK renderer option to `sample_decode` and `sample_multi_transcode` - `-fullscreen` option to `sample_decode` and `sample_multi_transcode` when using GTK. Enter fullscreen with Ctrl+f and exit with Esc - Improved support for Python 3.12 development environments. ### Fixed - Bootstrap to support Debian distributions that do not define `ID_LIKE`. 
## [1.2.0] - 2024-08-30 ### Added - VVC decode support to `sample_decode` - Embedded version information to all shared libraries ### Changed - Metrics monitor library to now build statically by default ## [1.1.0] - 2024-06-28 ### Added - `MFX_SURFACE_TYPE_VULKAN_IMG2D` to vpl-inspect - YUV400 JPEG Enc for Linux VAAPI ### Fixed - va-attrib for vaapiallocator - D3D11 texture not being released in `val-surface-sharing` test tool ## [1.0.0] - 2024-04-26 ### Added - Intel® VPL API 2.11 support - Command line tools. They have been moved from the libvpl repository (https://github.com/intel/libvpl) [Unreleased]: https://github.com/intel/libvpl/compare/v1.3.0...HEAD [1.3.0]: https://github.com/intel/libvpl/compare/v1.2.0...v1.3.0 [1.2.0]: https://github.com/intel/libvpl/compare/v1.1.0...v1.2.0 [1.1.0]: https://github.com/intel/libvpl/compare/v1.0.0...v1.1.0 [1.0.0]: https://github.com/intel/libvpl/releases/tag/v1.0.0 libvpl-tools-1.3.0/CMakeLists.txt000066400000000000000000000173751473010523400167110ustar00rootroot00000000000000# ############################################################################## # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ############################################################################## cmake_minimum_required(VERSION 3.13.0) if(MSVC) cmake_policy(SET CMP0091 NEW) endif() file(STRINGS "version.txt" version_txt) project(vpl-tools VERSION ${version_txt}) set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_CURRENT_SOURCE_DIR}/cmake") # # Project configuration options # # Set default build type to Release if not specified if(NOT CMAKE_BUILD_TYPE) message(STATUS "Default CMAKE_BUILD_TYPE not set, using Release") set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Choose build type from: None Debug Release RelWithDebInfo MinSizeRel" FORCE) endif() # More code hardening on Windows if(MSVC) if(${use_control_flow_guard}) message("Setting CONTROL FLOW GUARD") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /guard:cf") set(CMAKE_EXE_LINKER_FLAGS "/guard:cf /DYNAMICBASE") endif() if(${use_qspectre}) message("Setting QSPECTRE") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /Qspectre") endif() endif() # Project options option(BUILD_SHARED_LIBS "Build shared instead of static libraries." OFF) option(BUILD_TESTS "Build tests." OFF) set(BUILD_EXPERIMENTAL ON CACHE BOOL "Build tools with EXPERIMENTAL APIs.") # Test tool options for surface sharing if(WIN32) set(TOOLS_DEFAULT_SCREEN_CAPTURE ON) set(TOOLS_DEFAULT_RENDER ON) set(TOOLS_DEFAULT_OPENCL ON) set(TOOLS_DEFAULT_X11 OFF) else() set(TOOLS_DEFAULT_SCREEN_CAPTURE ON) set(TOOLS_DEFAULT_RENDER ON) set(TOOLS_DEFAULT_OPENCL OFF) set(TOOLS_DEFAULT_X11 ON) endif() option(TOOLS_ENABLE_SCREEN_CAPTURE "Enable screen capture in surface sharing test tools." ${TOOLS_DEFAULT_SCREEN_CAPTURE}) option(TOOLS_ENABLE_RENDER "Enable rendering in surface sharing test tools." ${TOOLS_DEFAULT_RENDER}) option(TOOLS_ENABLE_OPENCL "Enable OpenCL in surface sharing test tools." ${TOOLS_DEFAULT_OPENCL}) option(TOOLS_ENABLE_X11 "Enable X11 in surface sharing test tools." ${TOOLS_DEFAULT_X11}) option(USE_MSVC_STATIC_RUNTIME "Link MSVC runtime statically to all components."
OFF) if(USE_MSVC_STATIC_RUNTIME) set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>") endif() # Set output directories set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}) set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}) include(cmake/InstallDirs.cmake) include(cmake/PackageTarget.cmake) # Set POSITION_INDEPENDENT_CODE property for all targets set(CMAKE_POSITION_INDEPENDENT_CODE true) # Keep executable bits on shared objects when installing regardless of distro set(CMAKE_INSTALL_SO_NO_EXE 0) include(cmake/CompileOptions.cmake) if(BUILD_TESTS) include(CTest) enable_testing() # Build googletest set(BUILD_GMOCK OFF CACHE BOOL "" FORCE) set(INSTALL_GTEST OFF CACHE BOOL "" FORCE) set(gtest_disable_pthreads OFF CACHE BOOL "" FORCE) set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) set(gtest_hide_internal_symbols OFF CACHE BOOL "" FORCE) add_subdirectory(ext/googletest) add_library(GTest::gtest ALIAS gtest) add_library(GTest::gtest_main ALIAS gtest_main) endif() add_subdirectory(tools) install( FILES third-party-programs.txt DESTINATION ${VPL_INSTALL_LICENSEDIR} COMPONENT ${VPL_COMPONENT_LIB}) install( FILES LICENSE RENAME license.txt DESTINATION ${VPL_INSTALL_LICENSEDIR} COMPONENT ${VPL_COMPONENT_LIB}) set(CMAKE_INSTALL_SYSTEM_RUNTIME_COMPONENT ${VPL_COMPONENT_LIB}) include(InstallRequiredSystemLibraries) message( STATUS "---------------- Configuration summary ------------------------------" ) message(STATUS "CMake:") message(STATUS " CMAKE_VERSION : ${CMAKE_VERSION}") message(STATUS " CMAKE_GENERATOR : ${CMAKE_GENERATOR}") message(STATUS " CMAKE_BUILD_TOOL : ${CMAKE_BUILD_TOOL}") message(STATUS "Target:") message(STATUS " CMAKE_SYSTEM_NAME : ${CMAKE_SYSTEM_NAME}") message(STATUS " CMAKE_SYSTEM_VERSION : ${CMAKE_SYSTEM_VERSION}") message(STATUS " CMAKE_SYSTEM_PROCESSOR : ${CMAKE_SYSTEM_PROCESSOR}") message(STATUS "General:") message(STATUS " CMAKE_BUILD_TYPE : ${CMAKE_BUILD_TYPE}") message(STATUS " CMAKE_TOOLCHAIN_FILE : ${CMAKE_TOOLCHAIN_FILE}") message(STATUS " CMAKE_C_COMPILER : ${CMAKE_C_COMPILER}") message(STATUS " CMAKE_CXX_COMPILER : ${CMAKE_CXX_COMPILER}") if(CMAKE_SIZEOF_VOID_P EQUAL 8) message(STATUS " Build architecture : 64-bit") elseif(CMAKE_SIZEOF_VOID_P EQUAL 4) message(STATUS " Build architecture : 32-bit") endif() message(STATUS "Flags:") message(STATUS " CMAKE_C_FLAGS : ${CMAKE_C_FLAGS}") message(STATUS " CMAKE_CXX_FLAGS : ${CMAKE_CXX_FLAGS}") message(STATUS "Release flags:") message(STATUS " CMAKE_C_FLAGS_RELEASE : ${CMAKE_C_FLAGS_RELEASE}") message(STATUS " CMAKE_CXX_FLAGS_RELEASE : ${CMAKE_CXX_FLAGS_RELEASE}") message(STATUS "Debug flags:") message(STATUS " CMAKE_C_FLAGS_DEBUG : ${CMAKE_C_FLAGS_DEBUG}") message(STATUS " CMAKE_CXX_FLAGS_DEBUG : ${CMAKE_CXX_FLAGS_DEBUG}") message(STATUS "RelWithDebRT flags:") message( STATUS " CMAKE_C_FLAGS_RELWITHDEBRT : ${CMAKE_C_FLAGS_RELWITHDEBRT}") message( STATUS " CMAKE_CXX_FLAGS_RELWITHDEBRT : ${CMAKE_CXX_FLAGS_RELWITHDEBRT}") message(STATUS "Install:") message(STATUS " CMAKE_INSTALL_PREFIX : ${CMAKE_INSTALL_PREFIX}") message( STATUS " CMAKE_INSTALL_FULL_BINDIR : ${CMAKE_INSTALL_FULL_BINDIR}") message( STATUS " CMAKE_INSTALL_FULL_INCLUDEDIR : ${CMAKE_INSTALL_FULL_INCLUDEDIR}") message( STATUS " CMAKE_INSTALL_FULL_LIBDIR : ${CMAKE_INSTALL_FULL_LIBDIR}") message( STATUS " CMAKE_INSTALL_FULL_DOCDIR : ${CMAKE_INSTALL_FULL_DOCDIR}") message( STATUS " VPL_INSTALL_FULL_PKGCONFIGDIR : ${VPL_INSTALL_FULL_PKGCONFIGDIR}") message( STATUS " VPL_INSTALL_FULL_CMAKEDIR :
${VPL_INSTALL_FULL_CMAKEDIR}") message(STATUS " VPL_INSTALL_FULL_ENVDIR : ${VPL_INSTALL_FULL_ENVDIR}") message( STATUS " VPL_INSTALL_FULL_MODFILEDIR : ${VPL_INSTALL_FULL_MODFILEDIR}") message( STATUS " VPL_INSTALL_FULL_EXAMPLEDIR : ${VPL_INSTALL_FULL_EXAMPLEDIR}") message( STATUS " VPL_INSTALL_FULL_LICENSEDIR : ${VPL_INSTALL_FULL_LICENSEDIR}") message( STATUS " VPL_INSTALL_FULL_PYTHONDIR : ${VPL_INSTALL_FULL_PYTHONDIR}") message(STATUS "Build:") message(STATUS " BUILD_SHARED_LIBS : ${BUILD_SHARED_LIBS}") message(STATUS " BUILD_TESTS : ${BUILD_TESTS}") message(STATUS " BUILD_EXPERIMENTAL : ${BUILD_EXPERIMENTAL}") if(MSVC) message( STATUS " USE_MSVC_STATIC_RUNTIME : ${USE_MSVC_STATIC_RUNTIME}") endif() if(CMAKE_SYSTEM_NAME MATCHES Linux) message(STATUS " ENABLE_VA : ${ENABLE_VA}") message(STATUS " ENABLE_DRM : ${ENABLE_DRM}") message(STATUS " ENABLE_WAYLAND : ${ENABLE_WAYLAND}") message(STATUS " ENABLE_X11 : ${ENABLE_X11}") message(STATUS " ENABLE_DRI3 : ${ENABLE_DRI3}") message(STATUS " ENABLE_GTK4 : ${ENABLE_GTK4}") endif() message(STATUS "Surface sharing test tools:") message( STATUS " TOOLS_ENABLE_SCREEN_CAPTURE : ${TOOLS_ENABLE_SCREEN_CAPTURE}") message(STATUS " TOOLS_ENABLE_RENDER : ${TOOLS_ENABLE_RENDER}") message(STATUS " TOOLS_ENABLE_OPENCL : ${TOOLS_ENABLE_OPENCL}") message(STATUS " TOOLS_ENABLE_X11 : ${TOOLS_ENABLE_X11}") libvpl-tools-1.3.0/CONTRIBUTING.md000066400000000000000000000102241473010523400163700ustar00rootroot00000000000000# Contributing The Intel® Video Processing Library (Intel® VPL) tools project welcomes community contributions. You can: - Submit your changes directly as a [pull request](https://github.com/intel/libvpl-tools/pulls) - Log a bug or feature request with an [issue](https://github.com/intel/libvpl-tools/issues) ## Pull Request Checklist Before submitting your pull requests, please do the following: - Make sure your Pull Request includes these three sections in the description - **Issue** - link for the ticket from Jira/HSD/ETC or state the problem - **Solution** - Overview of your approach to solving the problem - **Test** - how you tested the change - Make sure you update CHANGELOG.md if the change is important. - Make sure your changes are consistent with the [philosophy for contribution](#philosophy-for-contribution). - Make sure the correct license is included at the top of new files. - Make sure your commits follow the correct [commit message style](#commit-message-style). - Make sure to test your code. At a minimum run `script/test`. - Make sure to [sign your work](#sign-your-work). ## Philosophy for Contribution Contributed code must be: - *Tested*: Include tests when you contribute new features, as they help to a) prove that your code works correctly, and b) guard against future breaking changes to lower the maintenance cost. Bug fixes also generally require tests, because the presence of bugs usually indicates insufficient test coverage. - *Documented*: Pull requests that change documented functionality should update the relevant documentation. New functionality should be documented. - *Portable*: The project supports different operating systems CPU and GPU architectures, compilers, and run-times. ## Commit Message Style 1. Separate subject from body with a blank line 2. Limit the subject line to 50 characters 3. Capitalize the subject line 4. Do not end the subject line with a period 5. Use the imperative mood in the subject line 6. Wrap the body at 72 characters 7. Use the body to explain what and why vs. 
how. Further reading: [How to Write a Git Commit Message](https://chris.beams.io/posts/git-commit/) ## Sign Your Work Please use the sign-off line at the end of the patch. Your signature certifies that you wrote the patch or otherwise have the right to pass it on as an open-source patch. The rules are pretty simple: if you can certify the below (from [developercertificate.org](http://developercertificate.org/)): ``` Developer Certificate of Origin Version 1.1 Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 660 York Street, Suite 102, San Francisco, CA 94110 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Developer's Certificate of Origin 1.1 By making a contribution to this project, I certify that: (a) The contribution was created in whole or in part by me and I have the right to submit it under the open source license indicated in the file; or (b) The contribution is based upon previous work that, to the best of my knowledge, is covered under an appropriate open source license and I have the right under that license to submit that work with modifications, whether created in whole or in part by me, under the same open source license (unless I am permitted to submit under a different license), as indicated in the file; or (c) The contribution was provided directly to me by some other person who certified (a), (b) or (c) and I have not modified it. (d) I understand and agree that this project and the contribution are public and that a record of the contribution (including all personal information I submit with it, including my sign-off) is maintained indefinitely and may be redistributed consistent with this project or the open source license(s) involved. ``` Then you just add a line to every git commit message: Signed-off-by: Kris Smith Use your real name (sorry, no pseudonyms or anonymous contributions). If you set your `user.name` and `user.email` git configs, you can sign your commit automatically with `git commit -s`. libvpl-tools-1.3.0/CPPLINT.cfg000066400000000000000000000003041473010523400157270ustar00rootroot00000000000000set noparent # Let .clang-format determine rules that conflict with cpplint filter=-whitespace filter=-readability/braces # Ignore Google's non-approved headers and functions filter=-build/c++11 libvpl-tools-1.3.0/LICENSE000066400000000000000000000020621473010523400151450ustar00rootroot00000000000000MIT License Copyright (c) 2020 Intel Corporation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
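Putting the seven commit-message rules together with the gitlint limits configured earlier (a capitalized, imperative, period-free title of at most 50 characters; a body wrapped at 72 characters; a sign-off line), a conforming message looks like the following illustrative example; the change it describes, and the name and email on the sign-off line, are invented placeholders:

```
Add fullscreen toggle to GTK renderer

Users had to restart playback to switch display modes. Bind
Ctrl+f to enter fullscreen and Esc to leave it so the mode can
be changed while a stream is playing.

Signed-off-by: Kris Smith <kris.smith@email.com>
```

Running `git commit -s` adds the sign-off line automatically, and the gitlint hook configured in .pre-commit-config.yaml checks the rest.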
libvpl-tools-1.3.0/README.md000066400000000000000000000050131473010523400154160ustar00rootroot00000000000000# Intel® Video Processing Library (Intel® VPL) Tools Intel® Video Processing Library (Intel® VPL) tools provide access to hardware accelerated video decode, encode, and processing capabilities on Intel® GPUs from the command line. The tools require the [Intel® VPL base library](https://github.com/intel/libvpl) and an installed runtime library. Current runtime implementations: - [Intel® VPL GPU Runtime](https://github.com/intel/vpl-gpu-rt) for use on Intel® Iris® Xe graphics and newer - [Intel® Media SDK](https://github.com/Intel-Media-SDK/MediaSDK) for use on legacy Intel graphics Follow the instructions on the respective repos to install the desired implementation. ## Build from Source Building the tools requires an installation of the Intel® VPL development package. ### Build and install the Intel® VPL development package Linux: ``` git clone https://github.com/intel/libvpl pushd libvpl export VPL_INSTALL_DIR=`pwd`/../_vplinstall sudo script/bootstrap cmake -B _build -DCMAKE_INSTALL_PREFIX=$VPL_INSTALL_DIR cmake --build _build cmake --install _build popd ``` Windows cmd prompt: ``` git clone https://github.com/intel/libvpl pushd libvpl set VPL_INSTALL_DIR=%cd%\..\_vplinstall script\bootstrap.bat cmake -B _build -DCMAKE_INSTALL_PREFIX=%VPL_INSTALL_DIR% cmake --build _build --config Release cmake --install _build --config Release popd ``` > **Note:** bootstrap.bat requires [WinGet](https://github.com/microsoft/winget-cli) ### Build and install the Intel® VPL tools Linux: ``` git clone https://github.com/intel/libvpl-tools pushd libvpl-tools export VPL_INSTALL_DIR=`pwd`/../_vplinstall sudo script/bootstrap cmake -B _build -DCMAKE_PREFIX_PATH=$VPL_INSTALL_DIR cmake --build _build cmake --install _build --prefix $VPL_INSTALL_DIR ``` Windows cmd prompt: ``` git clone https://github.com/intel/libvpl-tools pushd libvpl-tools set VPL_INSTALL_DIR=%cd%\..\_vplinstall script\bootstrap.bat cmake -B _build -DCMAKE_PREFIX_PATH=%VPL_INSTALL_DIR% cmake --build _build --config Release cmake --install _build --config Release --prefix %VPL_INSTALL_DIR% ``` > **Note:** bootstrap.bat requires [WinGet](https://github.com/microsoft/winget-cli) ## License This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details. ## Security See the [Intel® Security Center](https://www.intel.com/content/www/us/en/security-center/default.html) for information on how to report a potential security issue or vulnerability. ## How to Contribute See [CONTRIBUTING.md](CONTRIBUTING.md) for more information. libvpl-tools-1.3.0/SECURITY.md000066400000000000000000000006251473010523400157340ustar00rootroot00000000000000# Security Policy Intel is committed to rapidly addressing security vulnerabilities affecting our customers and providing clear guidance on the solution, impact, severity and mitigation. ## Reporting a Vulnerability Please report any security vulnerabilities in this project [utilizing the guidelines here](https://www.intel.com/content/www/us/en/security-center/vulnerability-handling-guidelines.html). libvpl-tools-1.3.0/bandit.yaml000066400000000000000000000214311473010523400162660ustar00rootroot00000000000000# yamllint disable-file ### Bandit config file generated from: # './bandit/bandit/cli/config_generator.py --out ipas_default.config' ### This config may optionally select a subset of tests to run or skip by ### filling out the 'tests' and 'skips' lists given below.
If no tests are ### specified for inclusion then it is assumed all tests are desired. The skips ### set will remove specific tests from the include set. This can be controlled ### using the -t/-s CLI options. Note that the same test ID should not appear ### in both 'tests' and 'skips', this would be nonsensical and is detected by ### Bandit at runtime. # Available tests: # B101 : assert_used # B102 : exec_used # B103 : set_bad_file_permissions # B104 : hardcoded_bind_all_interfaces # B105 : hardcoded_password_string # B106 : hardcoded_password_funcarg # B107 : hardcoded_password_default # B108 : hardcoded_tmp_directory # B110 : try_except_pass # B112 : try_except_continue # B201 : flask_debug_true # B301 : pickle # B302 : marshal # B303 : md5 # B304 : ciphers # B305 : cipher_modes # B306 : mktemp_q # B307 : eval # B308 : mark_safe # B310 : urllib_urlopen # B311 : random # B312 : telnetlib # B313 : xml_bad_cElementTree # B314 : xml_bad_ElementTree # B315 : xml_bad_expatreader # B316 : xml_bad_expatbuilder # B317 : xml_bad_sax # B318 : xml_bad_minidom # B319 : xml_bad_pulldom # B320 : xml_bad_etree # B321 : ftplib # B323 : unverified_context # B324 : hashlib_new_insecure_functions # B401 : import_telnetlib # B402 : import_ftplib # B403 : import_pickle # B404 : import_subprocess # B405 : import_xml_etree # B406 : import_xml_sax # B407 : import_xml_expat # B408 : import_xml_minidom # B409 : import_xml_pulldom # B410 : import_lxml # B411 : import_xmlrpclib # B412 : import_httpoxy # B413 : import_pycrypto # B501 : request_with_no_cert_validation # B502 : ssl_with_bad_version # B503 : ssl_with_bad_defaults # B504 : ssl_with_no_version # B505 : weak_cryptographic_key # B506 : yaml_load # B507 : ssh_no_host_key_verification # B601 : paramiko_calls # B602 : subprocess_popen_with_shell_equals_true # B603 : subprocess_without_shell_equals_true # B604 : any_other_function_with_shell_equals_true # B605 : start_process_with_a_shell # B606 : start_process_with_no_shell # B607 : start_process_with_partial_path # B608 : hardcoded_sql_expressions # B609 : linux_commands_wildcard_injection # B610 : django_extra_used # B611 : django_rawsql_used # B701 : jinja2_autoescape_false # B702 : use_of_mako_templates # B703 : django_mark_safe # (optional) list included test IDs here, eg '[B101, B406]': # IPAS Required Checkers. Do not disable these # Additional checkers may be added if desired tests: [ 'B301', 'B302', 'B303', 'B304', 'B305', 'B306', 'B308', 'B310', 'B311', 'B312', 'B313', 'B314', 'B315', 'B316', 'B317', 'B318', 'B319', 'B320', 'B321', 'B323', 'B324', 'B401', 'B402', 'B403', 'B404', 'B405', 'B406', 'B407', 'B408', 'B409', 'B410', 'B411', 'B412', 'B413'] # (optional) list skipped test IDs here, eg '[B101, B406]': # The following checkers are not required but be added to tests list if desired skips: [ 'B101', 'B102', 'B103', 'B104', 'B105', 'B106', 'B107', 'B108', 'B110', 'B112', 'B201', 'B501', 'B502', 'B503', 'B504', 'B505', 'B506', 'B507', 'B601', 'B602', 'B603', 'B604', 'B605', 'B606', 'B607', 'B608', 'B609', 'B610', 'B611', 'B701', 'B702', 'B703'] ### (optional) plugin settings - some test plugins require configuration data ### that may be given here, per-plugin. All bandit test plugins have a built in ### set of sensible defaults and these will be used if no configuration is ### provided. It is not necessary to provide settings for every (or any) plugin ### if the defaults are acceptable. 
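Because the checked set is effectively the 'tests' list minus the 'skips' list, and the -t/-s flags adjust those selections for a single run, the config file can stay strict while individual invocations are tuned. A hedged usage sketch (bandit's standard -c/-r/-t/-s flags; script/ is assumed to be where this repo keeps its Python scripts, and -c bandit.yaml matches the arguments .pre-commit-config.yaml passes to the bandit hook):

```
# Scan the repo's Python sources with this config
bandit -c bandit.yaml -r script/

# One-off run restricted to two specific checks
bandit -c bandit.yaml -r script/ -t B403,B413
```

B403 and B413 are the import_pickle and import_pycrypto IDs from the list above.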
any_other_function_with_shell_equals_true: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run assert_used: skips: [] hardcoded_tmp_directory: tmp_dirs: - /tmp - /var/tmp - /dev/shm linux_commands_wildcard_injection: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run ssl_with_bad_defaults: bad_protocol_versions: - PROTOCOL_SSLv2 - SSLv2_METHOD - SSLv23_METHOD - PROTOCOL_SSLv3 - PROTOCOL_TLSv1 - SSLv3_METHOD - TLSv1_METHOD ssl_with_bad_version: bad_protocol_versions: - PROTOCOL_SSLv2 - SSLv2_METHOD - SSLv23_METHOD - PROTOCOL_SSLv3 - PROTOCOL_TLSv1 - SSLv3_METHOD - TLSv1_METHOD start_process_with_a_shell: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run start_process_with_no_shell: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run start_process_with_partial_path: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run subprocess_popen_with_shell_equals_true: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - 
os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run subprocess_without_shell_equals_true: no_shell: - os.execl - os.execle - os.execlp - os.execlpe - os.execv - os.execve - os.execvp - os.execvpe - os.spawnl - os.spawnle - os.spawnlp - os.spawnlpe - os.spawnv - os.spawnve - os.spawnvp - os.spawnvpe - os.startfile shell: - os.system - os.popen - os.popen2 - os.popen3 - os.popen4 - popen2.popen2 - popen2.popen3 - popen2.popen4 - popen2.Popen3 - popen2.Popen4 - commands.getoutput - commands.getstatusoutput subprocess: - subprocess.Popen - subprocess.call - subprocess.check_call - subprocess.check_output - subprocess.run try_except_continue: check_typed_exception: false try_except_pass: check_typed_exception: false weak_cryptographic_key: weak_key_size_dsa_high: 1024 weak_key_size_dsa_medium: 2048 weak_key_size_ec_high: 160 weak_key_size_ec_medium: 224 weak_key_size_rsa_high: 1024 weak_key_size_rsa_medium: 2048 libvpl-tools-1.3.0/cmake/000077500000000000000000000000001473010523400152205ustar00rootroot00000000000000libvpl-tools-1.3.0/cmake/CompileOptions.cmake000066400000000000000000000066661473010523400212040ustar00rootroot00000000000000# ############################################################################## # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ############################################################################## # # Set compilation options # if(MSVC) add_compile_options("$<$<CONFIG:Debug>:/D_DEBUG>") else() add_compile_options("$<$<CONFIG:Debug>:-D_DEBUG -O0 -g>") endif() if(ENABLE_WARNING_AS_ERROR) message(STATUS "Warnings as errors enabled") set(MFX_DEPRECATED_OFF 1) endif() if(DEFINED ENV{MFX_DEPRECATED_OFF}) set(MFX_DEPRECATED_OFF 1) endif() if(MFX_DEPRECATED_OFF) message(STATUS "Deprecation warnings disabled") add_definitions(-DMFX_DEPRECATED_OFF) endif() set(CMAKE_POSITION_INDEPENDENT_CODE ON) if(MSVC) add_link_options("/guard:cf") add_link_options("/DYNAMICBASE") if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "8") add_link_options("/HIGHENTROPYVA") endif() add_link_options("/LARGEADDRESSAWARE") add_link_options("/NXCOMPAT") if(ENABLE_WARNING_AS_ERROR) add_compile_options("/WX") endif() add_compile_options("/GS") add_compile_options("/guard:cf") else() if(UNIX) set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -pie") endif() add_compile_options("-Wformat") add_compile_options("-Wformat-security") add_compile_options("-Werror=format-security") if(NOT MINGW) string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE) if(NOT CMAKE_BUILD_TYPE MATCHES debug) add_definitions("-D_FORTIFY_SOURCE=2") endif() endif() if(NOT MINGW) add_compile_options("-fstack-protector-strong") endif() if(NOT MINGW) add_link_options("-Wl,-z,relro,-z,now,-z,noexecstack") endif() add_compile_options("-Wall") if(ENABLE_WARNING_AS_ERROR) add_compile_options("-Werror") endif() endif() # Define RelWithDebRT Build Mode if(MSVC) set(CMAKE_DEBUG_POSTFIX "d") set(CMAKE_RELWITHDEBRT_POSTFIX "d") set(CMAKE_CXX_FLAGS_RELWITHDEBRT "${CMAKE_CXX_FLAGS_RELEASE}" CACHE STRING "Flags used by the C++ compiler during RelWithDebRT builds." FORCE) set(CMAKE_C_FLAGS_RELWITHDEBRT "${CMAKE_C_FLAGS_RELEASE}" CACHE STRING "Flags used by the C compiler during RelWithDebRT builds."
FORCE) foreach(build_flag CMAKE_CXX_FLAGS_RELWITHDEBRT CMAKE_C_FLAGS_RELWITHDEBRT) string(REPLACE "/MDd" "" ${build_flag} "${${build_flag}}") string(REPLACE "/MTd" "" ${build_flag} "${${build_flag}}") string(REPLACE "/MD" "" ${build_flag} "${${build_flag}}") string(REPLACE "/MT" "" ${build_flag} "${${build_flag}}") set(${build_flag} "${${build_flag}} /MDd /DEBUG:NONE") endforeach() set(CMAKE_EXE_LINKER_FLAGS_RELWITHDEBRT "${CMAKE_EXE_LINKER_FLAGS_RELEASE}" CACHE STRING "Flags used for linking binaries during RelWithDebRT builds." FORCE) set(CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBRT "${CMAKE_SHARED_LINKER_FLAGS_RELEASE}" CACHE STRING "Flags used by the shared libraries linker during RelWithDebRT builds." FORCE) set(CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBRT "${CMAKE_MODULE_LINKER_FLAGS_RELEASE}" CACHE STRING "Flags used by the linker during RelWithDebRT builds." FORCE) mark_as_advanced( CMAKE_CXX_FLAGS_RELWITHDEBRT CMAKE_C_FLAGS_RELWITHDEBRT CMAKE_EXE_LINKER_FLAGS_RELWITHDEBRT CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBRT) # Add RelWithDebRT to available config types set(CMAKE_CONFIGURATION_TYPES "${CMAKE_CONFIGURATION_TYPES};RelWithDebRT") endif() libvpl-tools-1.3.0/cmake/InstallDirs.cmake000066400000000000000000000067531473010523400204630ustar00rootroot00000000000000# ############################################################################## # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ############################################################################## # # Set installation directories # if(WIN32 AND NOT CMAKE_CROSSCOMPILING AND CMAKE_SIZEOF_VOID_P EQUAL 4) if(NOT CMAKE_INSTALL_BINDIR) set(BINARCH x86) endif() if(NOT CMAKE_INSTALL_LIBDIR) set(LIBARCH x86) endif() endif() # See https://cmake.org/cmake/help/latest/module/GNUInstallDirs.html for # variables GNUInstallDirs exposes.
This project commonly uses: # CMAKE_INSTALL_INCLUDEDIR CMAKE_INSTALL_DOCDIR CMAKE_INSTALL_BINDIR # CMAKE_INSTALL_LIBDIR include(GNUInstallDirs) set(CMAKE_INSTALL_DOCDIR ${CMAKE_INSTALL_DATAROOTDIR}/doc/${PROJECT_NAME}) gnuinstalldirs_get_absolute_install_dir(CMAKE_INSTALL_FULL_DOCDIR CMAKE_INSTALL_DOCDIR DOCDIR) if(WIN32 AND CMAKE_SIZEOF_VOID_P EQUAL 4) set(CMAKE_INSTALL_BINDIR ${CMAKE_INSTALL_BINDIR}/${BINARCH} CACHE PATH "user executables" FORCE) set(CMAKE_INSTALL_LIBDIR ${CMAKE_INSTALL_LIBDIR}/${LIBARCH} CACHE PATH "Object code libraries" FORCE) foreach(dir LIBDIR BINDIR) gnuinstalldirs_get_absolute_install_dir(CMAKE_INSTALL_FULL_${dir} CMAKE_INSTALL_${dir} ${dir}) endforeach() endif() if(NOT VPL_INSTALL_PKGCONFIGDIR) set(VPL_INSTALL_PKGCONFIGDIR ${CMAKE_INSTALL_LIBDIR}/pkgconfig) set(VPL_INSTALL_FULL_PKGCONFIGDIR ${CMAKE_INSTALL_FULL_LIBDIR}/pkgconfig) else() gnuinstalldirs_get_absolute_install_dir( VPL_INSTALL_FULL_PKGCONFIGDIR VPL_INSTALL_PKGCONFIGDIR VPL_INSTALL_PKGCONFIGDIR) endif() if(NOT VPL_INSTALL_CMAKEDIR) set(VPL_INSTALL_CMAKEDIR ${CMAKE_INSTALL_LIBDIR}/cmake) set(VPL_INSTALL_FULL_CMAKEDIR ${CMAKE_INSTALL_FULL_LIBDIR}/cmake) else() gnuinstalldirs_get_absolute_install_dir( VPL_INSTALL_FULL_CMAKEDIR VPL_INSTALL_CMAKEDIR VPL_INSTALL_CMAKEDIR) endif() if(NOT VPL_INSTALL_ENVDIR) set(VPL_INSTALL_ENVDIR ${CMAKE_INSTALL_SYSCONFDIR}/${PROJECT_NAME}) set(VPL_INSTALL_FULL_ENVDIR ${CMAKE_INSTALL_FULL_SYSCONFDIR}/${PROJECT_NAME}) else() gnuinstalldirs_get_absolute_install_dir(VPL_INSTALL_FULL_ENVDIR VPL_INSTALL_ENVDIR VPL_INSTALL_ENVDIR) endif() if(NOT VPL_INSTALL_MODFILEDIR) set(VPL_INSTALL_MODFILEDIR ${CMAKE_INSTALL_SYSCONFDIR}/modulefiles) set(VPL_INSTALL_FULL_MODFILEDIR ${CMAKE_INSTALL_FULL_SYSCONFDIR}/modulefiles) else() gnuinstalldirs_get_absolute_install_dir( VPL_INSTALL_FULL_MODFILEDIR VPL_INSTALL_MODFILEDIR VPL_INSTALL_MODFILEDIR) endif() if(NOT VPL_INSTALL_EXAMPLEDIR) set(VPL_INSTALL_EXAMPLEDIR ${CMAKE_INSTALL_DATAROOTDIR}/${PROJECT_NAME}/examples) set(VPL_INSTALL_FULL_EXAMPLEDIR ${CMAKE_INSTALL_FULL_DATAROOTDIR}/${PROJECT_NAME}/examples) else() gnuinstalldirs_get_absolute_install_dir( VPL_INSTALL_FULL_EXAMPLEDIR VPL_INSTALL_EXAMPLEDIR VPL_INSTALL_EXAMPLEDIR) endif() if(NOT VPL_INSTALL_LICENSEDIR) set(VPL_INSTALL_LICENSEDIR ${CMAKE_INSTALL_DATAROOTDIR}/${PROJECT_NAME}/licensing) set(VPL_INSTALL_FULL_LICENSEDIR ${CMAKE_INSTALL_FULL_DATAROOTDIR}/${PROJECT_NAME}/licensing) else() gnuinstalldirs_get_absolute_install_dir( VPL_INSTALL_FULL_LICENSEDIR VPL_INSTALL_LICENSEDIR VPL_INSTALL_LICENSEDIR) endif() if(WIN32) set(CMAKE_INSTALL_SYSTEM_RUNTIME_DESTINATION ${CMAKE_INSTALL_BINDIR}) else() set(CMAKE_INSTALL_SYSTEM_RUNTIME_DESTINATION ${CMAKE_INSTALL_LIBDIR}) endif() libvpl-tools-1.3.0/cmake/PackageTarget.cmake000066400000000000000000000017331473010523400207300ustar00rootroot00000000000000# ############################################################################## # Copyright (C) Intel Corporation # # SPDX-License-Identifier: MIT # ############################################################################## # # Packaging # set(CPACK_GENERATOR "ZIP") set(CPACK_ARCHIVE_COMPONENT_INSTALL ON) set(CPACK_PACKAGE_DIRECTORY ${PROJECT_BINARY_DIR}) set(CPACK_PACKAGE_VERSION_MAJOR ${PROJECT_VERSION_MAJOR}) set(CPACK_PACKAGE_VERSION_MINOR ${PROJECT_VERSION_MINOR}) set(CPACK_PACKAGE_VERSION_PATCH ${PROJECT_VERSION_PATCH}) set(VPL_COMPONENT_DEV dev) set(VPL_COMPONENT_LIB lib) set(VPL_COMPONENT_TOOLS tools) set(CPACK_COMPONENTS_ALL all ${VPL_COMPONENT_DEV} ${VPL_COMPONENT_LIB} 
${VPL_COMPONENT_TOOLS}) set(CPACK_COMPONENTS_GROUPING IGNORE) include(CPack) cpack_add_component(${VPL_COMPONENT_LIB}) cpack_add_component(${VPL_COMPONENT_DEV} DEPENDS ${VPL_COMPONENT_LIB}) cpack_add_component(${VPL_COMPONENT_TOOLS} DEPENDS ${VPL_COMPONENT_LIB}) libvpl-tools-1.3.0/ext/000077500000000000000000000000001473010523400147405ustar00rootroot00000000000000libvpl-tools-1.3.0/ext/googletest/000077500000000000000000000000001473010523400171145ustar00rootroot00000000000000libvpl-tools-1.3.0/ext/googletest/.clang-format000066400000000000000000000001641473010523400214700ustar00rootroot00000000000000# Run manually to reformat a file: # clang-format -i --style=file Language: Cpp BasedOnStyle: Google libvpl-tools-1.3.0/ext/googletest/.gitignore000066400000000000000000000030311473010523400211010ustar00rootroot00000000000000# Ignore CI build directory build/ xcuserdata cmake-build-debug/ .idea/ bazel-bin bazel-genfiles bazel-googletest bazel-out bazel-testlogs # python *.pyc # Visual Studio files .vs *.sdf *.opensdf *.VC.opendb *.suo *.user _ReSharper.Caches/ Win32-Debug/ Win32-Release/ x64-Debug/ x64-Release/ # Ignore autoconf / automake files Makefile.in aclocal.m4 configure build-aux/ autom4te.cache/ googletest/m4/libtool.m4 googletest/m4/ltoptions.m4 googletest/m4/ltsugar.m4 googletest/m4/ltversion.m4 googletest/m4/lt~obsolete.m4 googlemock/m4 # Ignore generated directories. googlemock/fused-src/ googletest/fused-src/ # macOS files .DS_Store googletest/.DS_Store googletest/xcode/.DS_Store # Ignore cmake generated directories and files. CMakeFiles CTestTestfile.cmake Makefile cmake_install.cmake googlemock/CMakeFiles googlemock/CTestTestfile.cmake googlemock/Makefile googlemock/cmake_install.cmake googlemock/gtest /bin /googlemock/gmock.dir /googlemock/gmock_main.dir /googlemock/RUN_TESTS.vcxproj.filters /googlemock/RUN_TESTS.vcxproj /googlemock/INSTALL.vcxproj.filters /googlemock/INSTALL.vcxproj /googlemock/gmock_main.vcxproj.filters /googlemock/gmock_main.vcxproj /googlemock/gmock.vcxproj.filters /googlemock/gmock.vcxproj /googlemock/gmock.sln /googlemock/ALL_BUILD.vcxproj.filters /googlemock/ALL_BUILD.vcxproj /lib /Win32 /ZERO_CHECK.vcxproj.filters /ZERO_CHECK.vcxproj /RUN_TESTS.vcxproj.filters /RUN_TESTS.vcxproj /INSTALL.vcxproj.filters /INSTALL.vcxproj /googletest-distribution.sln /CMakeCache.txt /ALL_BUILD.vcxproj.filters /ALL_BUILD.vcxproj libvpl-tools-1.3.0/ext/googletest/BUILD.bazel000066400000000000000000000144321473010523400207760ustar00rootroot00000000000000# Copyright 2017 Google Inc. # All Rights Reserved. # # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Bazel Build for Google C++ Testing Framework(Google Test) package(default_visibility = ["//visibility:public"]) licenses(["notice"]) exports_files(["LICENSE"]) config_setting( name = "qnx", constraint_values = ["@platforms//os:qnx"], ) config_setting( name = "windows", constraint_values = ["@platforms//os:windows"], ) config_setting( name = "freebsd", constraint_values = ["@platforms//os:freebsd"], ) config_setting( name = "openbsd", constraint_values = ["@platforms//os:openbsd"], ) config_setting( name = "msvc_compiler", flag_values = { "@bazel_tools//tools/cpp:compiler": "msvc-cl", }, visibility = [":__subpackages__"], ) config_setting( name = "has_absl", values = {"define": "absl=1"}, ) # Library that defines the FRIEND_TEST macro. cc_library( name = "gtest_prod", hdrs = ["googletest/include/gtest/gtest_prod.h"], includes = ["googletest/include"], ) # Google Test including Google Mock cc_library( name = "gtest", srcs = glob( include = [ "googletest/src/*.cc", "googletest/src/*.h", "googletest/include/gtest/**/*.h", "googlemock/src/*.cc", "googlemock/include/gmock/**/*.h", ], exclude = [ "googletest/src/gtest-all.cc", "googletest/src/gtest_main.cc", "googlemock/src/gmock-all.cc", "googlemock/src/gmock_main.cc", ], ), hdrs = glob([ "googletest/include/gtest/*.h", "googlemock/include/gmock/*.h", ]), copts = select({ ":qnx": [], ":windows": [], "//conditions:default": ["-pthread"], }), defines = select({ ":has_absl": ["GTEST_HAS_ABSL=1"], "//conditions:default": [], }), features = select({ ":windows": ["windows_export_all_symbols"], "//conditions:default": [], }), includes = [ "googlemock", "googlemock/include", "googletest", "googletest/include", ], linkopts = select({ ":qnx": ["-lregex"], ":windows": [], ":freebsd": [ "-lm", "-pthread", ], ":openbsd": [ "-lm", "-pthread", ], "//conditions:default": ["-pthread"], }), deps = select({ ":has_absl": [ "@com_google_absl//absl/debugging:failure_signal_handler", "@com_google_absl//absl/debugging:stacktrace", "@com_google_absl//absl/debugging:symbolize", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", "@com_google_absl//absl/flags:reflection", "@com_google_absl//absl/flags:usage", "@com_google_absl//absl/strings", "@com_google_absl//absl/types:any", "@com_google_absl//absl/types:optional", "@com_google_absl//absl/types:variant", "@com_googlesource_code_re2//:re2", ], "//conditions:default": [], }), ) cc_library( name = "gtest_main", srcs = ["googlemock/src/gmock_main.cc"], features = select({ ":windows": ["windows_export_all_symbols"], "//conditions:default": [], }), deps = [":gtest"], ) # The following rules build samples of how to use gTest. 
cc_library( name = "gtest_sample_lib", srcs = [ "googletest/samples/sample1.cc", "googletest/samples/sample2.cc", "googletest/samples/sample4.cc", ], hdrs = [ "googletest/samples/prime_tables.h", "googletest/samples/sample1.h", "googletest/samples/sample2.h", "googletest/samples/sample3-inl.h", "googletest/samples/sample4.h", ], features = select({ ":windows": ["windows_export_all_symbols"], "//conditions:default": [], }), ) cc_test( name = "gtest_samples", size = "small", # All Samples except: # sample9 (main) # sample10 (main and takes a command line option and needs to be separate) srcs = [ "googletest/samples/sample1_unittest.cc", "googletest/samples/sample2_unittest.cc", "googletest/samples/sample3_unittest.cc", "googletest/samples/sample4_unittest.cc", "googletest/samples/sample5_unittest.cc", "googletest/samples/sample6_unittest.cc", "googletest/samples/sample7_unittest.cc", "googletest/samples/sample8_unittest.cc", ], linkstatic = 0, deps = [ "gtest_sample_lib", ":gtest_main", ], ) cc_test( name = "sample9_unittest", size = "small", srcs = ["googletest/samples/sample9_unittest.cc"], deps = [":gtest"], ) cc_test( name = "sample10_unittest", size = "small", srcs = ["googletest/samples/sample10_unittest.cc"], deps = [":gtest"], ) libvpl-tools-1.3.0/ext/googletest/CMakeLists.txt000066400000000000000000000015341473010523400216570ustar00rootroot00000000000000# Note: CMake support is community-based. The maintainers do not use CMake # internally. cmake_minimum_required(VERSION 3.5) if (POLICY CMP0048) cmake_policy(SET CMP0048 NEW) endif (POLICY CMP0048) if (POLICY CMP0077) cmake_policy(SET CMP0077 NEW) endif (POLICY CMP0077) project(googletest-distribution) set(GOOGLETEST_VERSION 1.12.1) if(NOT CYGWIN AND NOT MSYS AND NOT ${CMAKE_SYSTEM_NAME} STREQUAL QNX) set(CMAKE_CXX_EXTENSIONS OFF) endif() enable_testing() include(CMakeDependentOption) include(GNUInstallDirs) #Note that googlemock target already builds googletest option(BUILD_GMOCK "Builds the googlemock subproject" ON) option(INSTALL_GTEST "Enable installation of googletest. (Projects embedding googletest may want to turn this OFF.)" ON) if(BUILD_GMOCK) add_subdirectory( googlemock ) else() add_subdirectory( googletest ) endif() libvpl-tools-1.3.0/ext/googletest/CONTRIBUTING.md000066400000000000000000000130621473010523400213470ustar00rootroot00000000000000# How to become a contributor and submit your own code ## Contributor License Agreements We'd love to accept your patches! Before we can take them, we have to jump a couple of legal hurdles. Please fill out either the individual or corporate Contributor License Agreement (CLA). * If you are an individual writing original source code and you're sure you own the intellectual property, then you'll need to sign an [individual CLA](https://developers.google.com/open-source/cla/individual). * If you work for a company that wants to allow you to contribute your work, then you'll need to sign a [corporate CLA](https://developers.google.com/open-source/cla/corporate). Follow either of the two links above to access the appropriate CLA and instructions for how to sign and return it. Once we receive it, we'll be able to accept your pull requests. ## Are you a Googler? If you are a Googler, please make an attempt to submit an internal contribution rather than a GitHub Pull Request. If you are not able to submit internally, a PR is acceptable as an alternative. ## Contributing A Patch 1. 
Submit an issue describing your proposed change to the [issue tracker](https://github.com/google/googletest/issues). 2. Please don't mix more than one logical change per submittal, because it makes the history hard to follow. If you want to make a change that doesn't have a corresponding issue in the issue tracker, please create one. 3. Also, coordinate with team members that are listed on the issue in question. This ensures that work isn't being duplicated and communicating your plan early also generally leads to better patches. 4. If your proposed change is accepted, and you haven't already done so, sign a Contributor License Agreement ([see details above](#contributor-license-agreements)). 5. Fork the desired repo, develop and test your code changes. 6. Ensure that your code adheres to the existing style in the sample to which you are contributing. 7. Ensure that your code has an appropriate set of unit tests which all pass. 8. Submit a pull request. ## The Google Test and Google Mock Communities The Google Test community exists primarily through the [discussion group](http://groups.google.com/group/googletestframework) and the GitHub repository. Likewise, the Google Mock community exists primarily through their own [discussion group](http://groups.google.com/group/googlemock). You are definitely encouraged to contribute to the discussion and you can also help us to keep the effectiveness of the group high by following and promoting the guidelines listed here. ### Please Be Friendly Showing courtesy and respect to others is a vital part of the Google culture, and we strongly encourage everyone participating in Google Test development to join us in accepting nothing less. Of course, being courteous is not the same as failing to constructively disagree with each other, but it does mean that we should be respectful of each other when enumerating the 42 technical reasons that a particular proposal may not be the best choice. There's never a reason to be antagonistic or dismissive toward anyone who is sincerely trying to contribute to a discussion. Sure, C++ testing is serious business and all that, but it's also a lot of fun. Let's keep it that way. Let's strive to be one of the friendliest communities in all of open source. As always, discuss Google Test in the official GoogleTest discussion group. You don't have to actually submit code in order to sign up. Your participation itself is a valuable contribution. ## Style To keep the source consistent, readable, diffable and easy to merge, we use a fairly rigid coding style, as defined by the [google-styleguide](https://github.com/google/styleguide) project. All patches will be expected to conform to the style outlined [here](https://google.github.io/styleguide/cppguide.html). Use [.clang-format](https://github.com/google/googletest/blob/master/.clang-format) to check your formatting. ## Requirements for Contributors If you plan to contribute a patch, you need to build Google Test, Google Mock, and their own tests from a git checkout, which has further requirements: * [Python](https://www.python.org/) v2.3 or newer (for running some of the tests and re-generating certain source files from templates) * [CMake](https://cmake.org/) v2.8.12 or newer ## Developing Google Test and Google Mock This section discusses how to make your own changes to the Google Test project. 
### Testing Google Test and Google Mock Themselves To make sure your changes work as intended and don't break existing functionality, you'll want to compile and run Google Test and GoogleMock's own tests. For that you can use CMake: mkdir mybuild cd mybuild cmake -Dgtest_build_tests=ON -Dgmock_build_tests=ON ${GTEST_REPO_DIR} To choose between building only Google Test or Google Mock, you may modify your cmake command to be one of the following: cmake -Dgtest_build_tests=ON ${GTEST_DIR} # sets up Google Test tests cmake -Dgmock_build_tests=ON ${GMOCK_DIR} # sets up Google Mock tests Make sure you have Python installed, as some of Google Test's tests are written in Python. If the cmake command complains about not being able to find Python (`Could NOT find PythonInterp (missing: PYTHON_EXECUTABLE)`), try telling it explicitly where your Python executable can be found: cmake -DPYTHON_EXECUTABLE=path/to/python ... Next, you can build Google Test and / or Google Mock and all desired tests. On \*nix, this is usually done by make To run the tests, do make test All tests should pass. libvpl-tools-1.3.0/ext/googletest/CONTRIBUTORS000066400000000000000000000043251473010523400210000ustar00rootroot00000000000000# This file contains a list of people who've made non-trivial # contribution to the Google C++ Testing Framework project. People # who commit code to the project are encouraged to add their names # here. Please keep the list sorted by first names. Ajay Joshi Balázs Dán Benoit Sigoure Bharat Mediratta Bogdan Piloca Chandler Carruth Chris Prince Chris Taylor Dan Egnor Dave MacLachlan David Anderson Dean Sturtevant Eric Roman Gene Volovich Hady Zalek Hal Burch Jeffrey Yasskin Jim Keller Joe Walnes Jon Wray Jói Sigurðsson Keir Mierle Keith Ray Kenton Varda Kostya Serebryany Krystian Kuzniarek Lev Makhlis Manuel Klimek Mario Tanev Mark Paskin Markus Heule Martijn Vels Matthew Simmons Mika Raento Mike Bland Miklós Fazekas Neal Norwitz Nermin Ozkiranartli Owen Carlsen Paneendra Ba Pasi Valminen Patrick Hanna Patrick Riley Paul Menage Peter Kaminski Piotr Kaminski Preston Jackson Rainer Klaffenboeck Russ Cox Russ Rufer Sean Mcafee Sigurður Ásgeirsson Sverre Sundsdal Szymon Sobik Takeshi Yoshino Tracy Bialik Vadim Berman Vlad Losev Wolfgang Klier Zhanyong Wan libvpl-tools-1.3.0/ext/googletest/LICENSE000066400000000000000000000027031473010523400201230ustar00rootroot00000000000000Copyright 2008, Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. libvpl-tools-1.3.0/ext/googletest/README.md000066400000000000000000000111601473010523400203720ustar00rootroot00000000000000# GoogleTest ### Announcements #### Live at Head GoogleTest now follows the [Abseil Live at Head philosophy](https://abseil.io/about/philosophy#upgrade-support). We recommend [updating to the latest commit in the `main` branch as often as possible](https://github.com/abseil/abseil-cpp/blob/master/FAQ.md#what-is-live-at-head-and-how-do-i-do-it). #### Documentation Updates Our documentation is now live on GitHub Pages at https://google.github.io/googletest/. We recommend browsing the documentation on GitHub Pages rather than directly in the repository. #### Release 1.11.0 [Release 1.11.0](https://github.com/google/googletest/releases/tag/release-1.11.0) is now available. #### Coming Soon * We are planning to take a dependency on [Abseil](https://github.com/abseil/abseil-cpp). * More documentation improvements are planned. ## Welcome to **GoogleTest**, Google's C++ test framework! This repository is a merger of the formerly separate GoogleTest and GoogleMock projects. These were so closely related that it makes sense to maintain and release them together. ### Getting Started See the [GoogleTest User's Guide](https://google.github.io/googletest/) for documentation. We recommend starting with the [GoogleTest Primer](https://google.github.io/googletest/primer.html). More information about building GoogleTest can be found at [googletest/README.md](googletest/README.md). ## Features * An [xUnit](https://en.wikipedia.org/wiki/XUnit) test framework. * Test discovery. * A rich set of assertions. * User-defined assertions. * Death tests. * Fatal and non-fatal failures. * Value-parameterized tests. * Type-parameterized tests. * Various options for running the tests. * XML test report generation. ## Supported Platforms GoogleTest requires a codebase and compiler compliant with the C++11 standard or newer. The GoogleTest code is officially supported on the following platforms. Operating systems or tools not listed below are community-supported. For community-supported platforms, patches that do not complicate the code may be considered. If you notice any problems on your platform, please file an issue on the [GoogleTest GitHub Issue Tracker](https://github.com/google/googletest/issues). Pull requests containing fixes are welcome! ### Operating Systems * Linux * macOS * Windows ### Compilers * gcc 5.0+ * clang 5.0+ * MSVC 2015+ **macOS users:** Xcode 9.3+ provides clang 5.0+. ### Build Systems * [Bazel](https://bazel.build/) * [CMake](https://cmake.org/) **Note:** Bazel is the build system used by the team internally and in tests. CMake is supported on a best-effort basis and by the community. ## Who Is Using GoogleTest? In addition to many internal projects at Google, GoogleTest is also used by the following notable projects: * The [Chromium projects](http://www.chromium.org/) (behind the Chrome browser and Chrome OS). * The [LLVM](http://llvm.org/) compiler. 
* [Protocol Buffers](https://github.com/google/protobuf), Google's data interchange format. * The [OpenCV](http://opencv.org/) computer vision library. ## Related Open Source Projects [GTest Runner](https://github.com/nholthaus/gtest-runner) is a Qt5-based automated test-runner and Graphical User Interface with powerful features for Windows and Linux platforms. [GoogleTest UI](https://github.com/ospector/gtest-gbar) is a test runner that runs your test binary, allows you to track its progress via a progress bar, and displays a list of test failures. Clicking on one shows failure text. GoogleTest UI is written in C#. [GTest TAP Listener](https://github.com/kinow/gtest-tap-listener) is an event listener for GoogleTest that implements the [TAP protocol](https://en.wikipedia.org/wiki/Test_Anything_Protocol) for test result output. If your test runner understands TAP, you may find it useful. [gtest-parallel](https://github.com/google/gtest-parallel) is a test runner that runs tests from your binary in parallel to provide significant speed-up. [GoogleTest Adapter](https://marketplace.visualstudio.com/items?itemName=DavidSchuldenfrei.gtest-adapter) is a VS Code extension allowing you to view GoogleTest in a tree view and run/debug your tests. [C++ TestMate](https://github.com/matepek/vscode-catch2-test-adapter) is a VS Code extension allowing you to view GoogleTest in a tree view and run/debug your tests. [Cornichon](https://pypi.org/project/cornichon/) is a small Gherkin DSL parser that generates stub code for GoogleTest. ## Contributing Changes Please read [`CONTRIBUTING.md`](https://github.com/google/googletest/blob/master/CONTRIBUTING.md) for details on how to contribute to this project. Happy testing! libvpl-tools-1.3.0/ext/googletest/WORKSPACE000066400000000000000000000033521473010523400204000ustar00rootroot00000000000000workspace(name = "com_google_googletest") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") http_archive( name = "com_google_absl", sha256 = "1a1745b5ee81392f5ea4371a4ca41e55d446eeaee122903b2eaffbd8a3b67a2b", strip_prefix = "abseil-cpp-01cc6567cff77738e416a7ddc17de2d435a780ce", urls = ["https://github.com/abseil/abseil-cpp/archive/01cc6567cff77738e416a7ddc17de2d435a780ce.zip"], # 2022-06-21T19:28:27Z ) # Note this must use a commit from the `abseil` branch of the RE2 project.
# https://github.com/google/re2/tree/abseil http_archive( name = "com_googlesource_code_re2", sha256 = "0a890c2aa0bb05b2ce906a15efb520d0f5ad4c7d37b8db959c43772802991887", strip_prefix = "re2-a427f10b9fb4622dd6d8643032600aa1b50fbd12", urls = ["https://github.com/google/re2/archive/a427f10b9fb4622dd6d8643032600aa1b50fbd12.zip"], # 2022-06-09 ) http_archive( name = "rules_python", sha256 = "0b460f17771258341528753b1679335b629d1d25e3af28eda47d009c103a6e15", strip_prefix = "rules_python-aef17ad72919d184e5edb7abf61509eb78e57eda", urls = ["https://github.com/bazelbuild/rules_python/archive/aef17ad72919d184e5edb7abf61509eb78e57eda.zip"], # 2022-06-21T23:44:47Z ) http_archive( name = "bazel_skylib", urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz"], sha256 = "f7be3474d42aae265405a592bb7da8e171919d74c16f082a5457840f06054728", ) http_archive( name = "platforms", sha256 = "a879ea428c6d56ab0ec18224f976515948822451473a80d06c2e50af0bbe5121", strip_prefix = "platforms-da5541f26b7de1dc8e04c075c99df5351742a4a2", urls = ["https://github.com/bazelbuild/platforms/archive/da5541f26b7de1dc8e04c075c99df5351742a4a2.zip"], # 2022-05-27 ) libvpl-tools-1.3.0/ext/googletest/googlemock/000077500000000000000000000000001473010523400212425ustar00rootroot00000000000000libvpl-tools-1.3.0/ext/googletest/googlemock/CMakeLists.txt000066400000000000000000000202111473010523400237760ustar00rootroot00000000000000######################################################################## # Note: CMake support is community-based. The maintainers do not use CMake # internally. # # CMake build script for Google Mock. # # To run the tests for Google Mock itself on Linux, use 'make test' or # ctest. You can select which tests to run using 'ctest -R regex'. # For more options, run 'ctest --help'. option(gmock_build_tests "Build all of Google Mock's own tests." OFF) # A directory to find Google Test sources. if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/gtest/CMakeLists.txt") set(gtest_dir gtest) else() set(gtest_dir ../googletest) endif() # Defines pre_project_set_up_hermetic_build() and set_up_hermetic_build(). include("${gtest_dir}/cmake/hermetic_build.cmake" OPTIONAL) if (COMMAND pre_project_set_up_hermetic_build) # Google Test also calls hermetic setup functions from add_subdirectory, # although its changes will not affect things at the current scope. pre_project_set_up_hermetic_build() endif() ######################################################################## # # Project-wide settings # Name of the project. # # CMake files in this project can refer to the root source directory # as ${gmock_SOURCE_DIR} and to the root binary directory as # ${gmock_BINARY_DIR}. # Language "C" is required for find_package(Threads). cmake_minimum_required(VERSION 3.5) cmake_policy(SET CMP0048 NEW) project(gmock VERSION ${GOOGLETEST_VERSION} LANGUAGES CXX C) if (COMMAND set_up_hermetic_build) set_up_hermetic_build() endif() # Instructs CMake to process Google Test's CMakeLists.txt and add its # targets to the current scope. We are placing Google Test's binary # directory in a subdirectory of our own as VC compilation may break # if they are the same (the default). add_subdirectory("${gtest_dir}" "${gmock_BINARY_DIR}/${gtest_dir}") # These commands only run if this is the main project if(CMAKE_PROJECT_NAME STREQUAL "gmock" OR CMAKE_PROJECT_NAME STREQUAL "googletest-distribution") # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to # make it prominent in the GUI. 
option(BUILD_SHARED_LIBS "Build shared libraries (DLLs)." OFF) else() mark_as_advanced(gmock_build_tests) endif() # Although Google Test's CMakeLists.txt calls this function, the # changes there don't affect the current scope. Therefore we have to # call it again here. config_compiler_and_linker() # from ${gtest_dir}/cmake/internal_utils.cmake # Adds Google Mock's and Google Test's header directories to the search path. set(gmock_build_include_dirs "${gmock_SOURCE_DIR}/include" "${gmock_SOURCE_DIR}" "${gtest_SOURCE_DIR}/include" # This directory is needed to build directly from Google Test sources. "${gtest_SOURCE_DIR}") include_directories(${gmock_build_include_dirs}) ######################################################################## # # Defines the gmock & gmock_main libraries. User tests should link # with one of them. # Google Mock libraries. We build them using more strict warnings than what # are used for other targets, to ensure that Google Mock can be compiled by # a user aggressive about warnings. if (MSVC) cxx_library(gmock "${cxx_strict}" "${gtest_dir}/src/gtest-all.cc" src/gmock-all.cc) cxx_library(gmock_main "${cxx_strict}" "${gtest_dir}/src/gtest-all.cc" src/gmock-all.cc src/gmock_main.cc) else() cxx_library(gmock "${cxx_strict}" src/gmock-all.cc) target_link_libraries(gmock PUBLIC gtest) set_target_properties(gmock PROPERTIES VERSION ${GOOGLETEST_VERSION}) cxx_library(gmock_main "${cxx_strict}" src/gmock_main.cc) target_link_libraries(gmock_main PUBLIC gmock) set_target_properties(gmock_main PROPERTIES VERSION ${GOOGLETEST_VERSION}) endif() # If the CMake version supports it, attach header directory information # to the targets for when we are part of a parent build (ie being pulled # in via add_subdirectory() rather than being a standalone build). if (DEFINED CMAKE_VERSION AND NOT "${CMAKE_VERSION}" VERSION_LESS "2.8.11") string(REPLACE ";" "$<SEMICOLON>" dirs "${gmock_build_include_dirs}") target_include_directories(gmock SYSTEM INTERFACE "$<BUILD_INTERFACE:${dirs}>" "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>") target_include_directories(gmock_main SYSTEM INTERFACE "$<BUILD_INTERFACE:${dirs}>" "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>") endif() ######################################################################## # # Install rules install_project(gmock gmock_main) ######################################################################## # # Google Mock's own tests. # # You can skip this section if you aren't interested in testing # Google Mock itself. # # The tests are not built by default. To build them, set the # gmock_build_tests option to ON. You can do it by running ccmake # or specifying the -Dgmock_build_tests=ON flag when running cmake. if (gmock_build_tests) # This must be set in the root directory for the tests to be run by # 'make test' or ctest. enable_testing() if (MINGW OR CYGWIN) if (CMAKE_VERSION VERSION_LESS "2.8.12") add_compile_options("-Wa,-mbig-obj") else() add_definitions("-Wa,-mbig-obj") endif() endif() ############################################################ # C++ tests built with standard compiler flags.
cxx_test(gmock-actions_test gmock_main) cxx_test(gmock-cardinalities_test gmock_main) cxx_test(gmock_ex_test gmock_main) cxx_test(gmock-function-mocker_test gmock_main) cxx_test(gmock-internal-utils_test gmock_main) cxx_test(gmock-matchers-arithmetic_test gmock_main) cxx_test(gmock-matchers-comparisons_test gmock_main) cxx_test(gmock-matchers-containers_test gmock_main) cxx_test(gmock-matchers-misc_test gmock_main) cxx_test(gmock-more-actions_test gmock_main) cxx_test(gmock-nice-strict_test gmock_main) cxx_test(gmock-port_test gmock_main) cxx_test(gmock-spec-builders_test gmock_main) cxx_test(gmock_link_test gmock_main test/gmock_link2_test.cc) cxx_test(gmock_test gmock_main) if (DEFINED GTEST_HAS_PTHREAD) cxx_test(gmock_stress_test gmock) endif() # gmock_all_test is commented to save time building and running tests. # Uncomment if necessary. # cxx_test(gmock_all_test gmock_main) ############################################################ # C++ tests built with non-standard compiler flags. if (MSVC) cxx_library(gmock_main_no_exception "${cxx_no_exception}" "${gtest_dir}/src/gtest-all.cc" src/gmock-all.cc src/gmock_main.cc) cxx_library(gmock_main_no_rtti "${cxx_no_rtti}" "${gtest_dir}/src/gtest-all.cc" src/gmock-all.cc src/gmock_main.cc) else() cxx_library(gmock_main_no_exception "${cxx_no_exception}" src/gmock_main.cc) target_link_libraries(gmock_main_no_exception PUBLIC gmock) cxx_library(gmock_main_no_rtti "${cxx_no_rtti}" src/gmock_main.cc) target_link_libraries(gmock_main_no_rtti PUBLIC gmock) endif() cxx_test_with_flags(gmock-more-actions_no_exception_test "${cxx_no_exception}" gmock_main_no_exception test/gmock-more-actions_test.cc) cxx_test_with_flags(gmock_no_rtti_test "${cxx_no_rtti}" gmock_main_no_rtti test/gmock-spec-builders_test.cc) cxx_shared_library(shared_gmock_main "${cxx_default}" "${gtest_dir}/src/gtest-all.cc" src/gmock-all.cc src/gmock_main.cc) # Tests that a binary can be built with Google Mock as a shared library. On # some system configurations, it may not be possible to run the binary without # knowing more details about the system configurations. We do not try to run # this binary. To get more robust shared library coverage, configure with # -DBUILD_SHARED_LIBS=ON. cxx_executable_with_flags(shared_gmock_test_ "${cxx_default}" shared_gmock_main test/gmock-spec-builders_test.cc) set_target_properties(shared_gmock_test_ PROPERTIES COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1") ############################################################ # Python tests. cxx_executable(gmock_leak_test_ test gmock_main) py_test(gmock_leak_test) cxx_executable(gmock_output_test_ test gmock) py_test(gmock_output_test) endif() libvpl-tools-1.3.0/ext/googletest/googlemock/README.md000066400000000000000000000030021473010523400225160ustar00rootroot00000000000000# Googletest Mocking (gMock) Framework ### Overview Google's framework for writing and using C++ mock classes. It can help you derive better designs of your system and write better tests. It is inspired by: * [jMock](http://www.jmock.org/) * [EasyMock](http://www.easymock.org/) * [Hamcrest](http://code.google.com/p/hamcrest/) It is designed with C++'s specifics in mind. gMock: - Provides a declarative syntax for defining mocks. - Can define partial (hybrid) mocks, which are a cross of real and mock objects. - Handles functions of arbitrary types and overloaded functions. - Comes with a rich set of matchers for validating function arguments. - Uses an intuitive syntax for controlling the behavior of a mock.
- Does automatic verification of expectations (no record-and-replay needed). - Allows arbitrary (partial) ordering constraints on function calls to be expressed. - Lets a user extend it by defining new matchers and actions. - Does not use exceptions. - Is easy to learn and use. Details and examples can be found here: * [gMock for Dummies](https://google.github.io/googletest/gmock_for_dummies.html) * [Legacy gMock FAQ](https://google.github.io/googletest/gmock_faq.html) * [gMock Cookbook](https://google.github.io/googletest/gmock_cook_book.html) * [gMock Cheat Sheet](https://google.github.io/googletest/gmock_cheat_sheet.html) GoogleMock is a part of [GoogleTest C++ testing framework](http://github.com/google/googletest/) and is subject to the same requirements. libvpl-tools-1.3.0/ext/googletest/googlemock/cmake/000077500000000000000000000000001473010523400223220ustar00rootroot00000000000000libvpl-tools-1.3.0/ext/googletest/googlemock/cmake/gmock.pc.in000066400000000000000000000005331473010523400243540ustar00rootroot00000000000000libdir=@CMAKE_INSTALL_FULL_LIBDIR@ includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@ Name: gmock Description: GoogleMock (without main() function) Version: @PROJECT_VERSION@ URL: https://github.com/google/googletest Requires: gtest = @PROJECT_VERSION@ Libs: -L${libdir} -lgmock @CMAKE_THREAD_LIBS_INIT@ Cflags: -I${includedir} @GTEST_HAS_PTHREAD_MACRO@ libvpl-tools-1.3.0/ext/googletest/googlemock/cmake/gmock_main.pc.in000066400000000000000000000005421473010523400253600ustar00rootroot00000000000000libdir=@CMAKE_INSTALL_FULL_LIBDIR@ includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@ Name: gmock_main Description: GoogleMock (with main() function) Version: @PROJECT_VERSION@ URL: https://github.com/google/googletest Requires: gmock = @PROJECT_VERSION@ Libs: -L${libdir} -lgmock_main @CMAKE_THREAD_LIBS_INIT@ Cflags: -I${includedir} @GTEST_HAS_PTHREAD_MACRO@ libvpl-tools-1.3.0/ext/googletest/googlemock/docs/000077500000000000000000000000001473010523400221720ustar00rootroot00000000000000libvpl-tools-1.3.0/ext/googletest/googlemock/docs/README.md000066400000000000000000000002131473010523400234450ustar00rootroot00000000000000# Content Moved We are working on updates to the GoogleTest documentation, which has moved to the top-level [docs](../../docs) directory. libvpl-tools-1.3.0/ext/googletest/googlemock/include/000077500000000000000000000000001473010523400226650ustar00rootroot00000000000000libvpl-tools-1.3.0/ext/googletest/googlemock/include/gmock/000077500000000000000000000000001473010523400237650ustar00rootroot00000000000000libvpl-tools-1.3.0/ext/googletest/googlemock/include/gmock/gmock-actions.h000066400000000000000000002531251473010523400267030ustar00rootroot00000000000000// Copyright 2007, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Google Mock - a framework for writing C++ mock classes.
//
// The ACTION* family of macros can be used in a namespace scope to
// define custom actions easily. The syntax:
//
//   ACTION(name) { statements; }
//
// will define an action with the given name that executes the
// statements. The value returned by the statements will be used as
// the return value of the action. Inside the statements, you can
// refer to the K-th (0-based) argument of the mock function by
// 'argK', and refer to its type by 'argK_type'. For example:
//
//   ACTION(IncrementArg1) {
//     arg1_type temp = arg1;
//     return ++(*temp);
//   }
//
// allows you to write
//
//   ...WillOnce(IncrementArg1());
//
// You can also refer to the entire argument tuple and its type by
// 'args' and 'args_type', and refer to the mock function type and its
// return type by 'function_type' and 'return_type'.
//
// Note that you don't need to specify the types of the mock function
// arguments. However, rest assured that your code is still type-safe:
// you'll get a compiler error if *arg1 doesn't support the ++
// operator, or if the type of ++(*arg1) isn't compatible with the
// mock function's return type, for example.
//
// Sometimes you'll want to parameterize the action. For that you can use
// another macro:
//
//   ACTION_P(name, param_name) { statements; }
//
// For example:
//
//   ACTION_P(Add, n) { return arg0 + n; }
//
// will allow you to write:
//
//   ...WillOnce(Add(5));
//
// Note that you don't need to provide the type of the parameter
// either. If you need to reference the type of a parameter named
// 'foo', you can write 'foo_type'. For example, in the body of
// ACTION_P(Add, n) above, you can write 'n_type' to refer to the type
// of 'n'.
//
// We also provide ACTION_P2, ACTION_P3, ..., up to ACTION_P10 to support
// multi-parameter actions.
//
// For the purpose of typing, you can view
//
//   ACTION_Pk(Foo, p1, ..., pk) { ... }
//
// as shorthand for
//
//   template <typename p1_type, ..., typename pk_type>
//   FooActionPk<p1_type, ..., pk_type> Foo(p1_type p1, ..., pk_type pk) { ... }
//
// In particular, you can provide the template type arguments
// explicitly when invoking Foo(), as in Foo<long, bool>(5, false);
// although usually you can rely on the compiler to infer the types
// for you automatically. You can assign the result of expression
// Foo(p1, ..., pk) to a variable of type FooActionPk<p1_type, ...,
// pk_type>. This can be useful when composing actions.
//
// You can also overload actions with different numbers of parameters:
//
//   ACTION_P(Plus, a) { ... }
//   ACTION_P2(Plus, a, b) { ... }
//
// While it's tempting to always use the ACTION* macros when defining
// a new action, you should also consider implementing ActionInterface
// or using MakePolymorphicAction() instead, especially if you need to
// use the action a lot. While these approaches require more work,
While these approaches require more work, // they give you more control on the types of the mock function // arguments and the action parameters, which in general leads to // better compiler error messages that pay off in the long run. They // also allow overloading actions based on parameter types (as opposed // to just based on the number of parameters). // // CAVEAT: // // ACTION*() can only be used in a namespace scope as templates cannot be // declared inside of a local class. // Users can, however, define any local functors (e.g. a lambda) that // can be used as actions. // // MORE INFORMATION: // // To learn more about using these macros, please search for 'ACTION' on // https://github.com/google/googletest/blob/master/docs/gmock_cook_book.md // IWYU pragma: private, include "gmock/gmock.h" // IWYU pragma: friend gmock/.* #ifndef GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_ACTIONS_H_ #define GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_ACTIONS_H_ #ifndef _WIN32_WCE #include #endif #include #include #include #include #include #include #include #include "gmock/internal/gmock-internal-utils.h" #include "gmock/internal/gmock-port.h" #include "gmock/internal/gmock-pp.h" #ifdef _MSC_VER #pragma warning(push) #pragma warning(disable : 4100) #endif namespace testing { // To implement an action Foo, define: // 1. a class FooAction that implements the ActionInterface interface, and // 2. a factory function that creates an Action object from a // const FooAction*. // // The two-level delegation design follows that of Matcher, providing // consistency for extension developers. It also eases ownership // management as Action objects can now be copied like plain values. namespace internal { // BuiltInDefaultValueGetter::Get() returns a // default-constructed T value. BuiltInDefaultValueGetter::Get() crashes with an error. // // This primary template is used when kDefaultConstructible is true. template struct BuiltInDefaultValueGetter { static T Get() { return T(); } }; template struct BuiltInDefaultValueGetter { static T Get() { Assert(false, __FILE__, __LINE__, "Default action undefined for the function return type."); return internal::Invalid(); // The above statement will never be reached, but is required in // order for this function to compile. } }; // BuiltInDefaultValue::Get() returns the "built-in" default value // for type T, which is NULL when T is a raw pointer type, 0 when T is // a numeric type, false when T is bool, or "" when T is string or // std::string. In addition, in C++11 and above, it turns a // default-constructed T value if T is default constructible. For any // other type T, the built-in default T value is undefined, and the // function will abort the process. template class BuiltInDefaultValue { public: // This function returns true if and only if type T has a built-in default // value. static bool Exists() { return ::std::is_default_constructible::value; } static T Get() { return BuiltInDefaultValueGetter< T, ::std::is_default_constructible::value>::Get(); } }; // This partial specialization says that we use the same built-in // default value for T and const T. template class BuiltInDefaultValue { public: static bool Exists() { return BuiltInDefaultValue::Exists(); } static T Get() { return BuiltInDefaultValue::Get(); } }; // This partial specialization defines the default values for pointer // types. 
template <typename T>
class BuiltInDefaultValue<T*> {
 public:
  static bool Exists() { return true; }
  static T* Get() { return nullptr; }
};

// The following specializations define the default values for
// specific types we care about.
#define GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(type, value) \
  template <>                                                     \
  class BuiltInDefaultValue<type> {                               \
   public:                                                        \
    static bool Exists() { return true; }                         \
    static type Get() { return value; }                           \
  }

GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(void, );  // NOLINT
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(::std::string, "");
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(bool, false);
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(unsigned char, '\0');
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(signed char, '\0');
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(char, '\0');

// There's no need for a default action for signed wchar_t, as that
// type is the same as wchar_t for gcc, and invalid for MSVC.
//
// There's also no need for a default action for unsigned wchar_t, as
// that type is the same as unsigned int for gcc, and invalid for
// MSVC.
#if GMOCK_WCHAR_T_IS_NATIVE_
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(wchar_t, 0U);  // NOLINT
#endif

GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(unsigned short, 0U);  // NOLINT
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(signed short, 0);     // NOLINT
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(unsigned int, 0U);
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(signed int, 0);
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(unsigned long, 0UL);     // NOLINT
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(signed long, 0L);        // NOLINT
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(unsigned long long, 0);  // NOLINT
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(signed long long, 0);    // NOLINT
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(float, 0);
GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_(double, 0);

#undef GMOCK_DEFINE_DEFAULT_ACTION_FOR_RETURN_TYPE_

// Partial implementations of metaprogramming types from the standard library
// not available in C++11.
template <typename P>
struct negation
    // NOLINTNEXTLINE
    : std::integral_constant<bool, bool(!P::value)> {};

// Base case: with zero predicates the answer is always true.
template <typename...>
struct conjunction : std::true_type {};

// With a single predicate, the answer is that predicate.
template <typename P1>
struct conjunction<P1> : P1 {};

// With multiple predicates the answer is the first predicate if that is false,
// and we recurse otherwise.
template <typename P1, typename... Ps>
struct conjunction<P1, Ps...>
    : std::conditional<bool(P1::value), conjunction<Ps...>, P1>::type {};

template <typename...>
struct disjunction : std::false_type {};

template <typename P1>
struct disjunction<P1> : P1 {};

template <typename P1, typename... Ps>
struct disjunction<P1, Ps...>
    // NOLINTNEXTLINE
    : std::conditional<!bool(P1::value), disjunction<Ps...>, P1>::type {};

template <typename...>
using void_t = void;

// Detects whether an expression of type `From` can be implicitly converted to
// `To` according to [conv]. In C++17, [conv]/3 defines this as follows:
//
//     An expression e can be implicitly converted to a type T if and only if
//     the declaration T t=e; is well-formed, for some invented temporary
//     variable t ([dcl.init]).
//
// [conv]/2 implies we can use function argument passing to detect whether this
// initialization is valid.
//
// Note that this is distinct from is_convertible, which requires this be valid:
//
//     To test() {
//       return declval<From>();
//     }
//
// In particular, is_convertible doesn't give the correct answer when `To` and
// `From` are the same non-moveable type since `declval<From>` will be an rvalue
// reference, defeating the guaranteed copy elision that would otherwise make
// this function work.
//
// REQUIRES: `From` is not cv void.
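// As an illustrative sketch (not part of the original header), the trait
// defined below would be expected to report, for example:
//
//   static_assert(is_implicitly_convertible<int, long>::value, "");
//   static_assert(!is_implicitly_convertible<const char*, int>::value, "");
//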
template <typename From, typename To>
struct is_implicitly_convertible {
 private:
  // A function that accepts a parameter of type T. This can be called with
  // type U successfully only if U is implicitly convertible to T.
  template <typename T>
  static void Accept(T);

  // A function that creates a value of type T.
  template <typename T>
  static T Make();

  // An overload to be selected when implicit conversion from T to To is
  // possible.
  template <typename T, typename = decltype(Accept<To>(Make<T>()))>
  static std::true_type TestImplicitConversion(int);

  // A fallback overload selected in all other cases.
  template <typename T>
  static std::false_type TestImplicitConversion(...);

 public:
  using type = decltype(TestImplicitConversion<From>(0));
  static constexpr bool value = type::value;
};

// Like std::invoke_result_t from C++17, but works only for objects with call
// operators (not e.g. member function pointers, which we don't need specific
// support for in OnceAction because std::function deals with them).
template <typename F, typename... Args>
using call_result_t = decltype(std::declval<F>()(std::declval<Args>()...));

template <typename Void, typename R, typename F, typename... Args>
struct is_callable_r_impl : std::false_type {};

// Specialize the struct for those template arguments where call_result_t is
// well-formed. When it's not, the generic template above is chosen, resulting
// in std::false_type.
template <typename R, typename F, typename... Args>
struct is_callable_r_impl<void_t<call_result_t<F, Args...>>, R, F, Args...>
    : std::conditional<
          std::is_void<R>::value,  //
          std::true_type,          //
          is_implicitly_convertible<call_result_t<F, Args...>, R>>::type {};

// Like std::is_invocable_r from C++17, but works only for objects with call
// operators. See the note on call_result_t.
template <typename R, typename F, typename... Args>
using is_callable_r = is_callable_r_impl<void, R, F, Args...>;

// Like std::as_const from C++17.
template <typename T>
typename std::add_const<T>::type& as_const(T& t) {
  return t;
}

}  // namespace internal

// Specialized for function types below.
template <typename F>
class OnceAction;

// An action that can only be used once.
//
// This is accepted by WillOnce, which doesn't require the underlying action to
// be copy-constructible (only move-constructible), and promises to invoke it
// as an rvalue reference. This allows the action to work with move-only types
// like std::move_only_function in a type-safe manner.
//
// For example:
//
//     // Assume we have some API that needs to accept a unique pointer to some
//     // non-copyable object Foo.
//     void AcceptUniquePointer(std::unique_ptr<Foo> foo);
//
//     // We can define an action that provides a Foo to that API. Because it
//     // has to give away its unique pointer, it must not be called more than
//     // once, so its call operator is &&-qualified.
//     struct ProvideFoo {
//       std::unique_ptr<Foo> foo;
//
//       void operator()() && {
//         AcceptUniquePointer(std::move(foo));
//       }
//     };
//
//     // This action can be used with WillOnce.
//     EXPECT_CALL(mock, Call)
//         .WillOnce(ProvideFoo{std::make_unique<Foo>(...)});
//
//     // But a call to WillRepeatedly will fail to compile. This is correct,
//     // since the action cannot correctly be used repeatedly.
//     EXPECT_CALL(mock, Call)
//         .WillRepeatedly(ProvideFoo{std::make_unique<Foo>(...)});
//
// A less-contrived example would be an action that returns an arbitrary type,
// whose &&-qualified call operator is capable of dealing with move-only types.
template <typename Result, typename... Args>
class OnceAction<Result(Args...)> final {
 private:
  // True iff we can use the given callable type (or lvalue reference) directly
  // via StdFunctionAdaptor.
  template <typename Callable>
  using IsDirectlyCompatible = internal::conjunction<
      // It must be possible to capture the callable in StdFunctionAdaptor.
      std::is_constructible<typename std::decay<Callable>::type, Callable>,
      // The callable must be compatible with our signature.
      internal::is_callable_r<Result, typename std::decay<Callable>::type,
                              Args...>>;

  // True iff we can use the given callable type via StdFunctionAdaptor once we
  // ignore incoming arguments.
  template <typename Callable>
  using IsCompatibleAfterIgnoringArguments = internal::conjunction<
      // It must be possible to capture the callable in a lambda.
      std::is_constructible<typename std::decay<Callable>::type, Callable>,
      // The callable must be invocable with zero arguments, returning something
      // convertible to Result.
      internal::is_callable_r<Result, typename std::decay<Callable>::type>>;

 public:
  // Construct from a callable that is directly compatible with our mocked
  // signature: it accepts our function type's arguments and returns something
  // convertible to our result type.
  template <typename Callable,
            typename std::enable_if<
                internal::conjunction<
                    internal::negation<std::is_same<
                        OnceAction, typename std::decay<Callable>::type>>,
                    IsDirectlyCompatible<Callable>>  //
                ::value,
                int>::type = 0>
  OnceAction(Callable&& callable)  // NOLINT
      : function_(StdFunctionAdaptor<typename std::decay<Callable>::type>(
            {}, std::forward<Callable>(callable))) {}

  // As above, but for a callable that ignores the mocked function's arguments.
  template <typename Callable,
            typename std::enable_if<
                internal::conjunction<
                    internal::negation<std::is_same<
                        OnceAction, typename std::decay<Callable>::type>>,
                    // Exclude callables for which the overload above works.
                    // We'd rather provide the arguments if possible.
                    internal::negation<IsDirectlyCompatible<Callable>>,
                    IsCompatibleAfterIgnoringArguments<Callable>>::value,
                int>::type = 0>
  OnceAction(Callable&& callable)  // NOLINT
      // Call the constructor above with a callable
      // that ignores the input arguments.
      : OnceAction(IgnoreIncomingArguments<typename std::decay<Callable>::type>{
            std::forward<Callable>(callable)}) {}

  // We are naturally copyable because we store only an std::function, but
  // semantically we should not be copyable.
  OnceAction(const OnceAction&) = delete;
  OnceAction& operator=(const OnceAction&) = delete;
  OnceAction(OnceAction&&) = default;

  // Invoke the underlying action callable with which we were constructed,
  // handing it the supplied arguments.
  Result Call(Args... args) && {
    return function_(std::forward<Args>(args)...);
  }

 private:
  // An adaptor that wraps a callable that is compatible with our signature and
  // being invoked as an rvalue reference so that it can be used as an
  // StdFunctionAdaptor. This throws away type safety, but that's fine because
  // this is only used by WillOnce, which we know calls at most once.
  //
  // Once we have something like std::move_only_function from C++23, we can do
  // away with this.
  template <typename Callable>
  class StdFunctionAdaptor final {
   public:
    // A tag indicating that the (otherwise universal) constructor is accepting
    // the callable itself, instead of e.g. stealing calls for the move
    // constructor.
    struct CallableTag final {};

    template <typename F>
    explicit StdFunctionAdaptor(CallableTag, F&& callable)
        : callable_(std::make_shared<Callable>(std::forward<F>(callable))) {}

    // Rather than explicitly returning Result, we return whatever the wrapped
    // callable returns. This allows for compatibility with existing uses like
    // the following, when the mocked function returns void:
    //
    //     EXPECT_CALL(mock_fn_, Call)
    //         .WillOnce([&] {
    //           [...]
    //           return 0;
    //         });
    //
    // Such a callable can be turned into std::function<void()>. If we use an
    // explicit return type of Result here then it *doesn't* work with
    // std::function<void()>, because we'll get a "void function should not
    // return a value" error.
    //
    // We need not worry about incompatible result types because the SFINAE on
    // OnceAction already checks this for us. std::is_invocable_r_v itself makes
    // the same allowance for void result types.
    template <typename... ArgRefs>
    internal::call_result_t<Callable, ArgRefs...> operator()(
        ArgRefs&&... args) const {
      return std::move(*callable_)(std::forward<ArgRefs>(args)...);
    }

   private:
    // We must put the callable on the heap so that we are copyable, which
    // std::function needs.
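    // (Illustrative note, not part of the original header: copying a
    // shared_ptr only bumps a reference count, so even a move-only Callable
    // can sit inside a copyable std::function target. A minimal sketch of the
    // idea, using std::unique_ptr<int> as a stand-in move-only type:
    //
    //   auto payload = std::make_shared<std::unique_ptr<int>>(
    //       std::make_unique<int>(42));
    //   auto copy = payload;  // copies the handle, not the payload
    // )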
// When an unexpected function call is encountered, Google Mock will
// let it return a default value if the user has specified one for its
// return type, or if the return type has a built-in default value;
// otherwise Google Mock won't know what value to return and will have
// to abort the process.
//
// The DefaultValue<T> class allows a user to specify the
// default value for a type T that is both copyable and publicly
// destructible (i.e. anything that can be used as a function return
// type). The usage is:
//
//   // Sets the default value for type T to be foo.
//   DefaultValue<T>::Set(foo);
template <typename T>
class DefaultValue {
 public:
  // Sets the default value for type T; requires T to be
  // copy-constructible and have a public destructor.
  static void Set(T x) {
    delete producer_;
    producer_ = new FixedValueProducer(x);
  }

  // Provides a factory function to be called to generate the default value.
  // This method can be used even if T is only move-constructible, but it is
  // not limited to that case.
  typedef T (*FactoryFunction)();
  static void SetFactory(FactoryFunction factory) {
    delete producer_;
    producer_ = new FactoryValueProducer(factory);
  }

  // Unsets the default value for type T.
  static void Clear() {
    delete producer_;
    producer_ = nullptr;
  }

  // Returns true if and only if the user has set the default value for type T.
  static bool IsSet() { return producer_ != nullptr; }

  // Returns true if T has a default return value set by the user or there
  // exists a built-in default value.
  static bool Exists() {
    return IsSet() || internal::BuiltInDefaultValue<T>::Exists();
  }

  // Returns the default value for type T if the user has set one;
  // otherwise returns the built-in default value. Requires that Exists()
  // is true, which ensures that the return value is well-defined.
  static T Get() {
    return producer_ == nullptr ? internal::BuiltInDefaultValue<T>::Get()
                                : producer_->Produce();
  }

 private:
  class ValueProducer {
   public:
    virtual ~ValueProducer() {}
    virtual T Produce() = 0;
  };

  class FixedValueProducer : public ValueProducer {
   public:
    explicit FixedValueProducer(T value) : value_(value) {}
    T Produce() override { return value_; }

   private:
    const T value_;
    FixedValueProducer(const FixedValueProducer&) = delete;
    FixedValueProducer& operator=(const FixedValueProducer&) = delete;
  };

  class FactoryValueProducer : public ValueProducer {
   public:
    explicit FactoryValueProducer(FactoryFunction factory)
        : factory_(factory) {}
    T Produce() override { return factory_(); }

   private:
    const FactoryFunction factory_;
    FactoryValueProducer(const FactoryValueProducer&) = delete;
    FactoryValueProducer& operator=(const FactoryValueProducer&) = delete;
  };

  static ValueProducer* producer_;
};

// This partial specialization allows a user to set default values for
// reference types.
template <typename T>
class DefaultValue<T&> {
 public:
  // Sets the default value for type T&.
  static void Set(T& x) {  // NOLINT
    address_ = &x;
  }

  // Unsets the default value for type T&.
  static void Clear() { address_ = nullptr; }

  // Returns true if and only if the user has set the default value for type
  // T&.
  static bool IsSet() { return address_ != nullptr; }

  // Returns true if T has a default return value set by the user or there
  // exists a built-in default value.
  static bool Exists() {
    return IsSet() || internal::BuiltInDefaultValue<T&>::Exists();
  }

  // Returns the default value for type T& if the user has set one;
  // otherwise returns the built-in default value if there is one;
  // otherwise aborts the process.
  static T& Get() {
    return address_ == nullptr ? internal::BuiltInDefaultValue<T&>::Get()
                               : *address_;
  }

 private:
  static T* address_;
};

// This specialization allows DefaultValue<void>::Get() to
// compile.
template <>
class DefaultValue<void> {
 public:
  static bool Exists() { return true; }
  static void Get() {}
};

// Points to the user-set default value for type T.
template <typename T>
typename DefaultValue<T>::ValueProducer* DefaultValue<T>::producer_ = nullptr;

// Points to the user-set default value for type T&.
template <typename T>
T* DefaultValue<T&>::address_ = nullptr;
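// A brief sketch of DefaultValue in use (MyType is a hypothetical
// user-defined return type):
//
//   DefaultValue<MyType>::Set(MyType(42));              // fixed default
//   DefaultValue<MyType>::SetFactory([] { return MyType(0); });
//   // ... uninteresting calls returning MyType now succeed ...
//   DefaultValue<MyType>::Clear();                      // back to built-ins
//
// SetFactory is the only option when MyType is move-only, since Set requires
// a copyable value.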
// Implement this interface to define an action for function type F.
template <typename F>
class ActionInterface {
 public:
  typedef typename internal::Function<F>::Result Result;
  typedef typename internal::Function<F>::ArgumentTuple ArgumentTuple;

  ActionInterface() {}
  virtual ~ActionInterface() {}

  // Performs the action. This method is not const, as in general an
  // action can have side effects and be stateful. For example, a
  // get-the-next-element-from-the-collection action will need to
  // remember the current element.
  virtual Result Perform(const ArgumentTuple& args) = 0;

 private:
  ActionInterface(const ActionInterface&) = delete;
  ActionInterface& operator=(const ActionInterface&) = delete;
};

template <typename F>
class Action;

// An Action<R(Args...)> is a copyable and IMMUTABLE (except by assignment)
// object that represents an action to be taken when a mock function of type
// R(Args...) is called. The implementation of Action<T> is just a
// std::shared_ptr to const ActionInterface<T>. Don't inherit from Action! You
// can view an object implementing ActionInterface<F> as a concrete action
// (including its current state), and an Action<F> object as a handle to it.
template <typename R, typename... Args>
class Action<R(Args...)> {
 private:
  using F = R(Args...);

  // Adapter class to allow constructing Action from a legacy ActionInterface.
  // New code should create Actions from functors instead.
  struct ActionAdapter {
    // Adapter must be copyable to satisfy std::function requirements.
    ::std::shared_ptr<ActionInterface<F>> impl_;

    template <typename... InArgs>
    typename internal::Function<F>::Result operator()(InArgs&&... args) {
      return impl_->Perform(
          ::std::forward_as_tuple(::std::forward<InArgs>(args)...));
    }
  };

  template <typename G>
  using IsCompatibleFunctor = std::is_constructible<std::function<F>, G>;

 public:
  typedef typename internal::Function<F>::Result Result;
  typedef typename internal::Function<F>::ArgumentTuple ArgumentTuple;

  // Constructs a null Action. Needed for storing Action objects in
  // STL containers.
  Action() {}

  // Construct an Action from a specified callable.
  // This cannot take std::function directly, because then Action would not be
  // directly constructible from lambda (it would require two conversions).
  template <
      typename G,
      typename = typename std::enable_if<internal::disjunction<
          IsCompatibleFunctor<G>,
          std::is_constructible<std::function<Result()>, G>>::value>::type>
  Action(G&& fun) {  // NOLINT
    Init(::std::forward<G>(fun), IsCompatibleFunctor<G>());
  }

  // Constructs an Action from its implementation.
  explicit Action(ActionInterface<F>* impl)
      : fun_(ActionAdapter{::std::shared_ptr<ActionInterface<F>>(impl)}) {}

  // This constructor allows us to turn an Action<Func> object into an
  // Action<F>, as long as F's arguments can be implicitly converted
  // to Func's and Func's return type can be implicitly converted to F's.
  template <typename Func>
  Action(const Action<Func>& action)  // NOLINT
      : fun_(action.fun_) {}

  // Returns true if and only if this is the DoDefault() action.
  bool IsDoDefault() const { return fun_ == nullptr; }

  // Performs the action. Note that this method is const even though
  // the corresponding method in ActionInterface is not. The reason
  // is that a const Action<F> means that it cannot be re-bound to
  // another concrete action, not that the concrete action it binds to
  // cannot change state. (Think of the difference between a const
  // pointer and a pointer to const.)
  Result Perform(ArgumentTuple args) const {
    if (IsDoDefault()) {
      internal::IllegalDoDefault(__FILE__, __LINE__);
    }
    return internal::Apply(fun_, ::std::move(args));
  }

  // An action can be used as a OnceAction, since it's obviously safe to call
  // it once.
  operator OnceAction<F>() const {  // NOLINT
    // Return a OnceAction-compatible callable that calls Perform with the
    // arguments it is provided. We could instead just return fun_, but then
    // we'd need to handle the IsDoDefault() case separately.
    struct OA {
      Action<F> action;

      R operator()(Args... args) && {
        return action.Perform(
            std::forward_as_tuple(std::forward<Args>(args)...));
      }
    };

    return OA{*this};
  }

 private:
  template <typename G>
  friend class Action;

  template <typename G>
  void Init(G&& g, ::std::true_type) {
    fun_ = ::std::forward<G>(g);
  }

  template <typename G>
  void Init(G&& g, ::std::false_type) {
    fun_ = IgnoreArgs<typename ::std::decay<G>::type>{::std::forward<G>(g)};
  }

  template <typename FunctionImpl>
  struct IgnoreArgs {
    template <typename... InArgs>
    Result operator()(const InArgs&...) const {
      return function_impl();
    }

    FunctionImpl function_impl;
  };

  // fun_ is an empty function if and only if this is the DoDefault() action.
  ::std::function<F> fun_;
};

// The PolymorphicAction class template makes it easy to implement a
// polymorphic action (i.e. an action that can be used in mock
// functions of more than one type, e.g. Return()).
//
// To define a polymorphic action, a user first provides a COPYABLE
// implementation class that has a Perform() method template:
//
//   class FooAction {
//    public:
//     template <typename Result, typename ArgumentTuple>
//     Result Perform(const ArgumentTuple& args) const {
//       // Processes the arguments and returns a result, using
//       // std::get<N>(args) to get the N-th (0-based) argument in the tuple.
//     }
//     ...
//   };
//
// Then the user creates the polymorphic action using
// MakePolymorphicAction(object) where object has type FooAction. See
// the definition of Return(void) and SetArgumentPointee<N>(value) for
// complete examples.
template <typename Impl>
class PolymorphicAction {
 public:
  explicit PolymorphicAction(const Impl& impl) : impl_(impl) {}

  template <typename F>
  operator Action<F>() const {
    return Action<F>(new MonomorphicImpl<F>(impl_));
  }

 private:
  template <typename F>
  class MonomorphicImpl : public ActionInterface<F> {
   public:
    typedef typename internal::Function<F>::Result Result;
    typedef typename internal::Function<F>::ArgumentTuple ArgumentTuple;

    explicit MonomorphicImpl(const Impl& impl) : impl_(impl) {}

    Result Perform(const ArgumentTuple& args) override {
      return impl_.template Perform<Result>(args);
    }

   private:
    Impl impl_;
  };

  Impl impl_;
};

// Creates an Action from its implementation and returns it. The
// created Action object owns the implementation.
template <typename F>
Action<F> MakeAction(ActionInterface<F>* impl) {
  return Action<F>(impl);
}

// Creates a polymorphic action from its implementation. This is
// easier to use than the PolymorphicAction<Impl> constructor as it
// doesn't require you to explicitly write the template argument, e.g.
//
//   MakePolymorphicAction(foo);
// vs
//   PolymorphicAction<TypeOfFoo>(foo);
template <typename Impl>
inline PolymorphicAction<Impl> MakePolymorphicAction(const Impl& impl) {
  return PolymorphicAction<Impl>(impl);
}
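// To make the recipe above concrete, a hedged sketch of a polymorphic action
// that returns the number of arguments it received (the name is illustrative,
// not part of this header):
//
//   class ReturnArgCountAction {
//    public:
//     template <typename Result, typename ArgumentTuple>
//     Result Perform(const ArgumentTuple& args) const {
//       return static_cast<Result>(std::tuple_size<ArgumentTuple>::value);
//     }
//   };
//
// MakePolymorphicAction(ReturnArgCountAction()) can then be used in any mock
// function whose result type accepts that cast.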
namespace internal {

// Helper struct to specialize ReturnAction to execute a move instead of a copy
// on return. Useful for move-only types, but could be used on any type.
template <typename T>
struct ByMoveWrapper {
  explicit ByMoveWrapper(T value) : payload(std::move(value)) {}
  T payload;
};

// The general implementation of Return(R). Specializations follow below.
template <typename R>
class ReturnAction final {
 public:
  explicit ReturnAction(R value) : value_(std::move(value)) {}

  template <typename U, typename... Args,
            typename = typename std::enable_if<conjunction<
                // See the requirements documented on Return.
                negation<std::is_same<void, U>>,  //
                negation<std::is_reference<U>>,   //
                std::is_convertible<R, U>,        //
                std::is_move_constructible<U>>::value>::type>
  operator OnceAction<U(Args...)>() && {  // NOLINT
    return Impl<U>(std::move(value_));
  }

  template <typename U, typename... Args,
            typename = typename std::enable_if<conjunction<
                // See the requirements documented on Return.
                negation<std::is_same<void, U>>,   //
                negation<std::is_reference<U>>,    //
                std::is_convertible<const R&, U>,  //
                std::is_copy_constructible<U>>::value>::type>
  operator Action<U(Args...)>() const {  // NOLINT
    return Impl<U>(value_);
  }

 private:
  // Implements the Return(x) action for a mock function that returns type U.
  template <typename U>
  class Impl final {
   public:
    // The constructor used when the return value is allowed to move from the
    // input value (i.e. we are converting to OnceAction).
    explicit Impl(R&& input_value)
        : state_(new State(std::move(input_value))) {}

    // The constructor used when the return value is not allowed to move from
    // the input value (i.e. we are converting to Action).
    explicit Impl(const R& input_value) : state_(new State(input_value)) {}

    U operator()() && { return std::move(state_->value); }
    U operator()() const& { return state_->value; }

   private:
    // We put our state on the heap so that the compiler-generated copy/move
    // constructors work correctly even when U is a reference-like type. This
    // is necessary only because we eagerly create State::value (see the note
    // on that symbol for details). If we instead had only the input value as
    // a member then the default constructors would work fine.
    //
    // For example, when R is std::string and U is std::string_view, value is
    // a reference to the string backed by input_value. The copy constructor
    // would copy both, so that we wind up with a new input_value object (with
    // the same contents) and a reference to the *old* input_value object
    // rather than the new one.
    struct State {
      explicit State(const R& input_value_in)
          : input_value(input_value_in),
            // Make an implicit conversion to Result before initializing the U
            // object we store, avoiding calling any explicit constructor of U
            // from R.
            //
            // This simulates the language rules: a function with return type
            // U that does `return R()` requires R to be implicitly
            // convertible to U, and uses that path for the conversion, even
            // if U has an explicit constructor from R.
            value(ImplicitCast_<U>(internal::as_const(input_value))) {}

      // As above, but for the case where we're moving from the ReturnAction
      // object because it's being used as a OnceAction.
      explicit State(R&& input_value_in)
          : input_value(std::move(input_value_in)),
            // For the same reason as above we make an implicit conversion to
            // U before initializing the value.
            //
            // Unlike above we provide the input value as an rvalue to the
            // implicit conversion because this is a OnceAction: it's fine if
            // it wants to consume the input value.
            value(ImplicitCast_<U>(std::move(input_value))) {}

      // A copy of the value originally provided by the user. We retain this
      // in addition to the value of the mock function's result type below in
      // case the latter is a reference-like type. See the std::string_view
      // example in the documentation on Return.
      R input_value;

      // The value we actually return, as the type returned by the mock
      // function itself.
      //
      // We eagerly initialize this here, rather than lazily doing the
      // implicit conversion automatically each time Perform is called, for
      // historical reasons: in 2009-11, commit a070cbd91c (Google changelist
      // 13540126) made the Action<U()> conversion operator eagerly convert
      // the R value to U, but without keeping the R alive. This broke the use
      // case discussed in the documentation for Return, making reference-like
      // types such as std::string_view not safe to use as U where the input
      // type R is a value-like type such as std::string.
      //
      // The example the commit gave was not very clear, nor was the issue
      // thread (https://github.com/google/googlemock/issues/86), but it seems
      // the worry was about reference-like input types R that flatten to a
      // value-like type U when being implicitly converted. An example of this
      // is std::vector<bool>::reference, which is often a proxy type with a
      // reference to the underlying vector:
      //
      //     // Helper method: have the mock function return bools according
      //     // to the supplied script.
      //     void SetActions(MockFunction<bool(size_t)>& mock,
      //                     const std::vector<bool>& script) {
      //       for (size_t i = 0; i < script.size(); ++i) {
      //         EXPECT_CALL(mock, Call(i)).WillOnce(Return(script[i]));
      //       }
      //     }
      //
      //     TEST(Foo, Bar) {
      //       // Set actions using a temporary vector, whose operator[]
      //       // returns proxy objects with references that will be
      //       // dangling once the call to SetActions finishes and the
      //       // vector is destroyed.
      //       MockFunction<bool(size_t)> mock;
      //       SetActions(mock, {false, true});
      //
      //       EXPECT_FALSE(mock.AsStdFunction()(0));
      //       EXPECT_TRUE(mock.AsStdFunction()(1));
      //     }
      //
      // This eager conversion helps with a simple case like this, but doesn't
      // fully make these types work in general. For example the following
      // still uses a dangling reference:
      //
      //     TEST(Foo, Baz) {
      //       MockFunction<std::vector<std::string>()> mock;
      //
      //       // Return the same vector twice, and then the empty vector
      //       // thereafter.
      //       auto action = Return(std::initializer_list<std::string>{
      //           "taco", "burrito",
      //       });
      //
      //       EXPECT_CALL(mock, Call)
      //           .WillOnce(action)
      //           .WillOnce(action)
      //           .WillRepeatedly(Return(std::vector<std::string>{}));
      //
      //       EXPECT_THAT(mock.AsStdFunction()(),
      //                   ElementsAre("taco", "burrito"));
      //       EXPECT_THAT(mock.AsStdFunction()(),
      //                   ElementsAre("taco", "burrito"));
      //       EXPECT_THAT(mock.AsStdFunction()(), IsEmpty());
      //     }
      //
      U value;
    };

    const std::shared_ptr<State> state_;
  };

  R value_;
};

// A specialization of ReturnAction<R> when R is ByMoveWrapper<T> for some T.
//
// This version applies the type system-defeating hack of moving from T even
// in the const call operator, checking at runtime that it isn't called more
// than once, since the user has declared their intent to do so by using
// ByMove.
template <typename T>
class ReturnAction<ByMoveWrapper<T>> final {
 public:
  explicit ReturnAction(ByMoveWrapper<T> wrapper)
      : state_(new State(std::move(wrapper.payload))) {}

  T operator()() const {
    GTEST_CHECK_(!state_->called)
        << "A ByMove() action must be performed at most once.";

    state_->called = true;
    return std::move(state_->value);
  }

 private:
  // We store our state on the heap so that we are copyable as required by
  // Action, despite the fact that we are stateful and T may not be copyable.
  struct State {
    explicit State(T&& value_in) : value(std::move(value_in)) {}

    T value;
    bool called = false;
  };

  const std::shared_ptr<State> state_;
};
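// A minimal sketch of the ByMove path above (illustrative only):
//
//   MockFunction<std::unique_ptr<int>()> mock;
//   EXPECT_CALL(mock, Call)
//       .WillOnce(Return(ByMove(std::make_unique<int>(17))));
//   std::unique_ptr<int> p = mock.AsStdFunction()();  // moves the value out
//
// A second call through the same ByMove action would trip the GTEST_CHECK_
// above, since the stored value has already been consumed.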
// Implements the ReturnNull() action.
class ReturnNullAction {
 public:
  // Allows ReturnNull() to be used in any pointer-returning function. In
  // C++11 this is enforced by returning nullptr, and in non-C++11 by
  // asserting a pointer type on compile time.
  template <typename Result, typename ArgumentTuple>
  static Result Perform(const ArgumentTuple&) {
    return nullptr;
  }
};

// Implements the Return() action.
class ReturnVoidAction {
 public:
  // Allows Return() to be used in any void-returning function.
  template <typename Result, typename ArgumentTuple>
  static void Perform(const ArgumentTuple&) {
    static_assert(std::is_void<Result>::value, "Result should be void.");
  }
};

// Implements the polymorphic ReturnRef(x) action, which can be used
// in any function that returns a reference to the type of x,
// regardless of the argument types.
template <typename T>
class ReturnRefAction {
 public:
  // Constructs a ReturnRefAction object from the reference to be returned.
  explicit ReturnRefAction(T& ref) : ref_(ref) {}  // NOLINT

  // This template type conversion operator allows ReturnRef(x) to be
  // used in ANY function that returns a reference to x's type.
  template <typename F>
  operator Action<F>() const {
    typedef typename Function<F>::Result Result;
    // Asserts that the function return type is a reference. This
    // catches the user error of using ReturnRef(x) when Return(x)
    // should be used, and generates some helpful error message.
    static_assert(std::is_reference<Result>::value,
                  "use Return instead of ReturnRef to return a value");
    return Action<F>(new Impl<F>(ref_));
  }

 private:
  // Implements the ReturnRef(x) action for a particular function type F.
  template <typename F>
  class Impl : public ActionInterface<F> {
   public:
    typedef typename Function<F>::Result Result;
    typedef typename Function<F>::ArgumentTuple ArgumentTuple;

    explicit Impl(T& ref) : ref_(ref) {}  // NOLINT

    Result Perform(const ArgumentTuple&) override { return ref_; }

   private:
    T& ref_;
  };

  T& ref_;
};

// Implements the polymorphic ReturnRefOfCopy(x) action, which can be
// used in any function that returns a reference to the type of x,
// regardless of the argument types.
template <typename T>
class ReturnRefOfCopyAction {
 public:
  // Constructs a ReturnRefOfCopyAction object from the reference to
  // be returned.
  explicit ReturnRefOfCopyAction(const T& value) : value_(value) {}  // NOLINT

  // This template type conversion operator allows ReturnRefOfCopy(x) to be
  // used in ANY function that returns a reference to x's type.
  template <typename F>
  operator Action<F>() const {
    typedef typename Function<F>::Result Result;
    // Asserts that the function return type is a reference. This
    // catches the user error of using ReturnRefOfCopy(x) when Return(x)
    // should be used, and generates some helpful error message.
    static_assert(std::is_reference<Result>::value,
                  "use Return instead of ReturnRefOfCopy to return a value");
    return Action<F>(new Impl<F>(value_));
  }

 private:
  // Implements the ReturnRefOfCopy(x) action for a particular function type
  // F.
  template <typename F>
  class Impl : public ActionInterface<F> {
   public:
    typedef typename Function<F>::Result Result;
    typedef typename Function<F>::ArgumentTuple ArgumentTuple;

    explicit Impl(const T& value) : value_(value) {}  // NOLINT

    Result Perform(const ArgumentTuple&) override { return value_; }

   private:
    T value_;
  };

  const T value_;
};

// Implements the polymorphic ReturnRoundRobin(v) action, which can be
// used in any function that returns the element_type of v.
template <typename T>
class ReturnRoundRobinAction {
 public:
  explicit ReturnRoundRobinAction(std::vector<T> values) {
    GTEST_CHECK_(!values.empty())
        << "ReturnRoundRobin requires at least one element.";
    state_->values = std::move(values);
  }

  template <typename... Args>
  T operator()(Args&&...) const {
    return state_->Next();
  }

 private:
  struct State {
    T Next() {
      T ret_val = values[i++];
      if (i == values.size()) i = 0;
      return ret_val;
    }

    std::vector<T> values;
    size_t i = 0;
  };
  std::shared_ptr<State> state_ = std::make_shared<State>();
};
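// For example (illustrative only), assuming a MockFunction<int()> named mock:
//
//   EXPECT_CALL(mock, Call)
//       .WillRepeatedly(ReturnRoundRobin<int>({1, 2, 3}));
//   // Successive calls yield 1, 2, 3, 1, 2, 3, ...
//
// The shared State keeps the cursor, so copies of the action advance the
// same sequence.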
// Implements the polymorphic DoDefault() action.
class DoDefaultAction {
 public:
  // This template type conversion operator allows DoDefault() to be
  // used in any function.
  template <typename F>
  operator Action<F>() const {
    return Action<F>();
  }  // NOLINT
};

// Implements the Assign action to set a given pointer referent to a
// particular value.
template <typename T1, typename T2>
class AssignAction {
 public:
  AssignAction(T1* ptr, T2 value) : ptr_(ptr), value_(value) {}

  template <typename Result, typename ArgumentTuple>
  void Perform(const ArgumentTuple& /* args */) const {
    *ptr_ = value_;
  }

 private:
  T1* const ptr_;
  const T2 value_;
};

#if !GTEST_OS_WINDOWS_MOBILE

// Implements the SetErrnoAndReturn action to simulate return from
// various system calls and libc functions.
template <typename T>
class SetErrnoAndReturnAction {
 public:
  SetErrnoAndReturnAction(int errno_value, T result)
      : errno_(errno_value), result_(result) {}

  template <typename Result, typename ArgumentTuple>
  Result Perform(const ArgumentTuple& /* args */) const {
    errno = errno_;
    return result_;
  }

 private:
  const int errno_;
  const T result_;
};

#endif  // !GTEST_OS_WINDOWS_MOBILE

// Implements the SetArgumentPointee<N>(x) action for any function
// whose N-th argument (0-based) is a pointer to x's type.
template <size_t N, typename A>
struct SetArgumentPointeeAction {
  A value;

  template <typename... Args>
  void operator()(const Args&... args) const {
    *::std::get<N>(std::tie(args...)) = value;
  }
};

// Implements the Invoke(object_ptr, &Class::Method) action.
template <class Class, typename MethodPtr>
struct InvokeMethodAction {
  Class* const obj_ptr;
  const MethodPtr method_ptr;

  template <typename... Args>
  auto operator()(Args&&... args) const
      -> decltype((obj_ptr->*method_ptr)(std::forward<Args>(args)...)) {
    return (obj_ptr->*method_ptr)(std::forward<Args>(args)...);
  }
};

// Implements the InvokeWithoutArgs(f) action. The template argument
// FunctionImpl is the implementation type of f, which can be either a
// function pointer or a functor. InvokeWithoutArgs(f) can be used as an
// Action<F> as long as f's type is compatible with F.
template <typename FunctionImpl>
struct InvokeWithoutArgsAction {
  FunctionImpl function_impl;

  // Allows InvokeWithoutArgs(f) to be used as any action whose type is
  // compatible with f.
  template <typename... Args>
  auto operator()(const Args&...) -> decltype(function_impl()) {
    return function_impl();
  }
};

// Implements the InvokeWithoutArgs(object_ptr, &Class::Method) action.
template <class Class, typename MethodPtr>
struct InvokeMethodWithoutArgsAction {
  Class* const obj_ptr;
  const MethodPtr method_ptr;

  using ReturnType =
      decltype((std::declval<Class*>()->*std::declval<MethodPtr>())());

  template <typename... Args>
  ReturnType operator()(const Args&...) const {
    return (obj_ptr->*method_ptr)();
  }
};
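// A short sketch of the invoke-style actions above (Calculator and its
// members are hypothetical):
//
//   Calculator calc;
//   EXPECT_CALL(mock, Call(_, _))
//       .WillOnce(Invoke(&calc, &Calculator::Add));  // receives the args
//   EXPECT_CALL(mock, Call(_, _))
//       .WillOnce(InvokeWithoutArgs(&calc, &Calculator::Reset));  // ignores
//                                                                 // them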
// Implements the IgnoreResult(action) action.
template <typename A>
class IgnoreResultAction {
 public:
  explicit IgnoreResultAction(const A& action) : action_(action) {}

  template <typename F>
  operator Action<F>() const {
    // Assert statement belongs here because this is the best place to verify
    // conditions on F. It produces the clearest error messages
    // in most compilers.
    // Impl really belongs in this scope as a local class but can't
    // because MSVC produces duplicate symbols in different translation units
    // in this case. Until MS fixes that bug we put Impl into the class scope
    // and put the typedef both here (for use in assert statement) and
    // in the Impl class. But both definitions must be the same.
    typedef typename internal::Function<F>::Result Result;

    // Asserts at compile time that F returns void.
    static_assert(std::is_void<Result>::value, "Result type should be void.");

    return Action<F>(new Impl<F>(action_));
  }

 private:
  template <typename F>
  class Impl : public ActionInterface<F> {
   public:
    typedef typename internal::Function<F>::Result Result;
    typedef typename internal::Function<F>::ArgumentTuple ArgumentTuple;

    explicit Impl(const A& action) : action_(action) {}

    void Perform(const ArgumentTuple& args) override {
      // Performs the action and ignores its result.
      action_.Perform(args);
    }

   private:
    // Type OriginalFunction is the same as F except that its return
    // type is IgnoredValue.
    typedef typename internal::Function<F>::MakeResultIgnoredValue
        OriginalFunction;

    const Action<OriginalFunction> action_;
  };

  const A action_;
};

template <typename InnerAction, size_t... I>
struct WithArgsAction {
  InnerAction inner_action;

  // The signature of the function as seen by the inner action, given an out
  // action with the given result and argument types.
  template <typename R, typename... Args>
  using InnerSignature =
      R(typename std::tuple_element<I, std::tuple<Args...>>::type...);

  // Rather than a call operator, we must define conversion operators to
  // particular action types. This is necessary for embedded actions like
  // DoDefault(), which rely on an action conversion operators rather than
  // providing a call operator because even with a particular set of arguments
  // they don't have a fixed return type.

  template <typename R, typename... Args,
            typename std::enable_if<
                std::is_convertible<
                    InnerAction,
                    // Unfortunately we cannot use the InnerSignature alias
                    // here; MSVC complains about the I parameter pack not
                    // being expanded (error C3520) despite it being expanded
                    // in the type alias.
                    OnceAction<R(typename std::tuple_element<
                                 I, std::tuple<Args...>>::type...)>>::value,
                int>::type = 0>
  operator OnceAction<R(Args...)>() && {  // NOLINT
    struct OA {
      OnceAction<InnerSignature<R, Args...>> inner_action;

      R operator()(Args&&... args) && {
        return std::move(inner_action)
            .Call(std::get<I>(
                std::forward_as_tuple(std::forward<Args>(args)...))...);
      }
    };

    return OA{std::move(inner_action)};
  }

  template <typename R, typename... Args,
            typename std::enable_if<
                std::is_convertible<
                    const InnerAction&,
                    // Unfortunately we cannot use the InnerSignature alias
                    // here; MSVC complains about the I parameter pack not
                    // being expanded (error C3520) despite it being expanded
                    // in the type alias.
                    Action<R(typename std::tuple_element<
                             I, std::tuple<Args...>>::type...)>>::value,
                int>::type = 0>
  operator Action<R(Args...)>() const {  // NOLINT
    Action<InnerSignature<R, Args...>> converted(inner_action);

    return [converted](Args&&... args) -> R {
      return converted.Perform(std::forward_as_tuple(
          std::get<I>(std::forward_as_tuple(std::forward<Args>(args)...))...));
    };
  }
};

template <typename... Actions>
class DoAllAction;

// Base case: only a single action.
template <typename FinalAction>
class DoAllAction<FinalAction> {
 public:
  struct UserConstructorTag {};

  template <typename T>
  explicit DoAllAction(UserConstructorTag, T&& action)
      : final_action_(std::forward<T>(action)) {}

  // Rather than a call operator, we must define conversion operators to
  // particular action types. This is necessary for embedded actions like
  // DoDefault(), which rely on an action conversion operators rather than
  // providing a call operator because even with a particular set of arguments
  // they don't have a fixed return type.

  template <typename R, typename... Args,
            typename std::enable_if<
                std::is_convertible<FinalAction,
                                    OnceAction<R(Args...)>>::value,
                int>::type = 0>
  operator OnceAction<R(Args...)>() && {  // NOLINT
    return std::move(final_action_);
  }

  template <
      typename R, typename... Args,
      typename std::enable_if<
          std::is_convertible<const FinalAction&, Action<R(Args...)>>::value,
          int>::type = 0>
  operator Action<R(Args...)>() const {  // NOLINT
    return final_action_;
  }

 private:
  FinalAction final_action_;
};

// Recursive case: support N actions by calling the initial action and then
// calling through to the base class containing N-1 actions.
template <typename InitialAction, typename... OtherActions>
class DoAllAction<InitialAction, OtherActions...>
    : private DoAllAction<OtherActions...> {
 private:
  using Base = DoAllAction<OtherActions...>;

  // The type of reference that should be provided to an initial action for a
  // mocked function parameter of type T.
  //
  // There are two quirks here:
  //
  //  *  Unlike most forwarding functions, we pass scalars through by value.
  //     This isn't strictly necessary because an lvalue reference would work
  //     fine too and be consistent with other non-reference types, but it's
  //     perhaps less surprising.
  //
  //     For example if the mocked function has signature void(int), then it
  //     might seem surprising for the user's initial action to need to be
  //     convertible to Action<void(const int&)>.
  //     This is perhaps less surprising for a non-scalar type where there
  //     may be a performance impact, or it might even be impossible, to pass
  //     by value.
  //
  //  *  More surprisingly, `const T&` is often not a const reference type.
  //     By the reference collapsing rules in C++17 [dcl.ref]/6, if T refers
  //     to U& or U&& for some non-scalar type U, then InitialActionArgType<T>
  //     is U&. In other words, we may hand over a non-const reference.
  //
  //     So for example, given some non-scalar type Obj we have the following
  //     mappings:
  //
  //            T               InitialActionArgType<T>
  //         -------            -----------------------
  //         Obj                const Obj&
  //         Obj&               Obj&
  //         Obj&&              Obj&
  //         const Obj          const Obj&
  //         const Obj&         const Obj&
  //         const Obj&&        const Obj&
  //
  //     In other words, the initial actions get a mutable view of a
  //     non-scalar argument if and only if the mock function itself accepts a
  //     non-const reference type. They are never given an rvalue reference to
  //     a non-scalar type.
  //
  //     This situation makes sense if you imagine use with a matcher that is
  //     designed to write through a reference. For example, if the caller
  //     wants to fill in a reference argument and then return a canned value:
  //
  //         EXPECT_CALL(mock, Call)
  //             .WillOnce(DoAll(SetArgReferee<0>(17), Return(19)));
  //
  template <typename T>
  using InitialActionArgType =
      typename std::conditional<std::is_scalar<T>::value, T, const T&>::type;

 public:
  struct UserConstructorTag {};

  template <typename T, typename... U>
  explicit DoAllAction(UserConstructorTag, T&& initial_action,
                       U&&... other_actions)
      : Base({}, std::forward<U>(other_actions)...),
        initial_action_(std::forward<T>(initial_action)) {}

  template <typename R, typename... Args,
            typename std::enable_if<
                conjunction<
                    // Both the initial action and the rest must support
                    // conversion to OnceAction.
                    std::is_convertible<
                        InitialAction,
                        OnceAction<void(InitialActionArgType<Args>...)>>,
                    std::is_convertible<Base,
                                        OnceAction<R(Args...)>>>::value,
                int>::type = 0>
  operator OnceAction<R(Args...)>() && {  // NOLINT
    // Return an action that first calls the initial action with arguments
    // filtered through InitialActionArgType, then forwards arguments directly
    // to the base class to deal with the remaining actions.
    struct OA {
      OnceAction<void(InitialActionArgType<Args>...)> initial_action;
      OnceAction<R(Args...)> remaining_actions;

      R operator()(Args... args) && {
        std::move(initial_action)
            .Call(static_cast<InitialActionArgType<Args>>(args)...);

        return std::move(remaining_actions).Call(std::forward<Args>(args)...);
      }
    };

    return OA{
        std::move(initial_action_),
        std::move(static_cast<Base&>(*this)),
    };
  }

  template <
      typename R, typename... Args,
      typename std::enable_if<
          conjunction<
              // Both the initial action and the rest must support conversion
              // to Action.
              std::is_convertible<
                  const InitialAction&,
                  Action<void(InitialActionArgType<Args>...)>>,
              std::is_convertible<const Base&, Action<R(Args...)>>>::value,
          int>::type = 0>
  operator Action<R(Args...)>() const {  // NOLINT
    // Return an action that first calls the initial action with arguments
    // filtered through InitialActionArgType, then forwards arguments directly
    // to the base class to deal with the remaining actions.
    struct OA {
      Action<void(InitialActionArgType<Args>...)> initial_action;
      Action<R(Args...)> remaining_actions;

      R operator()(Args... args) const {
        initial_action.Perform(std::forward_as_tuple(
            static_cast<InitialActionArgType<Args>>(args)...));

        return remaining_actions.Perform(
            std::forward_as_tuple(std::forward<Args>(args)...));
      }
    };

    return OA{
        initial_action_,
        static_cast<const Base&>(*this),
    };
  }

 private:
  InitialAction initial_action_;
};

template <typename T, typename... Params>
struct ReturnNewAction {
  T* operator()() const {
    return internal::Apply(
        [](const Params&... unpacked_params) {
          return new T(unpacked_params...);
        },
        params);
  }
  std::tuple<Params...> params;
};

template <size_t k>
struct ReturnArgAction {
  template <typename... Args,
            typename = typename std::enable_if<(k < sizeof...(Args))>::type>
  auto operator()(Args&&... args) const -> decltype(std::get<k>(
      std::forward_as_tuple(std::forward<Args>(args)...))) {
    return std::get<k>(std::forward_as_tuple(std::forward<Args>(args)...));
  }
};
template <size_t k, typename Ptr>
struct SaveArgAction {
  Ptr pointer;

  template <typename... Args>
  void operator()(const Args&... args) const {
    *pointer = std::get<k>(std::tie(args...));
  }
};

template <size_t k, typename Ptr>
struct SaveArgPointeeAction {
  Ptr pointer;

  template <typename... Args>
  void operator()(const Args&... args) const {
    *pointer = *std::get<k>(std::tie(args...));
  }
};

template <size_t k, typename T>
struct SetArgRefereeAction {
  T value;

  template <typename... Args>
  void operator()(Args&&... args) const {
    using argk_type =
        typename ::std::tuple_element<k, std::tuple<Args...>>::type;
    static_assert(std::is_lvalue_reference<argk_type>::value,
                  "Argument must be a reference type.");
    std::get<k>(std::tie(args...)) = value;
  }
};

template <size_t k, typename I1, typename I2>
struct SetArrayArgumentAction {
  I1 first;
  I2 last;

  template <typename... Args>
  void operator()(const Args&... args) const {
    auto value = std::get<k>(std::tie(args...));
    for (auto it = first; it != last; ++it, (void)++value) {
      *value = *it;
    }
  }
};

template <size_t k>
struct DeleteArgAction {
  template <typename... Args>
  void operator()(const Args&... args) const {
    delete std::get<k>(std::tie(args...));
  }
};

template <typename Ptr>
struct ReturnPointeeAction {
  Ptr pointer;

  template <typename... Args>
  auto operator()(const Args&...) const -> decltype(*pointer) {
    return *pointer;
  }
};

#if GTEST_HAS_EXCEPTIONS
template <typename T>
struct ThrowAction {
  T exception;
  // We use a conversion operator to adapt to any return type.
  template <typename R, typename... Args>
  operator Action<R(Args...)>() const {  // NOLINT
    T copy = exception;
    return [copy](Args...) -> R { throw copy; };
  }
};
#endif  // GTEST_HAS_EXCEPTIONS

}  // namespace internal

// An Unused object can be implicitly constructed from ANY value.
// This is handy when defining actions that ignore some or all of the
// mock function arguments. For example, given
//
//   MOCK_METHOD3(Foo, double(const string& label, double x, double y));
//   MOCK_METHOD3(Bar, double(int index, double x, double y));
//
// instead of
//
//   double DistanceToOriginWithLabel(const string& label, double x,
//                                    double y) {
//     return sqrt(x*x + y*y);
//   }
//   double DistanceToOriginWithIndex(int index, double x, double y) {
//     return sqrt(x*x + y*y);
//   }
//   ...
//   EXPECT_CALL(mock, Foo("abc", _, _))
//       .WillOnce(Invoke(DistanceToOriginWithLabel));
//   EXPECT_CALL(mock, Bar(5, _, _))
//       .WillOnce(Invoke(DistanceToOriginWithIndex));
//
// you could write
//
//   // We can declare any uninteresting argument as Unused.
//   double DistanceToOrigin(Unused, double x, double y) {
//     return sqrt(x*x + y*y);
//   }
//   ...
//   EXPECT_CALL(mock, Foo("abc", _, _)).WillOnce(Invoke(DistanceToOrigin));
//   EXPECT_CALL(mock, Bar(5, _, _)).WillOnce(Invoke(DistanceToOrigin));
typedef internal::IgnoredValue Unused;

// Creates an action that does actions a1, a2, ..., sequentially in
// each invocation. All but the last action will have a readonly view of the
// arguments.
template <typename... Action>
internal::DoAllAction<typename std::decay<Action>::type...> DoAll(
    Action&&... action) {
  return internal::DoAllAction<typename std::decay<Action>::type...>(
      {}, std::forward<Action>(action)...);
}

// WithArg<k>(an_action) creates an action that passes the k-th
// (0-based) argument of the mock function to an_action and performs
// it. It adapts an action accepting one argument to one that accepts
// multiple arguments. For convenience, we also provide
// WithArgs<k>(an_action) (defined below) as a synonym.
template <size_t k, typename InnerAction>
internal::WithArgsAction<typename std::decay<InnerAction>::type, k> WithArg(
    InnerAction&& action) {
  return {std::forward<InnerAction>(action)};
}
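// For example (illustrative only), adapting a unary callable to a binary
// mock function:
//
//   MockFunction<int(std::string, int)> mock;
//   EXPECT_CALL(mock, Call)
//       .WillOnce(WithArg<1>([](int n) { return n + 1; }));
//   // Only argument 1 (the int) is forwarded to the inner action.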
// WithArgs<N1, N2, ..., Nk>(an_action) creates an action that passes
// the selected arguments of the mock function to an_action and
// performs it. It serves as an adaptor between actions with
// different argument lists.
template <size_t k, size_t... ks, typename InnerAction>
internal::WithArgsAction<typename std::decay<InnerAction>::type, k, ks...>
WithArgs(InnerAction&& action) {
  return {std::forward<InnerAction>(action)};
}

// WithoutArgs(inner_action) can be used in a mock function with a
// non-empty argument list to perform inner_action, which takes no
// argument. In other words, it adapts an action accepting no
// argument to one that accepts (and ignores) arguments.
template <typename InnerAction>
internal::WithArgsAction<typename std::decay<InnerAction>::type> WithoutArgs(
    InnerAction&& action) {
  return {std::forward<InnerAction>(action)};
}

// Creates an action that returns a value.
//
// The returned type can be used with a mock function returning a non-void,
// non-reference type U as follows:
//
//  *  If R is convertible to U and U is move-constructible, then the action
//     can be used with WillOnce.
//
//  *  If const R& is convertible to U and U is copy-constructible, then the
//     action can be used with both WillOnce and WillRepeatedly.
//
// The mock expectation contains the R value from which the U return value is
// constructed (a move/copy of the argument to Return). This means that the R
// value will survive at least until the mock object's expectations are
// cleared or the mock object is destroyed, meaning that U can safely be a
// reference-like type such as std::string_view:
//
//     // The mock function returns a view of a copy of the string fed to
//     // Return. The view is valid even after the action is performed.
//     MockFunction<std::string_view()> mock;
//     EXPECT_CALL(mock, Call).WillOnce(Return(std::string("taco")));
//     const std::string_view result = mock.AsStdFunction()();
//     EXPECT_EQ("taco", result);
//
template <typename R>
internal::ReturnAction<R> Return(R value) {
  return internal::ReturnAction<R>(std::move(value));
}

// Creates an action that returns NULL.
inline PolymorphicAction<internal::ReturnNullAction> ReturnNull() {
  return MakePolymorphicAction(internal::ReturnNullAction());
}

// Creates an action that returns from a void function.
inline PolymorphicAction<internal::ReturnVoidAction> Return() {
  return MakePolymorphicAction(internal::ReturnVoidAction());
}

// Creates an action that returns the reference to a variable.
template <typename R>
inline internal::ReturnRefAction<R> ReturnRef(R& x) {  // NOLINT
  return internal::ReturnRefAction<R>(x);
}

// Prevent using ReturnRef on reference to temporary.
template <typename R, R* = nullptr>
internal::ReturnRefAction<R> ReturnRef(R&&) = delete;

// Creates an action that returns the reference to a copy of the
// argument. The copy is created when the action is constructed and
// lives as long as the action.
template <typename R>
inline internal::ReturnRefOfCopyAction<R> ReturnRefOfCopy(const R& x) {
  return internal::ReturnRefOfCopyAction<R>(x);
}

// DEPRECATED: use Return(x) directly with WillOnce.
//
// Modifies the parent action (a Return() action) to perform a move of the
// argument instead of a copy.
// Return(ByMove()) actions can only be executed once and will assert this
// invariant.
template <typename R>
internal::ByMoveWrapper<R> ByMove(R x) {
  return internal::ByMoveWrapper<R>(std::move(x));
}

// Creates an action that returns an element of `vals`. Calling this action
// will repeatedly return the next value from `vals` until it reaches the end
// and will restart from the beginning.
template <typename T>
internal::ReturnRoundRobinAction<T> ReturnRoundRobin(std::vector<T> vals) {
  return internal::ReturnRoundRobinAction<T>(std::move(vals));
}

// Creates an action that returns an element of `vals`. Calling this action
// will repeatedly return the next value from `vals` until it reaches the end
// and will restart from the beginning.
template <typename T>
internal::ReturnRoundRobinAction<T> ReturnRoundRobin(
    std::initializer_list<T> vals) {
  return internal::ReturnRoundRobinAction<T>(std::vector<T>(vals));
}

// Creates an action that does the default action for the given mock function.
inline internal::DoDefaultAction DoDefault() {
  return internal::DoDefaultAction();
}

// Creates an action that sets the variable pointed by the N-th
// (0-based) function argument to 'value'.
template <size_t N, typename T>
internal::SetArgumentPointeeAction<N, T> SetArgPointee(T value) {
  return {std::move(value)};
}

// The following version is DEPRECATED.
template <size_t N, typename T>
internal::SetArgumentPointeeAction<N, T> SetArgumentPointee(T value) {
  return {std::move(value)};
}

// Creates an action that sets a pointer referent to a given value.
template <typename T1, typename T2>
PolymorphicAction<internal::AssignAction<T1, T2>> Assign(T1* ptr, T2 val) {
  return MakePolymorphicAction(internal::AssignAction<T1, T2>(ptr, val));
}

#if !GTEST_OS_WINDOWS_MOBILE

// Creates an action that sets errno and returns the appropriate error.
template <typename T>
PolymorphicAction<internal::SetErrnoAndReturnAction<T>> SetErrnoAndReturn(
    int errval, T result) {
  return MakePolymorphicAction(
      internal::SetErrnoAndReturnAction<T>(errval, result));
}

#endif  // !GTEST_OS_WINDOWS_MOBILE

// Various overloads for Invoke().

// Legacy function.
// Actions can now be implicitly constructed from callables. No need to create
// wrapper objects.
// This function exists for backwards compatibility.
template <typename FunctionImpl>
typename std::decay<FunctionImpl>::type Invoke(FunctionImpl&& function_impl) {
  return std::forward<FunctionImpl>(function_impl);
}

// Creates an action that invokes the given method on the given object
// with the mock function's arguments.
template <class Class, typename MethodPtr>
internal::InvokeMethodAction<Class, MethodPtr> Invoke(Class* obj_ptr,
                                                      MethodPtr method_ptr) {
  return {obj_ptr, method_ptr};
}

// Creates an action that invokes 'function_impl' with no argument.
template <typename FunctionImpl>
internal::InvokeWithoutArgsAction<typename std::decay<FunctionImpl>::type>
InvokeWithoutArgs(FunctionImpl function_impl) {
  return {std::move(function_impl)};
}

// Creates an action that invokes the given method on the given object
// with no argument.
template <class Class, typename MethodPtr>
internal::InvokeMethodWithoutArgsAction<Class, MethodPtr> InvokeWithoutArgs(
    Class* obj_ptr, MethodPtr method_ptr) {
  return {obj_ptr, method_ptr};
}

// Creates an action that performs an_action and throws away its
// result. In other words, it changes the return type of an_action to
// void. an_action MUST NOT return void, or the code won't compile.
template <typename A>
inline internal::IgnoreResultAction<A> IgnoreResult(const A& an_action) {
  return internal::IgnoreResultAction<A>(an_action);
}

// Creates a reference wrapper for the given L-value. If necessary,
// you can explicitly specify the type of the reference. For example,
// suppose 'derived' is an object of type Derived, ByRef(derived)
// would wrap a Derived&. If you want to wrap a const Base& instead,
// where Base is a base class of Derived, just write:
//
//   ByRef<const Base>(derived)
//
// N.B. ByRef is redundant with std::ref, std::cref and std::reference_wrapper.
// However, it may still be used for consistency with ByMove().
template <typename T>
inline ::std::reference_wrapper<T> ByRef(T& l_value) {  // NOLINT
  return ::std::reference_wrapper<T>(l_value);
}

// The ReturnNew<T>(a1, a2, ..., a_k) action returns a pointer to a new
// instance of type T, constructed on the heap with constructor arguments
// a1, a2, ..., and a_k. The caller assumes ownership of the returned value.
template <typename T, typename... Params>
internal::ReturnNewAction<T, typename std::decay<Params>::type...> ReturnNew(
    Params&&... params) {
  return {std::forward_as_tuple(std::forward<Params>(params)...)};
}

// Action ReturnArg<k>() returns the k-th argument of the mock function.
template <size_t k>
internal::ReturnArgAction<k> ReturnArg() {
  return {};
}

// Action SaveArg<k>(pointer) saves the k-th (0-based) argument of the
// mock function to *pointer.
template <size_t k, typename Ptr>
internal::SaveArgAction<k, Ptr> SaveArg(Ptr pointer) {
  return {pointer};
}

// Action SaveArgPointee<k>(pointer) saves the value pointed to
// by the k-th (0-based) argument of the mock function to *pointer.
template <size_t k, typename Ptr>
internal::SaveArgPointeeAction<k, Ptr> SaveArgPointee(Ptr pointer) {
  return {pointer};
}

// Action SetArgReferee<k>(value) assigns 'value' to the variable
// referenced by the k-th (0-based) argument of the mock function.
template <size_t k, typename T>
internal::SetArgRefereeAction<k, typename std::decay<T>::type> SetArgReferee(
    T&& value) {
  return {std::forward<T>(value)};
}

// Action SetArrayArgument<k>(first, last) copies the elements in
// source range [first, last) to the array pointed to by the k-th
// (0-based) argument, which can be either a pointer or an
// iterator. The action does not take ownership of the elements in the
// source range.
template <size_t k, typename I1, typename I2>
internal::SetArrayArgumentAction<k, I1, I2> SetArrayArgument(I1 first,
                                                             I2 last) {
  return {first, last};
}

// Action DeleteArg<k>() deletes the k-th (0-based) argument of the mock
// function.
template <size_t k>
internal::DeleteArgAction<k> DeleteArg() {
  return {};
}

// This action returns the value pointed to by 'pointer'.
template <typename Ptr>
internal::ReturnPointeeAction<Ptr> ReturnPointee(Ptr pointer) {
  return {pointer};
}

// Action Throw(exception) can be used in a mock function of any type
// to throw the given exception. Any copyable value can be thrown.
#if GTEST_HAS_EXCEPTIONS
template <typename T>
internal::ThrowAction<typename std::decay<T>::type> Throw(T&& exception) {
  return {std::forward<T>(exception)};
}
#endif  // GTEST_HAS_EXCEPTIONS
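// A combined sketch of the argument-oriented actions declared above
// (illustrative values only):
//
//   int saved = 0;
//   int cell = 5;
//   MockFunction<int(int, int*)> mock;
//   EXPECT_CALL(mock, Call)
//       .WillOnce(DoAll(SaveArg<0>(&saved),      // copy argument 0
//                       SetArgPointee<1>(42),    // write through argument 1
//                       ReturnPointee(&cell)));  // return *cell at call time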
namespace internal {

// A macro from the ACTION* family (defined later in gmock-generated-actions.h)
// defines an action that can be used in a mock function. Typically,
// these actions only care about a subset of the arguments of the mock
// function. For example, if such an action only uses the second
// argument, it can be used in any mock function that takes >= 2
// arguments where the type of the second argument is compatible.
//
// Therefore, the action implementation must be prepared to take more
// arguments than it needs. The ExcessiveArg type is used to
// represent those excessive arguments. In order to keep the compiler
// error messages tractable, we define it in the testing namespace
// instead of testing::internal. However, this is an INTERNAL TYPE
// and subject to change without notice, so a user MUST NOT USE THIS
// TYPE DIRECTLY.
struct ExcessiveArg {};

// Builds an implementation of an Action<> for some particular signature,
// using a class defined by an ACTION* macro.
template <typename F, typename Impl>
struct ActionImpl;

template <typename Impl>
struct ImplBase {
  struct Holder {
    // Allows each copy of the Action<> to get to the Impl.
    explicit operator const Impl&() const { return *ptr; }
    std::shared_ptr<Impl> ptr;
  };
  using type = typename std::conditional<std::is_constructible<Impl>::value,
                                         Impl, Holder>::type;
};

template <typename R, typename... Args, typename Impl>
struct ActionImpl<R(Args...), Impl> : ImplBase<Impl>::type {
  using Base = typename ImplBase<Impl>::type;
  using function_type = R(Args...);
  using args_type = std::tuple<Args...>;

  ActionImpl() = default;  // Only defined if appropriate for Base.
  explicit ActionImpl(std::shared_ptr<Impl> impl) : Base{std::move(impl)} {}

  R operator()(Args&&... arg) const {
    static constexpr size_t kMaxArgs =
        sizeof...(Args) <= 10 ? sizeof...(Args) : 10;
    return Apply(MakeIndexSequence<kMaxArgs>{},
                 MakeIndexSequence<10 - kMaxArgs>{},
                 args_type{std::forward<Args>(arg)...});
  }

  template <std::size_t... arg_id, std::size_t... excess_id>
  R Apply(IndexSequence<arg_id...>, IndexSequence<excess_id...>,
          const args_type& args) const {
    // Impl need not be specific to the signature of action being implemented;
    // only the implementing function body needs to have all of the specific
    // types instantiated.  Up to 10 of the args that are provided by the
    // args_type get passed, followed by a dummy of unspecified type for the
    // remainder up to 10 explicit args.
    static constexpr ExcessiveArg kExcessArg{};
    return static_cast<const Impl&>(*this)
        .template gmock_PerformImpl<
            /*function_type=*/function_type, /*return_type=*/R,
            /*args_type=*/args_type,
            /*argN_type=*/
            typename std::tuple_element<arg_id, args_type>::type...>(
            /*args=*/args, std::get<arg_id>(args)...,
            ((void)excess_id, kExcessArg)...);
  }
};

// Stores a default-constructed Impl as part of the Action<>'s
// std::function<>. The Impl should be trivial to copy.
template <typename F, typename Impl>
::testing::Action<F> MakeAction() {
  return ::testing::Action<F>(ActionImpl<F, Impl>());
}

// Stores just the one given instance of Impl.
template <typename F, typename Impl>
::testing::Action<F> MakeAction(std::shared_ptr<Impl> impl) {
  return ::testing::Action<F>(ActionImpl<F, Impl>(std::move(impl)));
}

#define GMOCK_INTERNAL_ARG_UNUSED(i, data, el) \
  , const arg##i##_type& arg##i GTEST_ATTRIBUTE_UNUSED_
#define GMOCK_ACTION_ARG_TYPES_AND_NAMES_UNUSED_                 \
  const args_type& args GTEST_ATTRIBUTE_UNUSED_ GMOCK_PP_REPEAT( \
      GMOCK_INTERNAL_ARG_UNUSED, , 10)

#define GMOCK_INTERNAL_ARG(i, data, el) , const arg##i##_type& arg##i
#define GMOCK_ACTION_ARG_TYPES_AND_NAMES_ \
  const args_type& args GMOCK_PP_REPEAT(GMOCK_INTERNAL_ARG, , 10)

#define GMOCK_INTERNAL_TEMPLATE_ARG(i, data, el) , typename arg##i##_type
#define GMOCK_ACTION_TEMPLATE_ARGS_NAMES_ \
  GMOCK_PP_TAIL(GMOCK_PP_REPEAT(GMOCK_INTERNAL_TEMPLATE_ARG, , 10))

#define GMOCK_INTERNAL_TYPENAME_PARAM(i, data, param) , typename param##_type
#define GMOCK_ACTION_TYPENAME_PARAMS_(params) \
  GMOCK_PP_TAIL(GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_TYPENAME_PARAM, , params))

#define GMOCK_INTERNAL_TYPE_PARAM(i, data, param) , param##_type
#define GMOCK_ACTION_TYPE_PARAMS_(params) \
  GMOCK_PP_TAIL(GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_TYPE_PARAM, , params))

#define GMOCK_INTERNAL_TYPE_GVALUE_PARAM(i, data, param) \
  , param##_type gmock_p##i
#define GMOCK_ACTION_TYPE_GVALUE_PARAMS_(params) \
  GMOCK_PP_TAIL(GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_TYPE_GVALUE_PARAM, , params))

#define GMOCK_INTERNAL_GVALUE_PARAM(i, data, param) \
  , std::forward<param##_type>(gmock_p##i)
#define GMOCK_ACTION_GVALUE_PARAMS_(params) \
  GMOCK_PP_TAIL(GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_GVALUE_PARAM, , params))

#define GMOCK_INTERNAL_INIT_PARAM(i, data, param) \
  , param(::std::forward<param##_type>(gmock_p##i))
#define GMOCK_ACTION_INIT_PARAMS_(params) \
  GMOCK_PP_TAIL(GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_INIT_PARAM, , params))

#define GMOCK_INTERNAL_FIELD_PARAM(i, data, param) param##_type param;
#define GMOCK_ACTION_FIELD_PARAMS_(params) \
  GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_FIELD_PARAM, , params)
#define GMOCK_INTERNAL_ACTION(name, full_name, params)                        \
  template <GMOCK_ACTION_TYPENAME_PARAMS_(params)>                            \
  class full_name {                                                           \
   public:                                                                    \
    explicit full_name(GMOCK_ACTION_TYPE_GVALUE_PARAMS_(params))              \
        : impl_(std::make_shared<gmock_Impl>(                                 \
              GMOCK_ACTION_GVALUE_PARAMS_(params))) {}                        \
    full_name(const full_name&) = default;                                    \
    full_name(full_name&&) noexcept = default;                                \
    template <typename F>                                                     \
    operator ::testing::Action<F>() const {                                   \
      return ::testing::internal::MakeAction<F, gmock_Impl>(impl_);           \
    }                                                                         \
                                                                              \
   private:                                                                   \
    class gmock_Impl {                                                        \
     public:                                                                  \
      explicit gmock_Impl(GMOCK_ACTION_TYPE_GVALUE_PARAMS_(params))           \
          : GMOCK_ACTION_INIT_PARAMS_(params) {}                              \
      template <typename function_type, typename return_type,                 \
                typename args_type, GMOCK_ACTION_TEMPLATE_ARGS_NAMES_>        \
      return_type gmock_PerformImpl(GMOCK_ACTION_ARG_TYPES_AND_NAMES_) const; \
      GMOCK_ACTION_FIELD_PARAMS_(params)                                      \
    };                                                                        \
    std::shared_ptr<const gmock_Impl> impl_;                                  \
  };                                                                          \
  template <GMOCK_ACTION_TYPENAME_PARAMS_(params)>                            \
  inline full_name<GMOCK_ACTION_TYPE_PARAMS_(params)> name(                   \
      GMOCK_ACTION_TYPE_GVALUE_PARAMS_(params)) GTEST_MUST_USE_RESULT_;       \
  template <GMOCK_ACTION_TYPENAME_PARAMS_(params)>                            \
  inline full_name<GMOCK_ACTION_TYPE_PARAMS_(params)> name(                   \
      GMOCK_ACTION_TYPE_GVALUE_PARAMS_(params)) {                             \
    return full_name<GMOCK_ACTION_TYPE_PARAMS_(params)>(                      \
        GMOCK_ACTION_GVALUE_PARAMS_(params));                                 \
  }                                                                           \
  template <GMOCK_ACTION_TYPENAME_PARAMS_(params)>                            \
  template <typename function_type, typename return_type, typename args_type, \
            GMOCK_ACTION_TEMPLATE_ARGS_NAMES_>                                \
  return_type                                                                 \
  full_name<GMOCK_ACTION_TYPE_PARAMS_(params)>::gmock_Impl::gmock_PerformImpl( \
      GMOCK_ACTION_ARG_TYPES_AND_NAMES_UNUSED_) const

}  // namespace internal

// Similar to GMOCK_INTERNAL_ACTION, but no bound parameters are stored.
#define ACTION(name)                                                          \
  class name##Action {                                                        \
   public:                                                                    \
    explicit name##Action() noexcept {}                                      \
    name##Action(const name##Action&) noexcept {}                            \
    template <typename F>                                                     \
    operator ::testing::Action<F>() const {                                   \
      return ::testing::internal::MakeAction<F, gmock_Impl>();                \
    }                                                                         \
                                                                              \
   private:                                                                   \
    class gmock_Impl {                                                        \
     public:                                                                  \
      template <typename function_type, typename return_type,                 \
                typename args_type, GMOCK_ACTION_TEMPLATE_ARGS_NAMES_>        \
      return_type gmock_PerformImpl(GMOCK_ACTION_ARG_TYPES_AND_NAMES_) const; \
    };                                                                        \
  };                                                                          \
  inline name##Action name() GTEST_MUST_USE_RESULT_;                          \
  inline name##Action name() { return name##Action(); }                      \
  template <typename function_type, typename return_type, typename args_type, \
            GMOCK_ACTION_TEMPLATE_ARGS_NAMES_>                                \
  return_type name##Action::gmock_Impl::gmock_PerformImpl(                    \
      GMOCK_ACTION_ARG_TYPES_AND_NAMES_UNUSED_) const

#define ACTION_P(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP, (__VA_ARGS__))

#define ACTION_P2(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP2, (__VA_ARGS__))

#define ACTION_P3(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP3, (__VA_ARGS__))

#define ACTION_P4(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP4, (__VA_ARGS__))

#define ACTION_P5(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP5, (__VA_ARGS__))

#define ACTION_P6(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP6, (__VA_ARGS__))

#define ACTION_P7(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP7, (__VA_ARGS__))

#define ACTION_P8(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP8, (__VA_ARGS__))

#define ACTION_P9(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP9, (__VA_ARGS__))

#define ACTION_P10(name, ...) \
  GMOCK_INTERNAL_ACTION(name, name##ActionP10, (__VA_ARGS__))
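// For example, a hedged sketch of a parameterized action defined via the
// machinery above (AddTo is an illustrative name):
//
//   ACTION_P(AddTo, base) { return base + arg0; }
//   ...
//   EXPECT_CALL(mock, Call(_)).WillOnce(AddTo(10));  // returns 10 + arg 0
//
// The expansion stores `base` in gmock_Impl and supplies up to ten mock
// arguments as arg0..arg9, padding the remainder with ExcessiveArg.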
}  // namespace testing

#ifdef _MSC_VER
#pragma warning(pop)
#endif

#endif  // GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_ACTIONS_H_

libvpl-tools-1.3.0/ext/googletest/googlemock/include/gmock/gmock-cardinalities.h

// Copyright 2007, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Google Mock - a framework for writing C++ mock classes.
//
// This file implements some commonly used cardinalities. More
// cardinalities can be defined by the user implementing the
// CardinalityInterface interface if necessary.

// IWYU pragma: private, include "gmock/gmock.h"
// IWYU pragma: friend gmock/.*

#ifndef GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_CARDINALITIES_H_
#define GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_CARDINALITIES_H_

#include <limits.h>

#include <memory>
#include <ostream>  // NOLINT

#include "gmock/internal/gmock-port.h"
#include "gtest/gtest.h"

GTEST_DISABLE_MSC_WARNINGS_PUSH_(
    4251 /* class A needs to have dll-interface to be used by clients of
            class B */)

namespace testing {

// To implement a cardinality Foo, define:
//   1. a class FooCardinality that implements the
//      CardinalityInterface interface, and
//   2. a factory function that creates a Cardinality object from a
//      const FooCardinality*.
//
// The two-level delegation design follows that of Matcher, providing
// consistency for extension developers. It also eases ownership
// management as Cardinality objects can now be copied like plain values.

// The implementation of a cardinality.
class CardinalityInterface {
 public:
  virtual ~CardinalityInterface() {}

  // Conservative estimate on the lower/upper bound of the number of
  // calls allowed.
  virtual int ConservativeLowerBound() const { return 0; }
  virtual int ConservativeUpperBound() const { return INT_MAX; }

  // Returns true if and only if call_count calls will satisfy this
  // cardinality.
  virtual bool IsSatisfiedByCallCount(int call_count) const = 0;

  // Returns true if and only if call_count calls will saturate this
  // cardinality.
  virtual bool IsSaturatedByCallCount(int call_count) const = 0;

  // Describes self to an ostream.
  virtual void DescribeTo(::std::ostream* os) const = 0;
};

// A Cardinality is a copyable and IMMUTABLE (except by assignment)
// object that specifies how many times a mock function is expected to
// be called. The implementation of Cardinality is just a std::shared_ptr
// to const CardinalityInterface. Don't inherit from Cardinality!
class GTEST_API_ Cardinality {
 public:
  // Constructs a null cardinality. Needed for storing Cardinality
  // objects in STL containers.
  Cardinality() {}

  // Constructs a Cardinality from its implementation.
  explicit Cardinality(const CardinalityInterface* impl) : impl_(impl) {}

  // Conservative estimate on the lower/upper bound of the number of
  // calls allowed.
  int ConservativeLowerBound() const {
    return impl_->ConservativeLowerBound();
  }
  int ConservativeUpperBound() const {
    return impl_->ConservativeUpperBound();
  }

  // Returns true if and only if call_count calls will satisfy this
  // cardinality.
  bool IsSatisfiedByCallCount(int call_count) const {
    return impl_->IsSatisfiedByCallCount(call_count);
  }

  // Returns true if and only if call_count calls will saturate this
  // cardinality.
  bool IsSaturatedByCallCount(int call_count) const {
    return impl_->IsSaturatedByCallCount(call_count);
  }

  // Returns true if and only if call_count calls will over-saturate this
  // cardinality, i.e. exceed the maximum number of allowed calls.
  bool IsOverSaturatedByCallCount(int call_count) const {
    return impl_->IsSaturatedByCallCount(call_count) &&
           !impl_->IsSatisfiedByCallCount(call_count);
  }

  // Describes self to an ostream.
  void DescribeTo(::std::ostream* os) const { impl_->DescribeTo(os); }

  // Describes the given actual call count to an ostream.
  static void DescribeActualCallCountTo(int actual_call_count,
                                        ::std::ostream* os);

 private:
  std::shared_ptr<const CardinalityInterface> impl_;
};

// Creates a cardinality that allows at least n calls.
GTEST_API_ Cardinality AtLeast(int n);

// Creates a cardinality that allows at most n calls.
GTEST_API_ Cardinality AtMost(int n);

// Creates a cardinality that allows any number of calls.
GTEST_API_ Cardinality AnyNumber();

// Creates a cardinality that allows between min and max calls.
GTEST_API_ Cardinality Between(int min, int max);

// Creates a cardinality that allows exactly n calls.
GTEST_API_ Cardinality Exactly(int n);

// Creates a cardinality from its implementation.
inline Cardinality MakeCardinality(const CardinalityInterface* c) {
  return Cardinality(c);
}
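// For example, the factory functions above compose with Times() in the usual
// way (illustrative only):
//
//   EXPECT_CALL(mock, Call).Times(AtLeast(2));
//   EXPECT_CALL(mock, Call).Times(Between(1, 3));
//
// A custom cardinality implements CardinalityInterface and is wrapped via
// MakeCardinality(new MyCardinality), which transfers ownership to the
// returned Cardinality.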
// IWYU pragma: private, include "gmock/gmock.h"
// IWYU pragma: friend gmock/.*

#ifndef GOOGLEMOCK_INCLUDE_GMOCK_INTERNAL_GMOCK_FUNCTION_MOCKER_H_  // NOLINT
#define GOOGLEMOCK_INCLUDE_GMOCK_INTERNAL_GMOCK_FUNCTION_MOCKER_H_  // NOLINT

#include <cstddef>      // IWYU pragma: keep
#include <type_traits>  // IWYU pragma: keep

#include "gmock/gmock-spec-builders.h"
#include "gmock/internal/gmock-internal-utils.h"
#include "gmock/internal/gmock-pp.h"

namespace testing {
namespace internal {

template <typename T>
using identity_t = T;

template <typename Pattern>
struct ThisRefAdjuster {
  template <typename T>
  using AdjustT = typename std::conditional<
      std::is_const<typename std::remove_reference<Pattern>::type>::value,
      typename std::conditional<std::is_lvalue_reference<Pattern>::value,
                                const T&, const T&&>::type,
      typename std::conditional<std::is_lvalue_reference<Pattern>::value, T&,
                                T&&>::type>::type;

  template <typename MockType>
  static AdjustT<MockType> Adjust(const MockType& mock) {
    return static_cast<AdjustT<MockType>>(const_cast<MockType&>(mock));
  }
};

constexpr bool PrefixOf(const char* a, const char* b) {
  return *a == 0 || (*a == *b && internal::PrefixOf(a + 1, b + 1));
}

template <size_t N, size_t M>
constexpr bool StartsWith(const char (&prefix)[N], const char (&str)[M]) {
  return N <= M && internal::PrefixOf(prefix, str);
}

template <size_t N, size_t M>
constexpr bool EndsWith(const char (&suffix)[N], const char (&str)[M]) {
  return N <= M && internal::PrefixOf(suffix, str + M - N);
}

template <size_t N, size_t M>
constexpr bool Equals(const char (&a)[N], const char (&b)[M]) {
  return N == M && internal::PrefixOf(a, b);
}

template <size_t N>
constexpr bool ValidateSpec(const char (&spec)[N]) {
  return internal::Equals("const", spec) ||
         internal::Equals("override", spec) ||
         internal::Equals("final", spec) ||
         internal::Equals("noexcept", spec) ||
         (internal::StartsWith("noexcept(", spec) &&
          internal::EndsWith(")", spec)) ||
         internal::Equals("ref(&)", spec) ||
         internal::Equals("ref(&&)", spec) ||
         (internal::StartsWith("Calltype(", spec) &&
          internal::EndsWith(")", spec));
}

}  // namespace internal

// The style guide prohibits "using" statements in a namespace scope
// inside a header file.  However, the FunctionMocker class template
// is meant to be defined in the ::testing namespace.  The following
// line is just a trick for working around a bug in MSVC 8.0, which
// cannot handle it if we define FunctionMocker in ::testing.
using internal::FunctionMocker;
}  // namespace testing

#define MOCK_METHOD(...) \
  GMOCK_PP_VARIADIC_CALL(GMOCK_INTERNAL_MOCK_METHOD_ARG_, __VA_ARGS__)

#define GMOCK_INTERNAL_MOCK_METHOD_ARG_1(...) \
  GMOCK_INTERNAL_WRONG_ARITY(__VA_ARGS__)

#define GMOCK_INTERNAL_MOCK_METHOD_ARG_2(...) \
  GMOCK_INTERNAL_WRONG_ARITY(__VA_ARGS__)

#define GMOCK_INTERNAL_MOCK_METHOD_ARG_3(_Ret, _MethodName, _Args) \
  GMOCK_INTERNAL_MOCK_METHOD_ARG_4(_Ret, _MethodName, _Args, ())

#define GMOCK_INTERNAL_MOCK_METHOD_ARG_4(_Ret, _MethodName, _Args, _Spec)  \
  GMOCK_INTERNAL_ASSERT_PARENTHESIS(_Args);                                \
  GMOCK_INTERNAL_ASSERT_PARENTHESIS(_Spec);                                \
  GMOCK_INTERNAL_ASSERT_VALID_SIGNATURE(                                   \
      GMOCK_PP_NARG0 _Args, GMOCK_INTERNAL_SIGNATURE(_Ret, _Args));        \
  GMOCK_INTERNAL_ASSERT_VALID_SPEC(_Spec)                                  \
  GMOCK_INTERNAL_MOCK_METHOD_IMPL(                                         \
      GMOCK_PP_NARG0 _Args, _MethodName, GMOCK_INTERNAL_HAS_CONST(_Spec),  \
      GMOCK_INTERNAL_HAS_OVERRIDE(_Spec), GMOCK_INTERNAL_HAS_FINAL(_Spec), \
      GMOCK_INTERNAL_GET_NOEXCEPT_SPEC(_Spec),                             \
      GMOCK_INTERNAL_GET_CALLTYPE_SPEC(_Spec),                             \
      GMOCK_INTERNAL_GET_REF_SPEC(_Spec),                                  \
      (GMOCK_INTERNAL_SIGNATURE(_Ret, _Args)))

#define GMOCK_INTERNAL_MOCK_METHOD_ARG_5(...) \
  GMOCK_INTERNAL_WRONG_ARITY(__VA_ARGS__)

#define GMOCK_INTERNAL_MOCK_METHOD_ARG_6(...) \
  GMOCK_INTERNAL_WRONG_ARITY(__VA_ARGS__)

#define GMOCK_INTERNAL_MOCK_METHOD_ARG_7(...) \
  GMOCK_INTERNAL_WRONG_ARITY(__VA_ARGS__)

#define GMOCK_INTERNAL_WRONG_ARITY(...)                                      \
  static_assert(                                                             \
      false,                                                                 \
      "MOCK_METHOD must be called with 3 or 4 arguments. _Ret, "             \
      "_MethodName, _Args and optionally _Spec. _Args and _Spec must be "    \
      "enclosed in parentheses. If _Ret is a type with unprotected commas, " \
      "it must also be enclosed in parentheses.")

#define GMOCK_INTERNAL_ASSERT_PARENTHESIS(_Tuple) \
  static_assert(                                  \
      GMOCK_PP_IS_ENCLOSED_PARENS(_Tuple),        \
      GMOCK_PP_STRINGIZE(_Tuple) " should be enclosed in parentheses.")

#define GMOCK_INTERNAL_ASSERT_VALID_SIGNATURE(_N, ...)                 \
  static_assert(                                                       \
      std::is_function<__VA_ARGS__>::value,                            \
      "Signature must be a function type, maybe return type contains " \
      "unprotected comma.");                                           \
  static_assert(                                                       \
      ::testing::tuple_size<typename ::testing::internal::Function<    \
          __VA_ARGS__>::ArgumentTuple>::value == _N,                   \
      "This method does not take " GMOCK_PP_STRINGIZE(                 \
          _N) " arguments. Parenthesize all types with unprotected commas.")

#define GMOCK_INTERNAL_ASSERT_VALID_SPEC(_Spec) \
  GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_ASSERT_VALID_SPEC_ELEMENT, ~, _Spec)

#define GMOCK_INTERNAL_MOCK_METHOD_IMPL(_N, _MethodName, _Constness,           \
                                        _Override, _Final, _NoexceptSpec,      \
                                        _CallType, _RefSpec, _Signature)       \
  typename ::testing::internal::Function<GMOCK_PP_REMOVE_PARENS(               \
      _Signature)>::Result                                                     \
  GMOCK_INTERNAL_EXPAND(_CallType)                                             \
  _MethodName(GMOCK_PP_REPEAT(GMOCK_INTERNAL_PARAMETER, _Signature, _N))       \
      GMOCK_PP_IF(_Constness, const, ) _RefSpec _NoexceptSpec                  \
      GMOCK_PP_IF(_Override, override, ) GMOCK_PP_IF(_Final, final, ) {        \
    GMOCK_MOCKER_(_N, _Constness, _MethodName)                                 \
        .SetOwnerAndName(this, #_MethodName);                                  \
    return GMOCK_MOCKER_(_N, _Constness, _MethodName)                          \
        .Invoke(GMOCK_PP_REPEAT(GMOCK_INTERNAL_FORWARD_ARG, _Signature, _N));  \
  }                                                                            \
  ::testing::MockSpec<GMOCK_PP_REMOVE_PARENS(_Signature)> gmock_##_MethodName( \
      GMOCK_PP_REPEAT(GMOCK_INTERNAL_MATCHER_PARAMETER, _Signature, _N))       \
      GMOCK_PP_IF(_Constness, const, ) _RefSpec {                              \
    GMOCK_MOCKER_(_N, _Constness, _MethodName).RegisterOwner(this);            \
    return GMOCK_MOCKER_(_N, _Constness, _MethodName)                          \
        .With(GMOCK_PP_REPEAT(GMOCK_INTERNAL_MATCHER_ARGUMENT, , _N));         \
  }                                                                            \
  ::testing::MockSpec<GMOCK_PP_REMOVE_PARENS(_Signature)> gmock_##_MethodName( \
      const ::testing::internal::WithoutMatchers&,                             \
      GMOCK_PP_IF(_Constness, const, )::testing::internal::Function<           \
          GMOCK_PP_REMOVE_PARENS(_Signature)>*) const _RefSpec _NoexceptSpec { \
    return ::testing::internal::ThisRefAdjuster<GMOCK_PP_IF(                   \
        _Constness, const, ) int _RefSpec>::Adjust(*this)                      \
        .gmock_##_MethodName(GMOCK_PP_REPEAT(                                  \
            GMOCK_INTERNAL_A_MATCHER_ARGUMENT, _Signature, _N));               \
  }                                                                            \
  mutable ::testing::FunctionMocker<GMOCK_PP_REMOVE_PARENS(_Signature)>        \
  GMOCK_MOCKER_(_N, _Constness, _MethodName)

#define GMOCK_INTERNAL_EXPAND(...) __VA_ARGS__

// Valid modifiers.
#define GMOCK_INTERNAL_HAS_CONST(_Tuple) \ GMOCK_PP_HAS_COMMA(GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_DETECT_CONST, ~, _Tuple)) #define GMOCK_INTERNAL_HAS_OVERRIDE(_Tuple) \ GMOCK_PP_HAS_COMMA( \ GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_DETECT_OVERRIDE, ~, _Tuple)) #define GMOCK_INTERNAL_HAS_FINAL(_Tuple) \ GMOCK_PP_HAS_COMMA(GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_DETECT_FINAL, ~, _Tuple)) #define GMOCK_INTERNAL_GET_NOEXCEPT_SPEC(_Tuple) \ GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_NOEXCEPT_SPEC_IF_NOEXCEPT, ~, _Tuple) #define GMOCK_INTERNAL_NOEXCEPT_SPEC_IF_NOEXCEPT(_i, _, _elem) \ GMOCK_PP_IF( \ GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_NOEXCEPT(_i, _, _elem)), \ _elem, ) #define GMOCK_INTERNAL_GET_CALLTYPE_SPEC(_Tuple) \ GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_CALLTYPE_SPEC_IF_CALLTYPE, ~, _Tuple) #define GMOCK_INTERNAL_CALLTYPE_SPEC_IF_CALLTYPE(_i, _, _elem) \ GMOCK_PP_IF( \ GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_CALLTYPE(_i, _, _elem)), \ GMOCK_PP_CAT(GMOCK_INTERNAL_UNPACK_, _elem), ) #define GMOCK_INTERNAL_GET_REF_SPEC(_Tuple) \ GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_REF_SPEC_IF_REF, ~, _Tuple) #define GMOCK_INTERNAL_REF_SPEC_IF_REF(_i, _, _elem) \ GMOCK_PP_IF(GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_REF(_i, _, _elem)), \ GMOCK_PP_CAT(GMOCK_INTERNAL_UNPACK_, _elem), ) #ifdef GMOCK_INTERNAL_STRICT_SPEC_ASSERT #define GMOCK_INTERNAL_ASSERT_VALID_SPEC_ELEMENT(_i, _, _elem) \ static_assert( \ ::testing::internal::ValidateSpec(GMOCK_PP_STRINGIZE(_elem)), \ "Token \'" GMOCK_PP_STRINGIZE( \ _elem) "\' cannot be recognized as a valid specification " \ "modifier. Is a ',' missing?"); #else #define GMOCK_INTERNAL_ASSERT_VALID_SPEC_ELEMENT(_i, _, _elem) \ static_assert( \ (GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_CONST(_i, _, _elem)) + \ GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_OVERRIDE(_i, _, _elem)) + \ GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_FINAL(_i, _, _elem)) + \ GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_NOEXCEPT(_i, _, _elem)) + \ GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_REF(_i, _, _elem)) + \ GMOCK_PP_HAS_COMMA(GMOCK_INTERNAL_DETECT_CALLTYPE(_i, _, _elem))) == 1, \ GMOCK_PP_STRINGIZE( \ _elem) " cannot be recognized as a valid specification modifier."); #endif // GMOCK_INTERNAL_STRICT_SPEC_ASSERT // Modifiers implementation. #define GMOCK_INTERNAL_DETECT_CONST(_i, _, _elem) \ GMOCK_PP_CAT(GMOCK_INTERNAL_DETECT_CONST_I_, _elem) #define GMOCK_INTERNAL_DETECT_CONST_I_const , #define GMOCK_INTERNAL_DETECT_OVERRIDE(_i, _, _elem) \ GMOCK_PP_CAT(GMOCK_INTERNAL_DETECT_OVERRIDE_I_, _elem) #define GMOCK_INTERNAL_DETECT_OVERRIDE_I_override , #define GMOCK_INTERNAL_DETECT_FINAL(_i, _, _elem) \ GMOCK_PP_CAT(GMOCK_INTERNAL_DETECT_FINAL_I_, _elem) #define GMOCK_INTERNAL_DETECT_FINAL_I_final , #define GMOCK_INTERNAL_DETECT_NOEXCEPT(_i, _, _elem) \ GMOCK_PP_CAT(GMOCK_INTERNAL_DETECT_NOEXCEPT_I_, _elem) #define GMOCK_INTERNAL_DETECT_NOEXCEPT_I_noexcept , #define GMOCK_INTERNAL_DETECT_REF(_i, _, _elem) \ GMOCK_PP_CAT(GMOCK_INTERNAL_DETECT_REF_I_, _elem) #define GMOCK_INTERNAL_DETECT_REF_I_ref , #define GMOCK_INTERNAL_UNPACK_ref(x) x #define GMOCK_INTERNAL_DETECT_CALLTYPE(_i, _, _elem) \ GMOCK_PP_CAT(GMOCK_INTERNAL_DETECT_CALLTYPE_I_, _elem) #define GMOCK_INTERNAL_DETECT_CALLTYPE_I_Calltype , #define GMOCK_INTERNAL_UNPACK_Calltype(...) __VA_ARGS__ // Note: The use of `identity_t` here allows _Ret to represent return types that // would normally need to be specified in a different way. For example, a method // returning a function pointer must be written as // // fn_ptr_return_t (*method(method_args_t...))(fn_ptr_args_t...) 
//
// But we only support placing the return type at the beginning. To handle
// this, we wrap all calls in identity_t, so that a declaration will be
// expanded to
//
// identity_t<fn_ptr_return_t (*)(fn_ptr_args_t...)> method(method_args_t...)
//
// This allows us to work around the syntactic oddities of function/method
// types.
#define GMOCK_INTERNAL_SIGNATURE(_Ret, _Args)                     \
  ::testing::internal::identity_t<GMOCK_PP_REMOVE_PARENS(_Ret)>( \
      GMOCK_PP_FOR_EACH(GMOCK_INTERNAL_GET_TYPE, _, _Args))

#define GMOCK_INTERNAL_GET_TYPE(_i, _, _elem)                          \
  GMOCK_PP_COMMA_IF(_i)                                                \
  GMOCK_PP_IF(GMOCK_PP_IS_BEGIN_PARENS(_elem), GMOCK_PP_REMOVE_PARENS, \
              GMOCK_PP_IDENTITY)                                       \
  (_elem)

#define GMOCK_INTERNAL_PARAMETER(_i, _Signature, _)            \
  GMOCK_PP_COMMA_IF(_i)                                        \
  GMOCK_INTERNAL_ARG_O(_i, GMOCK_PP_REMOVE_PARENS(_Signature)) \
  gmock_a##_i

#define GMOCK_INTERNAL_FORWARD_ARG(_i, _Signature, _) \
  GMOCK_PP_COMMA_IF(_i)                               \
  ::std::forward<GMOCK_INTERNAL_ARG_O(                \
      _i, GMOCK_PP_REMOVE_PARENS(_Signature))>(gmock_a##_i)

#define GMOCK_INTERNAL_MATCHER_PARAMETER(_i, _Signature, _)        \
  GMOCK_PP_COMMA_IF(_i)                                            \
  GMOCK_INTERNAL_MATCHER_O(_i, GMOCK_PP_REMOVE_PARENS(_Signature)) \
  gmock_a##_i

#define GMOCK_INTERNAL_MATCHER_ARGUMENT(_i, _1, _2) \
  GMOCK_PP_COMMA_IF(_i)                             \
  gmock_a##_i

#define GMOCK_INTERNAL_A_MATCHER_ARGUMENT(_i, _Signature, _) \
  GMOCK_PP_COMMA_IF(_i)                                      \
  ::testing::A<GMOCK_INTERNAL_ARG_O(_i, GMOCK_PP_REMOVE_PARENS(_Signature))>()

#define GMOCK_INTERNAL_ARG_O(_i, ...) \
  typename ::testing::internal::Function<__VA_ARGS__>::template Arg<_i>::type

#define GMOCK_INTERNAL_MATCHER_O(_i, ...)                          \
  const ::testing::Matcher<typename ::testing::internal::Function< \
      __VA_ARGS__>::template Arg<_i>::type>&

#define MOCK_METHOD0(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 0, __VA_ARGS__)
#define MOCK_METHOD1(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 1, __VA_ARGS__)
#define MOCK_METHOD2(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 2, __VA_ARGS__)
#define MOCK_METHOD3(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 3, __VA_ARGS__)
#define MOCK_METHOD4(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 4, __VA_ARGS__)
#define MOCK_METHOD5(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 5, __VA_ARGS__)
#define MOCK_METHOD6(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 6, __VA_ARGS__)
#define MOCK_METHOD7(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 7, __VA_ARGS__)
#define MOCK_METHOD8(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 8, __VA_ARGS__)
#define MOCK_METHOD9(m, ...) GMOCK_INTERNAL_MOCK_METHODN(, , m, 9, __VA_ARGS__)
#define MOCK_METHOD10(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(, , m, 10, __VA_ARGS__)

#define MOCK_CONST_METHOD0(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 0, __VA_ARGS__)
#define MOCK_CONST_METHOD1(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 1, __VA_ARGS__)
#define MOCK_CONST_METHOD2(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 2, __VA_ARGS__)
#define MOCK_CONST_METHOD3(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 3, __VA_ARGS__)
#define MOCK_CONST_METHOD4(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 4, __VA_ARGS__)
#define MOCK_CONST_METHOD5(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 5, __VA_ARGS__)
#define MOCK_CONST_METHOD6(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 6, __VA_ARGS__)
#define MOCK_CONST_METHOD7(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 7, __VA_ARGS__)
#define MOCK_CONST_METHOD8(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 8, __VA_ARGS__)
#define MOCK_CONST_METHOD9(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 9, __VA_ARGS__)
#define MOCK_CONST_METHOD10(m, ...) \
  GMOCK_INTERNAL_MOCK_METHODN(const, , m, 10, __VA_ARGS__)

#define MOCK_METHOD0_T(m, ...) MOCK_METHOD0(m, __VA_ARGS__)
#define MOCK_METHOD1_T(m, ...) MOCK_METHOD1(m, __VA_ARGS__)
#define MOCK_METHOD2_T(m, ...) MOCK_METHOD2(m, __VA_ARGS__)
#define MOCK_METHOD3_T(m, ...) \
MOCK_METHOD3(m, __VA_ARGS__) #define MOCK_METHOD4_T(m, ...) MOCK_METHOD4(m, __VA_ARGS__) #define MOCK_METHOD5_T(m, ...) MOCK_METHOD5(m, __VA_ARGS__) #define MOCK_METHOD6_T(m, ...) MOCK_METHOD6(m, __VA_ARGS__) #define MOCK_METHOD7_T(m, ...) MOCK_METHOD7(m, __VA_ARGS__) #define MOCK_METHOD8_T(m, ...) MOCK_METHOD8(m, __VA_ARGS__) #define MOCK_METHOD9_T(m, ...) MOCK_METHOD9(m, __VA_ARGS__) #define MOCK_METHOD10_T(m, ...) MOCK_METHOD10(m, __VA_ARGS__) #define MOCK_CONST_METHOD0_T(m, ...) MOCK_CONST_METHOD0(m, __VA_ARGS__) #define MOCK_CONST_METHOD1_T(m, ...) MOCK_CONST_METHOD1(m, __VA_ARGS__) #define MOCK_CONST_METHOD2_T(m, ...) MOCK_CONST_METHOD2(m, __VA_ARGS__) #define MOCK_CONST_METHOD3_T(m, ...) MOCK_CONST_METHOD3(m, __VA_ARGS__) #define MOCK_CONST_METHOD4_T(m, ...) MOCK_CONST_METHOD4(m, __VA_ARGS__) #define MOCK_CONST_METHOD5_T(m, ...) MOCK_CONST_METHOD5(m, __VA_ARGS__) #define MOCK_CONST_METHOD6_T(m, ...) MOCK_CONST_METHOD6(m, __VA_ARGS__) #define MOCK_CONST_METHOD7_T(m, ...) MOCK_CONST_METHOD7(m, __VA_ARGS__) #define MOCK_CONST_METHOD8_T(m, ...) MOCK_CONST_METHOD8(m, __VA_ARGS__) #define MOCK_CONST_METHOD9_T(m, ...) MOCK_CONST_METHOD9(m, __VA_ARGS__) #define MOCK_CONST_METHOD10_T(m, ...) MOCK_CONST_METHOD10(m, __VA_ARGS__) #define MOCK_METHOD0_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 0, __VA_ARGS__) #define MOCK_METHOD1_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 1, __VA_ARGS__) #define MOCK_METHOD2_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 2, __VA_ARGS__) #define MOCK_METHOD3_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 3, __VA_ARGS__) #define MOCK_METHOD4_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 4, __VA_ARGS__) #define MOCK_METHOD5_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 5, __VA_ARGS__) #define MOCK_METHOD6_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 6, __VA_ARGS__) #define MOCK_METHOD7_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 7, __VA_ARGS__) #define MOCK_METHOD8_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 8, __VA_ARGS__) #define MOCK_METHOD9_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 9, __VA_ARGS__) #define MOCK_METHOD10_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(, ct, m, 10, __VA_ARGS__) #define MOCK_CONST_METHOD0_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 0, __VA_ARGS__) #define MOCK_CONST_METHOD1_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 1, __VA_ARGS__) #define MOCK_CONST_METHOD2_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 2, __VA_ARGS__) #define MOCK_CONST_METHOD3_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 3, __VA_ARGS__) #define MOCK_CONST_METHOD4_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 4, __VA_ARGS__) #define MOCK_CONST_METHOD5_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 5, __VA_ARGS__) #define MOCK_CONST_METHOD6_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 6, __VA_ARGS__) #define MOCK_CONST_METHOD7_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 7, __VA_ARGS__) #define MOCK_CONST_METHOD8_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 8, __VA_ARGS__) #define MOCK_CONST_METHOD9_WITH_CALLTYPE(ct, m, ...) \ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 9, __VA_ARGS__) #define MOCK_CONST_METHOD10_WITH_CALLTYPE(ct, m, ...) 
\ GMOCK_INTERNAL_MOCK_METHODN(const, ct, m, 10, __VA_ARGS__) #define MOCK_METHOD0_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD0_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD1_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD1_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD2_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD2_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD3_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD3_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD4_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD4_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD5_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD5_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD6_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD6_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD7_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD7_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD8_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD8_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD9_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD9_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_METHOD10_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_METHOD10_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD0_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD0_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD1_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD1_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD2_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD2_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD3_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD3_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD4_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD4_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD5_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD5_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD6_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD6_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD7_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD7_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD8_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD8_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD9_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD9_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define MOCK_CONST_METHOD10_T_WITH_CALLTYPE(ct, m, ...) \ MOCK_CONST_METHOD10_WITH_CALLTYPE(ct, m, __VA_ARGS__) #define GMOCK_INTERNAL_MOCK_METHODN(constness, ct, Method, args_num, ...) \ GMOCK_INTERNAL_ASSERT_VALID_SIGNATURE( \ args_num, ::testing::internal::identity_t<__VA_ARGS__>); \ GMOCK_INTERNAL_MOCK_METHOD_IMPL( \ args_num, Method, GMOCK_PP_NARG0(constness), 0, 0, , ct, , \ (::testing::internal::identity_t<__VA_ARGS__>)) #define GMOCK_MOCKER_(arity, constness, Method) \ GTEST_CONCAT_TOKEN_(gmock##constness##arity##_##Method##_, __LINE__) #endif // GOOGLEMOCK_INCLUDE_GMOCK_INTERNAL_GMOCK_FUNCTION_MOCKER_H_ libvpl-tools-1.3.0/ext/googletest/googlemock/include/gmock/gmock-matchers.h000066400000000000000000006303721473010523400270550ustar00rootroot00000000000000// Copyright 2007, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. 
// * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Google Mock - a framework for writing C++ mock classes. // // The MATCHER* family of macros can be used in a namespace scope to // define custom matchers easily. // // Basic Usage // =========== // // The syntax // // MATCHER(name, description_string) { statements; } // // defines a matcher with the given name that executes the statements, // which must return a bool to indicate if the match succeeds. Inside // the statements, you can refer to the value being matched by 'arg', // and refer to its type by 'arg_type'. // // The description string documents what the matcher does, and is used // to generate the failure message when the match fails. Since a // MATCHER() is usually defined in a header file shared by multiple // C++ source files, we require the description to be a C-string // literal to avoid possible side effects. It can be empty, in which // case we'll use the sequence of words in the matcher name as the // description. // // For example: // // MATCHER(IsEven, "") { return (arg % 2) == 0; } // // allows you to write // // // Expects mock_foo.Bar(n) to be called where n is even. // EXPECT_CALL(mock_foo, Bar(IsEven())); // // or, // // // Verifies that the value of some_expression is even. // EXPECT_THAT(some_expression, IsEven()); // // If the above assertion fails, it will print something like: // // Value of: some_expression // Expected: is even // Actual: 7 // // where the description "is even" is automatically calculated from the // matcher name IsEven. // // Argument Type // ============= // // Note that the type of the value being matched (arg_type) is // determined by the context in which you use the matcher and is // supplied to you by the compiler, so you don't need to worry about // declaring it (nor can you). This allows the matcher to be // polymorphic. For example, IsEven() can be used to match any type // where the value of "(arg % 2) == 0" can be implicitly converted to // a bool. In the "Bar(IsEven())" example above, if method Bar() // takes an int, 'arg_type' will be int; if it takes an unsigned long, // 'arg_type' will be unsigned long; and so on. // // Parameterizing Matchers // ======================= // // Sometimes you'll want to parameterize the matcher. 
// For that you can use another macro:
//
//   MATCHER_P(name, param_name, description_string) { statements; }
//
// For example:
//
//   MATCHER_P(HasAbsoluteValue, value, "") { return abs(arg) == value; }
//
// will allow you to write:
//
//   EXPECT_THAT(Blah("a"), HasAbsoluteValue(n));
//
// which may lead to this message (assuming n is 10):
//
//   Value of: Blah("a")
//   Expected: has absolute value 10
//   Actual: -9
//
// Note that both the matcher description and its parameter are
// printed, making the message human-friendly.
//
// In the matcher definition body, you can write 'foo_type' to
// reference the type of a parameter named 'foo'.  For example, in the
// body of MATCHER_P(HasAbsoluteValue, value) above, you can write
// 'value_type' to refer to the type of 'value'.
//
// We also provide MATCHER_P2, MATCHER_P3, ..., up to MATCHER_P10 to
// support multi-parameter matchers.
//
// Describing Parameterized Matchers
// =================================
//
// The last argument to MATCHER*() is a string-typed expression.  The
// expression can reference all of the matcher's parameters and a
// special bool-typed variable named 'negation'.  When 'negation' is
// false, the expression should evaluate to the matcher's description;
// otherwise it should evaluate to the description of the negation of
// the matcher.  For example,
//
//   using testing::PrintToString;
//
//   MATCHER_P2(InClosedRange, low, hi,
//              std::string(negation ? "is not" : "is") + " in range [" +
//              PrintToString(low) + ", " + PrintToString(hi) + "]") {
//     return low <= arg && arg <= hi;
//   }
//   ...
//   EXPECT_THAT(3, InClosedRange(4, 6));
//   EXPECT_THAT(3, Not(InClosedRange(2, 4)));
//
// would generate two failures that contain the text:
//
//   Expected: is in range [4, 6]
//   ...
//   Expected: is not in range [2, 4]
//
// If you specify "" as the description, the failure message will
// contain the sequence of words in the matcher name followed by the
// parameter values printed as a tuple.  For example,
//
//   MATCHER_P2(InClosedRange, low, hi, "") { ... }
//   ...
//   EXPECT_THAT(3, InClosedRange(4, 6));
//   EXPECT_THAT(3, Not(InClosedRange(2, 4)));
//
// would generate two failures that contain the text:
//
//   Expected: in closed range (4, 6)
//   ...
//   Expected: not (in closed range (2, 4))
//
// Types of Matcher Parameters
// ===========================
//
// For the purpose of typing, you can view
//
//   MATCHER_Pk(Foo, p1, ..., pk, description_string) { ... }
//
// as shorthand for
//
//   template <typename p1_type, ..., typename pk_type>
//   FooMatcherPk<p1_type, ..., pk_type>
//   Foo(p1_type p1, ..., pk_type pk) { ... }
//
// When you write Foo(v1, ..., vk), the compiler infers the types of
// the parameters v1, ..., and vk for you.  If you are not happy with
// the result of the type inference, you can specify the types by
// explicitly instantiating the template, as in Foo<long, bool>(5,
// false).  As said earlier, you don't get to (or need to) specify
// 'arg_type' as that's determined by the context in which the matcher
// is used.  You can assign the result of expression Foo(p1, ..., pk)
// to a variable of type FooMatcherPk<p1_type, ..., pk_type>.  This
// can be useful when composing matchers.
//
// While you can instantiate a matcher template with reference types,
// passing the parameters by pointer usually makes your code more
// readable.  If, however, you still want to pass a parameter by
// reference, be aware that in the failure message generated by the
// matcher you will see the value of the referenced object but not its
// address.
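//
// Putting the above together, a minimal self-contained test using a
// parameterized matcher might look like the following sketch
// (IsDivisibleBy and DivisibilityTest are hypothetical names, not part
// of this library):
//
//   #include "gmock/gmock.h"
//   #include "gtest/gtest.h"
//
//   MATCHER_P(IsDivisibleBy, n, "") { return (arg % n) == 0; }
//
//   TEST(DivisibilityTest, WorksForSmallIntegers) {
//     EXPECT_THAT(27, IsDivisibleBy(9));
//     EXPECT_THAT(28, ::testing::Not(IsDivisibleBy(9)));
//   }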
//
// Explaining Match Results
// ========================
//
// Sometimes the matcher description alone isn't enough to explain why
// the match has failed or succeeded.  For example, when expecting a
// long string, it can be very helpful to also print the diff between
// the expected string and the actual one.  To achieve that, you can
// optionally stream additional information to a special variable
// named result_listener, whose type is a pointer to class
// MatchResultListener:
//
//   MATCHER_P(EqualsLongString, str, "") {
//     if (arg == str) return true;
//
//     *result_listener << "the difference: "
//                      << DiffStrings(str, arg);
//     return false;
//   }
//
// Overloading Matchers
// ====================
//
// You can overload matchers with different numbers of parameters:
//
//   MATCHER_P(Blah, a, description_string1) { ... }
//   MATCHER_P2(Blah, a, b, description_string2) { ... }
//
// Caveats
// =======
//
// When defining a new matcher, you should also consider implementing
// MatcherInterface or using MakePolymorphicMatcher().  These
// approaches require more work than the MATCHER* macros, but also
// give you more control over the types of the value being matched and
// the matcher parameters, which may lead to better compiler error
// messages when the matcher is used incorrectly.  They also allow
// overloading matchers based on parameter types (as opposed to just
// based on the number of parameters).
//
// MATCHER*() can only be used in a namespace scope as templates cannot be
// declared inside of a local class.
//
// More Information
// ================
//
// To learn more about using these macros, please search for 'MATCHER'
// on
// https://github.com/google/googletest/blob/master/docs/gmock_cook_book.md
//
// This file also implements some commonly used argument matchers.  More
// matchers can be defined by the user implementing the
// MatcherInterface<T> interface if necessary.
//
// See googletest/include/gtest/gtest-matchers.h for the definition of class
// Matcher, class MatcherInterface, and others.

// IWYU pragma: private, include "gmock/gmock.h"
// IWYU pragma: friend gmock/.*

#ifndef GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_MATCHERS_H_
#define GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_MATCHERS_H_

#include <algorithm>
#include <cmath>
#include <exception>
#include <functional>
#include <initializer_list>
#include <ios>
#include <iterator>
#include <limits>
#include <memory>
#include <ostream>  // NOLINT
#include <sstream>
#include <string>
#include <type_traits>
#include <utility>
#include <vector>

#include "gmock/internal/gmock-internal-utils.h"
#include "gmock/internal/gmock-port.h"
#include "gmock/internal/gmock-pp.h"
#include "gtest/gtest.h"

// MSVC warning C5046 is new as of VS2017 version 15.8.
#if defined(_MSC_VER) && _MSC_VER >= 1915
#define GMOCK_MAYBE_5046_ 5046
#else
#define GMOCK_MAYBE_5046_
#endif

GTEST_DISABLE_MSC_WARNINGS_PUSH_(
    4251 GMOCK_MAYBE_5046_ /* class A needs to have dll-interface to be used by
                              clients of class B */
    /* Symbol involving type with internal linkage not defined */)

namespace testing {

// To implement a matcher Foo for type T, define:
//   1. a class FooMatcherImpl that implements the
//      MatcherInterface<T> interface, and
//   2. a factory function that creates a Matcher<T> object from a
//      FooMatcherImpl*.
//
// The two-level delegation design makes it possible to allow a user
// to write "v" instead of "Eq(v)" where a Matcher is expected, which
// is impossible if we pass matchers by pointers.  It also eases
// ownership management as Matcher objects can now be copied like
// plain values.

// A match result listener that stores the explanation in a string.
class StringMatchResultListener : public MatchResultListener {
 public:
  StringMatchResultListener() : MatchResultListener(&ss_) {}

  // Returns the explanation accumulated so far.
  std::string str() const { return ss_.str(); }

  // Clears the explanation accumulated so far.
  void Clear() { ss_.str(""); }

 private:
  ::std::stringstream ss_;

  StringMatchResultListener(const StringMatchResultListener&) = delete;
  StringMatchResultListener& operator=(const StringMatchResultListener&) =
      delete;
};

// Anything inside the 'internal' namespace IS INTERNAL IMPLEMENTATION
// and MUST NOT BE USED IN USER CODE!!!
namespace internal {

// The MatcherCastImpl class template is a helper for implementing
// MatcherCast().  We need this helper in order to partially
// specialize the implementation of MatcherCast() (C++ allows
// class/struct templates to be partially specialized, but not
// function templates.).

// This general version is used when MatcherCast()'s argument is a
// polymorphic matcher (i.e. something that can be converted to a
// Matcher but is not one yet; for example, Eq(value)) or a value (for
// example, "hello").
template <typename T, typename M>
class MatcherCastImpl {
 public:
  static Matcher<T> Cast(const M& polymorphic_matcher_or_value) {
    // M can be a polymorphic matcher, in which case we want to use
    // its conversion operator to create Matcher<T>.  Or it can be a value
    // that should be passed to the Matcher<T>'s constructor.
    //
    // We can't call Matcher<T>(polymorphic_matcher_or_value) when M is a
    // polymorphic matcher because it'll be ambiguous if T has an implicit
    // constructor from M (this usually happens when T has an implicit
    // constructor from any type).
    //
    // It won't work to unconditionally implicit_cast
    // polymorphic_matcher_or_value to Matcher<T> because it won't trigger
    // a user-defined conversion from M to T if one exists (assuming M is
    // a value).
    return CastImpl(polymorphic_matcher_or_value,
                    std::is_convertible<M, Matcher<T>>{},
                    std::is_convertible<M, T>{});
  }

 private:
  template <bool Ignore>
  static Matcher<T> CastImpl(const M& polymorphic_matcher_or_value,
                             std::true_type /* convertible_to_matcher */,
                             std::integral_constant<bool, Ignore>) {
    // M is implicitly convertible to Matcher<T>, which means that either
    // M is a polymorphic matcher or Matcher<T> has an implicit constructor
    // from M.  In both cases using the implicit conversion will produce a
    // matcher.
    //
    // Even if T has an implicit constructor from M, it won't be called because
    // creating Matcher<T> would require a chain of two user-defined conversions
    // (first to create T from M and then to create Matcher<T> from T).
    return polymorphic_matcher_or_value;
  }

  // M can't be implicitly converted to Matcher<T>, so M isn't a polymorphic
  // matcher.  It's a value of a type implicitly convertible to T.  Use direct
  // initialization to create a matcher.
  static Matcher<T> CastImpl(const M& value,
                             std::false_type /* convertible_to_matcher */,
                             std::true_type /* convertible_to_T */) {
    return Matcher<T>(ImplicitCast_<T>(value));
  }

  // M can't be implicitly converted to either Matcher<T> or T.  Attempt to use
  // polymorphic matcher Eq(value) in this case.
  //
  // Note that we first attempt to perform an implicit cast on the value and
  // only fall back to the polymorphic Eq() matcher afterwards because the
  // latter calls bool operator==(const Lhs& lhs, const Rhs& rhs) in the end
  // which might be undefined even when Rhs is implicitly convertible to Lhs
  // (e.g. std::pair<const int, int> vs. std::pair<int, int>).
  //
  // We don't define this method inline as we need the declaration of Eq().
  static Matcher<T> CastImpl(const M& value,
                             std::false_type /* convertible_to_matcher */,
                             std::false_type /* convertible_to_T */);
};

// This more specialized version is used when MatcherCast()'s argument
// is already a Matcher.  This only compiles when type T can be
// statically converted to type U.
template <typename T, typename U>
class MatcherCastImpl<T, Matcher<U>> {
 public:
  static Matcher<T> Cast(const Matcher<U>& source_matcher) {
    return Matcher<T>(new Impl(source_matcher));
  }

 private:
  class Impl : public MatcherInterface<T> {
   public:
    explicit Impl(const Matcher<U>& source_matcher)
        : source_matcher_(source_matcher) {}

    // We delegate the matching logic to the source matcher.
    bool MatchAndExplain(T x, MatchResultListener* listener) const override {
      using FromType = typename std::remove_cv<typename std::remove_pointer<
          typename std::remove_reference<T>::type>::type>::type;
      using ToType = typename std::remove_cv<typename std::remove_pointer<
          typename std::remove_reference<U>::type>::type>::type;
      // Do not allow implicitly converting base*/& to derived*/&.
      static_assert(
          // Do not trigger if only one of them is a pointer. That implies a
          // regular conversion and not a down_cast.
          (std::is_pointer<typename std::remove_reference<T>::type>::value !=
           std::is_pointer<typename std::remove_reference<U>::type>::value) ||
              std::is_same<FromType, ToType>::value ||
              !std::is_base_of<FromType, ToType>::value,
          "Can't implicitly convert from <base> to <derived>");

      // Do the cast to `U` explicitly if necessary.
      // Otherwise, let implicit conversions do the trick.
      using CastType =
          typename std::conditional<std::is_convertible<T&, const U&>::value,
                                    T&, U>::type;

      return source_matcher_.MatchAndExplain(static_cast<CastType>(x),
                                             listener);
    }

    void DescribeTo(::std::ostream* os) const override {
      source_matcher_.DescribeTo(os);
    }

    void DescribeNegationTo(::std::ostream* os) const override {
      source_matcher_.DescribeNegationTo(os);
    }

   private:
    const Matcher<U> source_matcher_;
  };
};

// This even more specialized version is used for efficiently casting
// a matcher to its own type.
template <typename T>
class MatcherCastImpl<T, Matcher<T>> {
 public:
  static Matcher<T> Cast(const Matcher<T>& matcher) { return matcher; }
};

// Template specialization for parameterless Matcher.
template <typename Derived>
class MatcherBaseImpl {
 public:
  MatcherBaseImpl() = default;

  template <typename T>
  operator ::testing::Matcher<T>() const {  // NOLINT(runtime/explicit)
    return ::testing::Matcher<T>(new
                                 typename Derived::template gmock_Impl<T>());
  }
};

// Template specialization for Matcher with parameters.
template