github-actions-models-0.14.0/.cargo_vcs_info.json

{
  "git": {
    "sha1": "0107768c9afbf314bfb177394e47499bb346b652"
  },
  "path_in_vcs": ""
}

github-actions-models-0.14.0/.github/dependabot.yml

version: 2
updates:
  - package-ecosystem: cargo
    directory: "/"
    schedule:
      interval: weekly
  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: weekly

github-actions-models-0.14.0/.github/workflows/ci.yml

name: CI
on:
  push:
    branches:
      - main
  pull_request:

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Format
        run: cargo fmt && git diff --exit-code
      - name: Lint
        run: |
          rustup component add clippy
          cargo clippy -- -D warnings

  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Test
        run: cargo test

github-actions-models-0.14.0/.github/workflows/release.yml

on:
  release:
    types:
      - published

name: release

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: publish to crates.io
        run: cargo publish
        env:
          CARGO_REGISTRY_TOKEN: "${{ secrets.CARGO_REGISTRY_TOKEN }}"

github-actions-models-0.14.0/.github/workflows/zizmor.yml

name: GitHub Actions Security Analysis with zizmor 🌈

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["**"]

jobs:
  zizmor:
    name: zizmor latest via Cargo
    runs-on: ubuntu-latest
    permissions:
      security-events: write
      # required for workflows in private repositories
      contents: read
      actions: read
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@v4
      - name: Run zizmor 🌈
        run: uvx zizmor --format sarif . > results.sarif
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload SARIF file
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: results.sarif
          category: zizmor

github-actions-models-0.14.0/.gitignore

/target
/Cargo.lock

github-actions-models-0.14.0/Cargo.toml

# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
name = "github-actions-models"
version = "0.14.0"
authors = ["William Woodruff <william@yossarian.net>"]
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Unofficial, high-quality data models for GitHub Actions workflows, actions, and related components"
readme = "README.md"
keywords = [
    "github",
    "ci",
]
categories = ["api-bindings"]
license = "MIT"
repository = "https://github.com/woodruffw/github-actions-models"

[lib]
name = "github_actions_models"
path = "src/lib.rs"

[[test]]
name = "test_action"
path = "tests/test_action.rs"

[[test]]
name = "test_dependabot_v2"
path = "tests/test_dependabot_v2.rs"

[[test]]
name = "test_workflow"
path = "tests/test_workflow.rs"

[dependencies.indexmap]
version = "2.7.0"
features = ["serde"]

[dependencies.serde]
version = "1.0.193"
features = ["derive"]

[dependencies.serde_yaml]
version = "0.9.29"

[dev-dependencies]

github-actions-models-0.14.0/Cargo.toml.orig

[package]
name = "github-actions-models"
authors = ["William Woodruff <william@yossarian.net>"]
description = "Unofficial, high-quality data models for GitHub Actions workflows, actions, and related components"
version = "0.14.0"
edition = "2021"
license = "MIT"
repository = "https://github.com/woodruffw/github-actions-models"
keywords = ["github", "ci"]
categories = ["api-bindings"]

[dependencies]
indexmap = { version = "2.7.0", features = ["serde"] }
serde = { version = "1.0.193", features = ["derive"] }
serde_yaml = "0.9.29"

[dev-dependencies]

github-actions-models-0.14.0/LICENSE

The MIT License (MIT)

Copyright (c) 2024 William Woodruff <william@yossarian.net>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

github-actions-models-0.14.0/README.md

github-actions-models
=====================

[![CI](https://github.com/woodruffw/github-actions-models/actions/workflows/ci.yml/badge.svg)](https://github.com/woodruffw/github-actions-models/actions/workflows/ci.yml)
[![Crates.io](https://img.shields.io/crates/v/github-actions-models)](https://crates.io/crates/github-actions-models)

Unofficial, high-quality data models for GitHub Actions workflows, actions,
and related components.

## Why?

I need these for [another tool], and generating them automatically from
[their JSON Schemas] wasn't working, both for expressiveness reasons and
because of tooling deficiencies.

[another tool]: https://github.com/woodruffw/zizmor

[their JSON Schemas]: https://www.schemastore.org/json/

## License

MIT License.

The integration tests for this crate contain sample workflows collected from
various GitHub repositories; these contain comments linking them to their
original repositories and are licensed under the terms there.
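To make the intended usage concrete, here is a minimal sketch of how the crate is typically driven: deserialize a workflow with `serde_yaml`, then walk the typed result. The inline workflow below is illustrative, not taken from this repository.

```rust
use github_actions_models::workflow::{Trigger, Workflow};

fn main() -> Result<(), serde_yaml::Error> {
    let src = r#"
name: smoke-test
on: [push, pull_request]
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: cargo test
"#;

    let workflow: Workflow = serde_yaml::from_str(src)?;
    assert_eq!(workflow.name.as_deref(), Some("smoke-test"));

    // `on:` is modeled as a bare event, a list of events, or a full mapping.
    assert!(matches!(workflow.on, Trigger::BareEvents(_)));

    for (id, job) in &workflow.jobs {
        println!("job {id}: name = {:?}", job.name());
    }

    Ok(())
}
```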
github-actions-models-0.14.0/src/action.rs

//! Data models for GitHub Actions action definitions.
//!
//! Resources:
//! * [Metadata syntax for GitHub Actions]
//! * [JSON Schema definition for GitHub Actions]
//!
//! [Metadata syntax for GitHub Actions]: https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions
//! [JSON Schema definition for GitHub Actions]: https://json.schemastore.org/github-action.json

use indexmap::IndexMap;
use serde::Deserialize;

use crate::common::{expr::BoE, Env, EnvValue, If};

/// A GitHub Actions action definition.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Action {
    pub name: String,
    pub author: Option<String>,
    pub description: Option<String>,
    #[serde(default)]
    pub inputs: IndexMap<String, Input>,
    #[serde(default)]
    pub outputs: IndexMap<String, Output>,
    pub runs: Runs,
}

/// An action input.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Input {
    pub description: String,
    pub required: Option<bool>,
    pub default: Option<EnvValue>,
}

/// An action output.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Output {
    pub description: String,
    // NOTE: not optional for composite actions, but this is not worth modeling.
    pub value: Option<String>,
}

/// An action `runs` definition.
///
/// A `runs` definition can be either a JavaScript action, a "composite" action
/// (made up of several constituent actions), or a Docker action.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum Runs {
    JavaScript(JavaScript),
    Composite(Composite),
    Docker(Docker),
}

/// A `runs` definition for a JavaScript action.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct JavaScript {
    /// The Node runtime to use for this action. This is one of:
    ///
    /// `"node12" | "node16" | "node20"`
    pub using: String,

    /// The action's entrypoint, as a JavaScript file.
    pub main: String,

    /// An optional script to run, before [`JavaScript::main`].
    pub pre: Option<String>,

    /// An optional expression that triggers [`JavaScript::pre`] if it evaluates to `true`.
    ///
    /// If not present, defaults to `always()`
    pub pre_if: Option<String>,

    /// An optional script to run, after [`JavaScript::main`].
    pub post: Option<String>,

    /// An optional expression that triggers [`JavaScript::post`] if it evaluates to `true`.
    ///
    /// If not present, defaults to `always()`
    pub post_if: Option<String>,
}

/// A `runs` definition for a composite action.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Composite {
    /// Invariant: `"composite"`
    pub using: String,

    /// The individual steps that make up this composite action.
    pub steps: Vec<Step>,
}
/// An individual composite action step.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum Step {
    RunShell(RunShell),
    UseAction(UseAction),
}

/// A step that runs a command in a shell.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct RunShell {
    /// The command to run.
    pub run: String,

    /// The shell to run in.
    pub shell: String,

    /// An optional name for this step.
    pub name: Option<String>,

    /// An optional ID for this step.
    pub id: Option<String>,

    /// An optional expression that prevents this step from running unless it evaluates to `true`.
    pub r#if: Option<If>,

    /// An optional environment mapping for this step.
    #[serde(default)]
    pub env: Env,

    /// An optional boolean or expression that, if `true`, prevents the job from failing when
    /// this step fails.
    #[serde(default)]
    pub continue_on_error: BoE,

    /// An optional working directory to run [`RunShell::run`] from.
    pub working_directory: Option<String>,
}

/// A step that uses another GitHub Action.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct UseAction {
    /// The GitHub Action being used.
    pub uses: String,

    /// Any inputs to the action being used.
    #[serde(default)]
    pub with: IndexMap<String, EnvValue>,

    /// An optional expression that prevents this step from running unless it evaluates to `true`.
    pub r#if: Option<If>,
}

/// A `runs` definition for a Docker action.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Docker {
    /// Invariant: `"docker"`
    pub using: String,

    /// The Docker image to use.
    pub image: String,

    /// An optional environment mapping for this step.
    #[serde(default)]
    pub env: Env,

    /// An optional Docker entrypoint, potentially overriding the image's
    /// default entrypoint.
    pub entrypoint: Option<String>,

    /// An optional "pre" entrypoint to run, before [`Docker::entrypoint`].
    pub pre_entrypoint: Option<String>,

    /// An optional expression that triggers [`Docker::pre_entrypoint`] if it evaluates to `true`.
    ///
    /// If not present, defaults to `always()`
    pub pre_if: Option<String>,

    /// An optional "post" entrypoint to run, after [`Docker::entrypoint`] or the default
    /// entrypoint.
    pub post_entrypoint: Option<String>,

    /// An optional expression that triggers [`Docker::post_entrypoint`] if it evaluates to `true`.
    ///
    /// If not present, defaults to `always()`
    pub post_if: Option<String>,
}
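Since `Runs` is an untagged enum, serde resolves the variant purely from which fields are present in the YAML. A rough sketch of how that plays out (the `describe` helper is illustrative, not part of the crate):

```rust
use github_actions_models::action::{Action, Runs};

// Summarize which execution flavor an action definition uses.
fn describe(src: &str) -> Result<String, serde_yaml::Error> {
    let action: Action = serde_yaml::from_str(src)?;
    let kind = match &action.runs {
        Runs::JavaScript(js) => format!("JavaScript ({})", js.using),
        Runs::Composite(c) => format!("composite ({} steps)", c.steps.len()),
        Runs::Docker(d) => format!("Docker ({})", d.image),
    };
    Ok(format!("{}: {kind}", action.name))
}
```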
github-actions-models-0.14.0/src/common/expr.rs

//! GitHub Actions expression parsing and handling.

use serde::{Deserialize, Serialize};

/// An explicit GitHub Actions expression, fenced by `${{ }}`.
#[derive(Debug, PartialEq, Serialize)]
pub struct ExplicitExpr(String);

impl ExplicitExpr {
    /// Construct an `ExplicitExpr` from the given string, consuming it
    /// in the process.
    ///
    /// Returns `None` if the input is not a valid explicit expression.
    pub fn from_curly(expr: impl Into<String>) -> Option<Self> {
        // Invariant preservation: we store the full string, but
        // we expect it to be a well-formed expression.
        let expr = expr.into();
        let trimmed = expr.trim();
        if !trimmed.starts_with("${{") || !trimmed.ends_with("}}") {
            return None;
        }

        Some(ExplicitExpr(expr))
    }

    /// Return the original string underlying this expression, including
    /// its exact whitespace and curly delimiters.
    pub fn as_raw(&self) -> &str {
        &self.0
    }

    /// Return the "curly" form of this expression, with leading and trailing
    /// whitespace removed.
    ///
    /// Whitespace *within* the expression body is not removed or normalized.
    pub fn as_curly(&self) -> &str {
        self.as_raw().trim()
    }

    /// Return the "bare" form of this expression, i.e. the `body` within
    /// `${{ body }}`. Leading and trailing whitespace within
    /// the expression body is removed.
    pub fn as_bare(&self) -> &str {
        self.as_curly()
            .strip_prefix("${{")
            .and_then(|e| e.strip_suffix("}}"))
            .map(|e| e.trim())
            .expect("invariant violated: ExplicitExpr must be an expression")
    }
}

impl<'de> Deserialize<'de> for ExplicitExpr {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let raw = String::deserialize(deserializer)?;
        let Some(expr) = Self::from_curly(raw) else {
            return Err(serde::de::Error::custom(
                "invalid expression: expected '${{' and '}}' delimiters",
            ));
        };

        Ok(expr)
    }
}

/// A "literal or expr" type, for places in GitHub Actions where a
/// key can either have a literal value (array, object, etc.) or an
/// expression string.
#[derive(Debug, Deserialize, PartialEq, Serialize)]
#[serde(untagged)]
pub enum LoE<T> {
    // Observe that `Expr` comes first, since `LoE` should always
    // attempt to parse as an expression before falling back on a literal
    // string.
    Expr(ExplicitExpr),
    Literal(T),
}

impl<T> Default for LoE<T>
where
    T: Default,
{
    fn default() -> Self {
        Self::Literal(T::default())
    }
}

/// A convenience alias for a `bool` literal or an actions expression.
pub type BoE = LoE<bool>;

#[cfg(test)]
mod tests {
    use super::{ExplicitExpr, LoE};

    #[test]
    fn test_expr_invalid() {
        let cases = &[
            "not an expression",
            "${{ missing end ",
            "missing beginning }}",
        ];

        for case in cases {
            let case = format!("\"{case}\"");
            assert!(serde_yaml::from_str::<ExplicitExpr>(&case).is_err());
        }
    }

    #[test]
    fn test_expr() {
        let expr = "\" ${{ foo }} \\t \"";
        let expr: ExplicitExpr = serde_yaml::from_str(expr).unwrap();
        assert_eq!(expr.as_bare(), "foo");
    }

    #[test]
    fn test_loe() {
        let lit = "\"normal string\"";
        assert_eq!(
            serde_yaml::from_str::<LoE<String>>(lit).unwrap(),
            LoE::Literal("normal string".to_string())
        );

        let expr = "\"${{ expr }}\"";
        assert!(matches!(
            serde_yaml::from_str::<LoE<String>>(expr).unwrap(),
            LoE::Expr(_)
        ));

        // Invalid expr deserializes as string.
        let invalid = "\"${{ invalid \"";
        assert_eq!(
            serde_yaml::from_str::<LoE<String>>(invalid).unwrap(),
            LoE::Literal("${{ invalid ".to_string())
        );
    }
}
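The three accessor forms on `ExplicitExpr` are easiest to see side by side. A small sketch using only the public API:

```rust
use github_actions_models::common::expr::ExplicitExpr;

fn main() {
    let expr = ExplicitExpr::from_curly("  ${{ github.ref_name }} ")
        .expect("well-formed ${{ }} expression");

    // The raw form preserves whitespace; the other forms progressively trim.
    assert_eq!(expr.as_raw(), "  ${{ github.ref_name }} ");
    assert_eq!(expr.as_curly(), "${{ github.ref_name }}");
    assert_eq!(expr.as_bare(), "github.ref_name");

    // Inputs without the `${{ }}` fences are rejected.
    assert!(ExplicitExpr::from_curly("github.ref_name").is_none());
}
```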
github-actions-models-0.14.0/src/common.rs

//! Shared models and utilities.

use std::fmt::Display;

use indexmap::IndexMap;
use serde::{Deserialize, Deserializer, Serialize};

pub mod expr;

/// `permissions` for a workflow, job, or step.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum Permissions {
    /// Base, i.e. blanket permissions.
    Base(BasePermission),
    /// Fine-grained permissions.
    ///
    /// These are modeled with an open-ended mapping rather than a structure
    /// to make iteration over all defined permissions easier.
    Explicit(IndexMap<String, Permission>),
}

impl Default for Permissions {
    fn default() -> Self {
        Self::Base(BasePermission::Default)
    }
}

/// "Base" permissions, where all individual permissions are configured
/// with a blanket setting.
#[derive(Deserialize, Default, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum BasePermission {
    /// Whatever default permissions come from the workflow's `GITHUB_TOKEN`.
    #[default]
    Default,
    /// "Read" access to all resources.
    ReadAll,
    /// "Write" access to all resources (implies read).
    WriteAll,
}

/// A singular permission setting.
#[derive(Deserialize, Default, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum Permission {
    /// Read access.
    Read,
    /// Write access.
    Write,
    /// No access.
    #[default]
    None,
}

/// An environment mapping.
pub type Env = IndexMap<String, EnvValue>;

/// Environment variable values are always strings, but GitHub Actions
/// allows users to configure them as various native YAML types before
/// internal stringification.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(untagged)]
pub enum EnvValue {
    // Missing values are empty strings.
    #[serde(deserialize_with = "null_to_default")]
    String(String),
    Number(f64),
    Boolean(bool),
}

impl Display for EnvValue {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::String(s) => write!(f, "{s}"),
            Self::Number(n) => write!(f, "{n}"),
            Self::Boolean(b) => write!(f, "{b}"),
        }
    }
}

/// A "scalar or vector" type, for places in GitHub Actions where a
/// key can have either a scalar value or an array of values.
///
/// This only appears internally, as an intermediate type for `scalar_or_vector`.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(untagged)]
enum SoV<T> {
    One(T),
    Many(Vec<T>),
}

impl<T> From<SoV<T>> for Vec<T> {
    fn from(val: SoV<T>) -> Vec<T> {
        match val {
            SoV::One(v) => vec![v],
            SoV::Many(vs) => vs,
        }
    }
}

pub(crate) fn scalar_or_vector<'de, D, T>(de: D) -> Result<Vec<T>, D::Error>
where
    D: Deserializer<'de>,
    T: Deserialize<'de>,
{
    SoV::deserialize(de).map(Into::into)
}

/// A bool or string. This is useful for cases where GitHub Actions contextually
/// reinterprets a YAML boolean as a string, e.g. `run: true` really means
/// `run: 'true'`.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(untagged)]
enum BoS {
    Bool(bool),
    String(String),
}

impl From<BoS> for String {
    fn from(value: BoS) -> Self {
        match value {
            BoS::Bool(b) => b.to_string(),
            BoS::String(s) => s,
        }
    }
}

/// An `if:` condition in a job or action definition.
///
/// These are either booleans or bare (i.e. non-curly) expressions.
#[derive(Debug, Deserialize, PartialEq, Serialize)]
#[serde(untagged)]
pub enum If {
    Bool(bool),
    // NOTE: condition expressions can be either "bare" or "curly", so we can't
    // use `BoE` or anything else that assumes curly-only here.
    Expr(String),
}

pub(crate) fn bool_is_string<'de, D>(de: D) -> Result<String, D::Error>
where
    D: Deserializer<'de>,
{
    BoS::deserialize(de).map(Into::into)
}

fn null_to_default<'de, D, T>(de: D) -> Result<T, D::Error>
where
    D: Deserializer<'de>,
    T: Default + Deserialize<'de>,
{
    let key = Option::<T>::deserialize(de)?;
    Ok(key.unwrap_or_default())
}

#[cfg(test)]
mod tests {
    use indexmap::IndexMap;

    use crate::common::{BasePermission, Env, EnvValue, Permission};

    use super::Permissions;

    #[test]
    fn test_permissions() {
        assert_eq!(
            serde_yaml::from_str::<Permissions>("read-all").unwrap(),
            Permissions::Base(BasePermission::ReadAll)
        );

        let perm = "security-events: write";
        assert_eq!(
            serde_yaml::from_str::<Permissions>(perm).unwrap(),
            Permissions::Explicit(IndexMap::from([(
                "security-events".into(),
                Permission::Write
            )]))
        );
    }

    #[test]
    fn test_env_empty_value() {
        let env = "foo:";
        assert_eq!(
            serde_yaml::from_str::<Env>(env).unwrap()["foo"],
            EnvValue::String("".into())
        );
    }
}
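`EnvValue`'s untagged variants are clearest with a worked example; a sketch of how mixed YAML scalars come through:

```rust
use github_actions_models::common::{Env, EnvValue};

fn main() -> Result<(), serde_yaml::Error> {
    let env: Env = serde_yaml::from_str("FOO: bar\nDEBUG: true\nEMPTY:\n")?;

    assert_eq!(env["FOO"], EnvValue::String("bar".into()));
    assert_eq!(env["DEBUG"], EnvValue::Boolean(true));
    // Missing values deserialize as empty strings, per `null_to_default`.
    assert_eq!(env["EMPTY"], EnvValue::String(String::new()));

    // `Display` stringifies values the way GitHub Actions ultimately does.
    assert_eq!(env["DEBUG"].to_string(), "true");

    Ok(())
}
```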
github-actions-models-0.14.0/src/dependabot/mod.rs

//! Data models for Dependabot configuration files.

pub mod v2;

github-actions-models-0.14.0/src/dependabot/v2.rs

//! "v2" Dependabot models.
//!
//! Resources:
//! * [Configuration options for the `dependabot.yml` file](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file)
//! * [JSON Schema for Dependabot v2](https://json.schemastore.org/dependabot-2.0.json)

use indexmap::{IndexMap, IndexSet};
use serde::Deserialize;

/// A `dependabot.yml` configuration file.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct Dependabot {
    /// Invariant: `2`
    pub version: u64,
    #[serde(default)]
    pub enable_beta_ecosystems: bool,
    #[serde(default)]
    pub registries: IndexMap<String, Registry>,
    pub updates: Vec<Update>,
}

/// Different registries known to Dependabot.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub enum Registry {
    ComposerRepository {
        url: String,
        username: Option<String>,
        password: Option<String>,
    },
    DockerRegistry {
        url: String,
        username: Option<String>,
        password: Option<String>,
        #[serde(default)]
        replaces_base: bool,
    },
    Git {
        url: String,
        username: Option<String>,
        password: Option<String>,
    },
    HexOrganization {
        organization: String,
        key: Option<String>,
    },
    HexRepository {
        repo: Option<String>,
        url: String,
        auth_key: Option<String>,
        public_key_fingerprint: Option<String>,
    },
    MavenRepository {
        url: String,
        username: Option<String>,
        password: Option<String>,
    },
    NpmRegistry {
        url: String,
        username: Option<String>,
        password: Option<String>,
        #[serde(default)]
        replaces_base: bool,
    },
    NugetFeed {
        url: String,
        username: Option<String>,
        password: Option<String>,
    },
    PythonIndex {
        url: String,
        username: Option<String>,
        password: Option<String>,
        #[serde(default)]
        replaces_base: bool,
    },
    RubygemsServer {
        url: String,
        username: Option<String>,
        password: Option<String>,
        #[serde(default)]
        replaces_base: bool,
    },
    TerraformRegistry {
        url: String,
        token: Option<String>,
    },
}

/// A single `update` directive.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct Update {
    #[serde(default)]
    pub allow: Vec<Allow>,
    #[serde(default)]
    pub assignees: IndexSet<String>,
    pub commit_message: Option<CommitMessage>,
    pub directory: String,
    #[serde(default)]
    pub groups: IndexMap<String, Group>,
    #[serde(default)]
    pub ignore: Vec<Ignore>,
    #[serde(default)]
    pub insecure_external_code_execution: AllowDeny,
    /// Labels to apply to this update group's pull requests.
    ///
    /// The default label is `dependencies`.
    #[serde(default = "default_labels")]
    pub labels: IndexSet<String>,
    pub milestone: Option<u64>,
    /// The maximum number of pull requests to open at a time from this
    /// update group.
    ///
    /// The default maximum is 5.
    #[serde(default = "default_open_pull_requests_limit")]
    pub open_pull_requests_limit: u64,
    pub package_ecosystem: PackageEcosystem,
    // TODO: pull-request-branch-name
    #[serde(default)]
    pub rebase_strategy: RebaseStrategy,
    #[serde(default, deserialize_with = "crate::common::scalar_or_vector")]
    pub registries: Vec<String>,
    #[serde(default)]
    pub reviewers: IndexSet<String>,
    pub schedule: Schedule,
    pub target_branch: Option<String>,
    #[serde(default)]
    pub vendor: bool,
    pub versioning_strategy: Option<VersioningStrategy>,
}

#[inline]
fn default_labels() -> IndexSet<String> {
    IndexSet::from(["dependencies".to_string()])
}

#[inline]
fn default_open_pull_requests_limit() -> u64 {
    // https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#open-pull-requests-limit
    5
}

/// Allow rules for Dependabot updates.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct Allow {
    pub dependency_name: Option<String>,
    pub dependency_type: Option<DependencyType>,
}

/// Dependency types in `allow` rules.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum DependencyType {
    Direct,
    Indirect,
    All,
    Production,
    Development,
}

/// Commit message settings for Dependabot updates.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct CommitMessage {
    pub prefix: Option<String>,
    pub prefix_development: Option<String>,
    /// Invariant: `"scope"`
    pub include: Option<String>,
}

/// Group settings for batched updates.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct Group {
    /// This can only be [`DependencyType::Development`] or
    /// [`DependencyType::Production`].
    pub dependency_type: Option<DependencyType>,
    #[serde(default)]
    pub patterns: IndexSet<String>,
    #[serde(default)]
    pub exclude_patterns: IndexSet<String>,
    #[serde(default)]
    pub update_types: IndexSet<UpdateType>,
}

/// Update types for grouping.
#[derive(Deserialize, Debug, Hash, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum UpdateType {
    Major,
    Minor,
    Patch,
}

/// Dependency ignore settings for updates.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct Ignore {
    pub dependency_name: Option<String>,
    /// These are, inexplicably, not [`UpdateType`] variants.
    /// Instead, they're strings like `"version-update:semver-{major,minor,patch}"`.
    #[serde(default)]
    pub update_types: IndexSet<String>,
    #[serde(default)]
    pub versions: IndexSet<String>,
}

/// An "allow"/"deny" toggle.
#[derive(Deserialize, Debug, Default)]
#[serde(rename_all = "kebab-case")]
pub enum AllowDeny {
    Allow,
    #[default]
    Deny,
}

/// Supported packaging ecosystems.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum PackageEcosystem {
    Bundler,
    Cargo,
    Composer,
    Docker,
    Elm,
    Gitsubmodule,
    GithubActions,
    Gomod,
    Gradle,
    Maven,
    Mix,
    Npm,
    Nuget,
    Pip,
    Pub,
    Swift,
    Terraform,
}

/// Rebase strategies for Dependabot updates.
#[derive(Deserialize, Debug, Default, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum RebaseStrategy {
    #[default]
    Auto,
    Disabled,
}

/// Scheduling settings for Dependabot updates.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct Schedule {
    pub interval: Interval,
    pub day: Option<Day>,
    pub time: Option<String>,
    pub timezone: Option<String>,
}

/// Schedule intervals.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum Interval {
    Daily,
    Weekly,
    Monthly,
}

/// Days of the week.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum Day {
    Monday,
    Tuesday,
    Wednesday,
    Thursday,
    Friday,
    Saturday,
    Sunday,
}

/// Versioning strategies.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum VersioningStrategy {
    Auto,
    Increase,
    IncreaseIfNecessary,
    LockfileOnly,
    Widen,
}
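The defaulting behavior above (the `dependencies` label, the open-PR limit of 5) is easiest to verify end to end; a hedged sketch:

```rust
use github_actions_models::dependabot::v2::{Dependabot, Interval, PackageEcosystem};

fn main() -> Result<(), serde_yaml::Error> {
    let src = r#"
version: 2
updates:
  - package-ecosystem: cargo
    directory: /
    schedule:
      interval: weekly
"#;

    let config: Dependabot = serde_yaml::from_str(src)?;
    assert_eq!(config.version, 2);

    let update = &config.updates[0];
    assert_eq!(update.package_ecosystem, PackageEcosystem::Cargo);
    assert_eq!(update.schedule.interval, Interval::Weekly);

    // Documented Dependabot defaults are filled in during deserialization.
    assert_eq!(update.open_pull_requests_limit, 5);
    assert!(update.labels.contains("dependencies"));

    Ok(())
}
```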
github-actions-models-0.14.0/src/lib.rs

//! High-quality data models for GitHub Actions and associated machinery.

#![deny(rustdoc::broken_intra_doc_links)]
#![allow(clippy::redundant_field_names)]
#![forbid(unsafe_code)]

pub mod action;
pub mod common;
pub mod dependabot;
pub mod workflow;

github-actions-models-0.14.0/src/workflow/event.rs

//! Workflow events.

use indexmap::IndexMap;
use serde::Deserialize;

/// "Bare" workflow event triggers.
///
/// These appear when a workflow is triggered with an event with no context,
/// e.g.:
///
/// ```yaml
/// on: push
/// ```
#[derive(Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum BareEvent {
    BranchProtectionRule,
    CheckRun,
    CheckSuite,
    Create,
    Delete,
    Deployment,
    DeploymentStatus,
    Discussion,
    DiscussionComment,
    Fork,
    Gollum,
    IssueComment,
    Issues,
    Label,
    MergeGroup,
    Milestone,
    PageBuild,
    Project,
    ProjectCard,
    ProjectColumn,
    Public,
    PullRequest,
    PullRequestComment,
    PullRequestReview,
    PullRequestReviewComment,
    PullRequestTarget,
    Push,
    RegistryPackage,
    Release,
    RepositoryDispatch,
    // NOTE: `schedule` is omitted, since it's never bare.
    Status,
    Watch,
    WorkflowCall,
    WorkflowDispatch,
    WorkflowRun,
}

/// Workflow event triggers, with bodies.
///
/// Like [`BareEvent`], but with per-event properties.
#[derive(Default, Deserialize)]
#[serde(default, rename_all = "snake_case")]
pub struct Events {
    pub branch_protection_rule: OptionalBody<GenericEvent>,
    pub check_run: OptionalBody<GenericEvent>,
    pub check_suite: OptionalBody<GenericEvent>,
    // TODO: create + delete
    // TODO: deployment + deployment_status
    pub discussion: OptionalBody<GenericEvent>,
    pub discussion_comment: OptionalBody<GenericEvent>,
    // TODO: fork + gollum
    pub issue_comment: OptionalBody<GenericEvent>,
    pub issues: OptionalBody<GenericEvent>,
    pub label: OptionalBody<GenericEvent>,
    pub merge_group: OptionalBody<GenericEvent>,
    pub milestone: OptionalBody<GenericEvent>,
    // TODO: page_build
    pub project: OptionalBody<GenericEvent>,
    pub project_card: OptionalBody<GenericEvent>,
    pub project_column: OptionalBody<GenericEvent>,
    // TODO: public
    pub pull_request: OptionalBody<PullRequest>,
    pub pull_request_comment: OptionalBody<GenericEvent>,
    pub pull_request_review: OptionalBody<GenericEvent>,
    pub pull_request_review_comment: OptionalBody<GenericEvent>,
    // NOTE: `pull_request_target` appears to have the same trigger filters as `pull_request`.
    pub pull_request_target: OptionalBody<PullRequest>,
    pub push: OptionalBody<Push>,
    pub registry_package: OptionalBody<GenericEvent>,
    pub release: OptionalBody<GenericEvent>,
    pub repository_dispatch: OptionalBody<GenericEvent>,
    pub schedule: OptionalBody<Vec<Cron>>,
    // TODO: status
    pub watch: OptionalBody<GenericEvent>,
    pub workflow_call: OptionalBody<WorkflowCall>,
    // TODO: Custom type.
    pub workflow_dispatch: OptionalBody<WorkflowDispatch>,
    pub workflow_run: OptionalBody<WorkflowRun>,
}

/// A generic container type for distinguishing between
/// a missing key, an explicitly null key, and an explicit value `T`.
///
/// This is needed for modeling `on:` triggers, since GitHub distinguishes
/// between the non-presence of an event (no trigger) and the presence
/// of an empty event body (e.g. `pull_request:`), which means "trigger
/// with the defaults for this event type."
#[derive(Default)]
pub enum OptionalBody<T> {
    Default,
    #[default]
    Missing,
    Body(T),
}

impl<'de, T> Deserialize<'de> for OptionalBody<T>
where
    T: Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        Option::deserialize(deserializer).map(Into::into)
    }
}

impl<T> From<Option<T>> for OptionalBody<T> {
    fn from(value: Option<T>) -> Self {
        match value {
            Some(v) => OptionalBody::Body(v),
            None => OptionalBody::Default,
        }
    }
}

/// A generic event trigger body.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct GenericEvent {
    #[serde(default, deserialize_with = "crate::common::scalar_or_vector")]
    pub types: Vec<String>,
}

/// The body of a `pull_request` event trigger.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct PullRequest {
    #[serde(default)]
    pub types: Vec<String>,

    #[serde(flatten)]
    pub branch_filters: Option<BranchFilters>,

    #[serde(flatten)]
    pub path_filters: Option<PathFilters>,
}

/// The body of a `push` event trigger.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Push {
    #[serde(flatten)]
    pub branch_filters: Option<BranchFilters>,

    #[serde(flatten)]
    pub path_filters: Option<PathFilters>,

    #[serde(flatten)]
    pub tag_filters: Option<TagFilters>,
}

/// The body of a `cron` event trigger.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Cron {
    pub cron: String,
}

/// The body of a `workflow_call` event trigger.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkflowCall {
    #[serde(default)]
    pub inputs: IndexMap<String, WorkflowCallInput>,
    #[serde(default)]
    pub outputs: IndexMap<String, WorkflowCallOutput>,
    #[serde(default)]
    pub secrets: IndexMap<String, WorkflowCallSecret>,
}

/// A single input in a `workflow_call` event trigger body.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkflowCallInput {
    pub description: Option<String>,
    // TODO: model `default`?
    #[serde(default)]
    pub required: bool,
    pub r#type: String,
}

/// A single output in a `workflow_call` event trigger body.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkflowCallOutput {
    pub description: Option<String>,
    pub value: String,
}

/// A single secret in a `workflow_call` event trigger body.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkflowCallSecret {
    pub description: Option<String>,
    pub required: bool,
}

/// The body of a `workflow_dispatch` event trigger.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkflowDispatch {
    #[serde(default)]
    pub inputs: IndexMap<String, WorkflowDispatchInput>, // TODO: WorkflowDispatchInput
}

/// A single input in a `workflow_dispatch` event trigger body.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkflowDispatchInput {
    pub description: Option<String>,
    // TODO: model `default`?
    #[serde(default)]
    pub required: bool,
    // TODO: Model as boolean, choice, number, environment, string; default is string.
    pub r#type: Option<String>,
    // Only present when `type` is `choice`.
    #[serde(default)]
    pub options: Vec<String>,
}

/// The body of a `workflow_run` event trigger.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct WorkflowRun {
    pub workflows: Vec<String>,
    #[serde(default)]
    pub types: Vec<String>,
    #[serde(flatten)]
    pub branch_filters: Option<BranchFilters>,
}

/// Branch filtering variants for event trigger bodies.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum BranchFilters {
    Branches(Vec<String>),
    BranchesIgnore(Vec<String>),
}

/// Tag filtering variants for event trigger bodies.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum TagFilters {
    Tags(Vec<String>),
    TagsIgnore(Vec<String>),
}

/// Path filtering variants for event trigger bodies.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum PathFilters {
    Paths(Vec<String>),
    PathsIgnore(Vec<String>),
}
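The three-way distinction that `OptionalBody` models is subtle enough to deserve a demonstration; a sketch:

```rust
use github_actions_models::workflow::event::{Events, OptionalBody};

fn main() -> Result<(), serde_yaml::Error> {
    // `pull_request:` with an empty body means "trigger with defaults";
    // an absent key means the event is not a trigger at all.
    let events: Events = serde_yaml::from_str(
        "pull_request:\npush:\n  branches: [main]\n",
    )?;

    assert!(matches!(events.pull_request, OptionalBody::Default));
    assert!(matches!(events.push, OptionalBody::Body(_)));
    assert!(matches!(events.issues, OptionalBody::Missing));

    Ok(())
}
```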
github-actions-models-0.14.0/src/workflow/job.rs

//! Workflow jobs.

use indexmap::IndexMap;
use serde::{de, Deserialize, Serialize};
use serde_yaml::Value;

use crate::common::expr::{BoE, LoE};
use crate::common::{Env, If, Permissions};

use super::{Concurrency, Defaults};

/// A "normal" GitHub Actions workflow job, i.e. a job composed of one
/// or more steps on a runner.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct NormalJob {
    pub name: Option<String>,
    #[serde(default)]
    pub permissions: Permissions,
    #[serde(default, deserialize_with = "crate::common::scalar_or_vector")]
    pub needs: Vec<String>,
    pub r#if: Option<If>,
    pub runs_on: LoE<RunsOn>,
    pub environment: Option<DeploymentEnvironment>,
    pub concurrency: Option<Concurrency>,
    #[serde(default)]
    pub outputs: IndexMap<String, String>,
    #[serde(default)]
    pub env: LoE<Env>,
    pub defaults: Option<Defaults>,
    pub steps: Vec<Step>,
    pub timeout_minutes: Option<LoE<u64>>,
    pub strategy: Option<Strategy>,
    #[serde(default)]
    pub continue_on_error: BoE,
    pub container: Option<Container>,
    #[serde(default)]
    pub services: IndexMap<String, Container>,
}

#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "kebab-case", untagged, remote = "Self")]
pub enum RunsOn {
    #[serde(deserialize_with = "crate::common::scalar_or_vector")]
    Target(Vec<String>),
    Group {
        group: Option<String>,
        // NOTE(ww): serde struggles with the null/empty case for custom
        // deserializers, so we help it out by telling it that it can default
        // to Vec::default.
        #[serde(deserialize_with = "crate::common::scalar_or_vector", default)]
        labels: Vec<String>,
    },
}

impl<'de> Deserialize<'de> for RunsOn {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let runs_on = Self::deserialize(deserializer)?;

        // serde lacks the ability to do inter-field invariants at the derive
        // layer, so we enforce the invariant that a `RunsOn::Group`
        // has either a `group` or at least one label here.
        if let RunsOn::Group { group, labels } = &runs_on {
            if group.is_none() && labels.is_empty() {
                return Err(de::Error::custom(
                    "runs-on must provide either `group` or one or more `labels`",
                ));
            }
        }

        Ok(runs_on)
    }
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum DeploymentEnvironment {
    Name(String),
    NameURL { name: String, url: Option<String> },
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Step {
    pub id: Option<String>,
    pub r#if: Option<If>,
    pub name: Option<String>,
    pub timeout_minutes: Option<u64>,
    #[serde(default)]
    pub continue_on_error: BoE,
    #[serde(flatten)]
    pub body: StepBody,
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum StepBody {
    Uses {
        uses: String,
        #[serde(default)]
        with: Env,
    },
    Run {
        #[serde(deserialize_with = "crate::common::bool_is_string")]
        run: String,
        working_directory: Option<String>,
        shell: Option<String>,
        #[serde(default)]
        env: LoE<Env>,
    },
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Strategy {
    pub matrix: Option<LoE<Matrix>>,
    pub fail_fast: Option<BoE>,
    pub max_parallel: Option<u64>,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Matrix {
    #[serde(default)]
    pub include: LoE<Vec<IndexMap<String, Value>>>,
    #[serde(default)]
    pub exclude: LoE<Vec<IndexMap<String, Value>>>,
    #[serde(flatten)]
    pub dimensions: LoE<IndexMap<String, LoE<Vec<Value>>>>,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum Container {
    Name(String),
    Container {
        image: String,
        credentials: Option<DockerCredentials>,
        #[serde(default)]
        env: LoE<Env>,
        // TODO: model `ports`?
        #[serde(default)]
        volumes: Vec<String>,
        options: Option<String>,
    },
}

#[derive(Deserialize)]
pub struct DockerCredentials {
    pub username: Option<String>,
    pub password: Option<String>,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ReusableWorkflowCallJob {
    pub name: Option<String>,
    #[serde(default)]
    pub permissions: Permissions,
    #[serde(default, deserialize_with = "crate::common::scalar_or_vector")]
    pub needs: Vec<String>,
    pub r#if: Option<If>,
    pub uses: String,
    #[serde(default)]
    pub with: Env,
    pub secrets: Option<Secrets>,
}

#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum Secrets {
    Inherit,
    #[serde(untagged)]
    Env(#[serde(default)] Env),
}

#[cfg(test)]
mod tests {
    use crate::{
        common::{expr::LoE, EnvValue},
        workflow::job::{Matrix, Secrets},
    };

    use super::{RunsOn, Strategy};

    #[test]
    fn test_secrets() {
        assert_eq!(
            serde_yaml::from_str::<Secrets>("inherit").unwrap(),
            Secrets::Inherit
        );

        let secrets = "foo-secret: bar";
        let Secrets::Env(secrets) = serde_yaml::from_str::<Secrets>(secrets).unwrap() else {
            panic!("unexpected secrets variant");
        };
        assert_eq!(secrets["foo-secret"], EnvValue::String("bar".into()));
    }

    #[test]
    fn test_strategy_matrix_expressions() {
        let strategy = "matrix: ${{ 'foo' }}";
        let Strategy {
            matrix: Some(LoE::Expr(expr)),
            ..
        } = serde_yaml::from_str::<Strategy>(strategy).unwrap()
        else {
            panic!("unexpected matrix variant");
        };
        assert_eq!(expr.as_curly(), "${{ 'foo' }}");

        let strategy = r#"
matrix:
  foo: ${{ 'foo' }}
"#;
        let Strategy {
            matrix:
                Some(LoE::Literal(Matrix {
                    include: _,
                    exclude: _,
                    dimensions: LoE::Literal(dims),
                })),
            ..
        } = serde_yaml::from_str::<Strategy>(strategy).unwrap()
        else {
            panic!("unexpected matrix variant");
        };
        assert!(matches!(dims.get("foo"), Some(LoE::Expr(_))));
    }

    #[test]
    fn test_runson_invalid_state() {
        let runson = "group: \nlabels: []";
        assert_eq!(
            serde_yaml::from_str::<RunsOn>(runson)
                .unwrap_err()
                .to_string(),
            "runs-on must provide either `group` or one or more `labels`"
        );
    }
}
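A sketch of how the flattened `StepBody` splits `uses:` and `run:` steps apart when a job is deserialized on its own:

```rust
use github_actions_models::workflow::job::{NormalJob, StepBody};

fn main() -> Result<(), serde_yaml::Error> {
    let src = r#"
runs-on: ubuntu-latest
steps:
  - uses: actions/checkout@v4
  - name: Build
    run: cargo build --release
"#;

    let job: NormalJob = serde_yaml::from_str(src)?;
    for step in &job.steps {
        match &step.body {
            StepBody::Uses { uses, .. } => println!("uses: {uses}"),
            StepBody::Run { run, .. } => println!("run: {run}"),
        }
    }

    Ok(())
}
```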
github-actions-models-0.14.0/src/workflow/mod.rs

//! Data models for GitHub Actions workflow definitions.
//!
//! Resources:
//! * [Workflow syntax for GitHub Actions]
//! * [JSON Schema definition for workflows]
//!
//! [Workflow syntax for GitHub Actions]: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions
//! [JSON Schema definition for workflows]: https://json.schemastore.org/github-workflow.json

use indexmap::IndexMap;
use serde::Deserialize;

use crate::common::{expr::BoE, Env, Permissions};

pub mod event;
pub mod job;

/// A single GitHub Actions workflow.
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Workflow {
    pub name: Option<String>,
    pub run_name: Option<String>,
    pub on: Trigger,
    #[serde(default)]
    pub permissions: Permissions,
    #[serde(default)]
    pub env: Env,
    pub defaults: Option<Defaults>,
    pub concurrency: Option<Concurrency>,
    pub jobs: IndexMap<String, Job>,
}

/// The triggering condition or conditions for a workflow.
///
/// Workflow triggers take three forms:
///
/// 1. A single webhook event name:
///
///    ```yaml
///    on: push
///    ```
/// 2. A list of webhook event names:
///
///    ```yaml
///    on: [push, fork]
///    ```
///
/// 3. A mapping of event names with (optional) configurations:
///
///    ```yaml
///    on:
///      push:
///        branches: [main]
///      pull_request:
///    ```
#[derive(Deserialize)]
#[serde(rename_all = "snake_case", untagged)]
pub enum Trigger {
    BareEvent(event::BareEvent),
    BareEvents(Vec<event::BareEvent>),
    Events(Box<event::Events>),
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Defaults {
    pub run: Option<RunDefaults>,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct RunDefaults {
    pub shell: Option<String>,
    pub working_directory: Option<String>,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum Concurrency {
    Bare(String),
    Rich {
        group: String,
        #[serde(default)]
        cancel_in_progress: BoE,
    },
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case", untagged)]
pub enum Job {
    NormalJob(Box<job::NormalJob>),
    ReusableWorkflowCallJob(Box<job::ReusableWorkflowCallJob>),
}

impl Job {
    /// Returns the optional `name` field common to both reusable and normal
    /// job definitions.
    pub fn name(&self) -> Option<&str> {
        match self {
            Self::NormalJob(job) => job.name.as_deref(),
            Self::ReusableWorkflowCallJob(job) => job.name.as_deref(),
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::workflow::event::{OptionalBody, WorkflowCall, WorkflowDispatch};

    use super::{Concurrency, Trigger};

    #[test]
    fn test_concurrency() {
        let bare = "foo";
        let concurrency: Concurrency = serde_yaml::from_str(bare).unwrap();
        assert!(matches!(concurrency, Concurrency::Bare(_)));

        let rich = "group: foo\ncancel-in-progress: true";
        let concurrency: Concurrency = serde_yaml::from_str(rich).unwrap();
        assert!(matches!(
            concurrency,
            Concurrency::Rich {
                group: _,
                cancel_in_progress: _
            }
        ));
    }

    #[test]
    fn test_workflow_triggers() {
        let on = "
issues:
workflow_dispatch:
  inputs:
    foo:
      type: string
workflow_call:
  inputs:
    bar:
      type: string
pull_request_target:
";

        let trigger: Trigger = serde_yaml::from_str(on).unwrap();
        let Trigger::Events(events) = trigger else {
            panic!("wrong trigger type");
        };

        assert!(matches!(events.issues, OptionalBody::Default));
        assert!(matches!(
            events.workflow_dispatch,
            OptionalBody::Body(WorkflowDispatch { .. })
        ));
        assert!(matches!(
            events.workflow_call,
            OptionalBody::Body(WorkflowCall { .. })
        ));
        assert!(matches!(events.pull_request_target, OptionalBody::Default));
    }
}
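Putting the workflow models together, here is a hedged sketch of the kind of analysis this crate is built to support (the function is illustrative, not crate API): flag jobs that leave their `GITHUB_TOKEN` permissions at the default.

```rust
use github_actions_models::common::{BasePermission, Permissions};
use github_actions_models::workflow::{Job, Workflow};

// Collect the IDs of jobs that don't restrict their token permissions.
fn jobs_with_default_permissions(workflow: &Workflow) -> Vec<&str> {
    workflow
        .jobs
        .iter()
        .filter(|(_, job)| {
            let permissions = match job {
                Job::NormalJob(job) => &job.permissions,
                Job::ReusableWorkflowCallJob(job) => &job.permissions,
            };
            *permissions == Permissions::Base(BasePermission::Default)
        })
        .map(|(id, _)| id.as_str())
        .collect()
}
```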
github-actions-models-0.14.0/tests/sample-actions/gh-action-pip-audit.yml

# https://github.com/pypa/gh-action-pip-audit/blob/530374b67a3e8b3972d2caae7ee9a1d3dd486329/action.yml

name: "gh-action-pip-audit"
author: "William Woodruff <william@yossarian.net>"
description: "Use pip-audit to scan Python dependencies for known vulnerabilities"

inputs:
  summary:
    description: "render a Markdown summary of the audit (default true)"
    required: false
    default: true
  no-deps:
    description: "don't do any dependency resolution (requires fully pinned requirements) (default false)"
    required: false
    default: false
  require-hashes:
    description: "enforce hashes (requirements-style inputs only) (default false)"
    required: false
    default: false
  vulnerability-service:
    description: "the vulnerability service to use (PyPI or OSV, defaults to PyPI)"
    required: false
    default: "PyPI"
  inputs:
    description: "the inputs to audit, whitespace separated (defaults to current path)"
    required: false
    default: ""
  virtual-environment:
    description: "the virtual environment to audit within (default none)"
    required: false
    default: ""
  local:
    description: "for environmental audits, consider only packages marked local (default false)"
    required: false
    default: false
  index-url:
    description: "the base URL for the PEP 503-compatible package index to use"
    required: false
    default: ""
  extra-index-urls:
    description: "extra PEP 503-compatible indexes to use, whitespace separated"
    required: false
    default: ""
  ignore-vulns:
    description: "vulnerabilities to explicitly exclude, if present (whitespace separated)"
    required: false
    default: ""
  internal-be-careful-allow-failure:
    description: "don't fail the job if the audit fails (default false)"
    required: false
    default: false
  internal-be-careful-extra-flags:
    description: "extra flags to be passed in to pip-audit"
    required: false
    default: ""

outputs:
  internal-be-careful-output:
    description: "the column-formatted output from pip-audit, wrapped as base64"
    value: "${{ steps.pip-audit.outputs.output }}"

runs:
  using: "composite"
  steps:
    - name: Set up pip-audit
      run: |
        # NOTE: Sourced, not executed as a script.
        source "${{ github.action_path }}/setup/setup.bash"
      env:
        GHA_PIP_AUDIT_VIRTUAL_ENVIRONMENT: "${{ inputs.virtual-environment }}"
      shell: bash

    - name: Run pip-audit
      id: pip-audit
      run: |
        # NOTE: Sourced, not executed as a script.
        source "${{ github.action_path }}/setup/venv.bash"

        python "${{ github.action_path }}/action.py" "${{ inputs.inputs }}"
      env:
        GHA_PIP_AUDIT_SUMMARY: "${{ inputs.summary }}"
        GHA_PIP_AUDIT_NO_DEPS: "${{ inputs.no-deps }}"
        GHA_PIP_AUDIT_REQUIRE_HASHES: "${{ inputs.require-hashes }}"
        GHA_PIP_AUDIT_VULNERABILITY_SERVICE: "${{ inputs.vulnerability-service }}"
        GHA_PIP_AUDIT_VIRTUAL_ENVIRONMENT: "${{ inputs.virtual-environment }}"
        GHA_PIP_AUDIT_LOCAL: "${{ inputs.local }}"
        GHA_PIP_AUDIT_INDEX_URL: "${{ inputs.index-url }}"
        GHA_PIP_AUDIT_EXTRA_INDEX_URLS: "${{ inputs.extra-index-urls }}"
        GHA_PIP_AUDIT_IGNORE_VULNS: "${{ inputs.ignore-vulns }}"
        GHA_PIP_AUDIT_INTERNAL_BE_CAREFUL_ALLOW_FAILURE: "${{ inputs.internal-be-careful-allow-failure }}"
        GHA_PIP_AUDIT_INTERNAL_BE_CAREFUL_EXTRA_FLAGS: "${{ inputs.internal-be-careful-extra-flags }}"
      shell: bash
github-actions-models-0.14.0/tests/sample-actions/gh-action-pypi-publish.yml

# https://github.com/pypa/gh-action-pypi-publish/blob/2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf/action.yml
---
name: pypi-publish
description: Upload Python distribution packages to PyPI
inputs:
  user:
    description: PyPI user
    required: false
    default: __token__
  password:
    description: Password for your PyPI user or an access token
    required: false
  repository-url: # Canonical alias for `repository_url`
    description: The repository URL to use
    required: false
  repository_url: # DEPRECATED ALIAS; TODO: Remove in v3+
    description: >-
      [DEPRECATED]
      The repository URL to use
    deprecationMessage: >-
      The inputs have been normalized to use kebab-case.
      Use `repository-url` instead.
    required: false
    default: https://upload.pypi.org/legacy/
  packages-dir: # Canonical alias for `packages_dir`
    description: The target directory for distribution
    required: false
    # default: dist # TODO: uncomment once alias removed
  packages_dir: # DEPRECATED ALIAS; TODO: Remove in v3+
    description: >-
      [DEPRECATED]
      The target directory for distribution
    deprecationMessage: >-
      The inputs have been normalized to use kebab-case.
      Use `packages-dir` instead.
    required: false
    default: dist
  verify-metadata: # Canonical alias for `verify_metadata`
    description: Check metadata before uploading
    required: false
    # default: 'true' # TODO: uncomment once alias removed
  verify_metadata: # DEPRECATED ALIAS; TODO: Remove in v3+
    description: >-
      [DEPRECATED]
      Check metadata before uploading
    deprecationMessage: >-
      The inputs have been normalized to use kebab-case.
      Use `verify-metadata` instead.
    required: false
    default: "true"
  skip-existing: # Canonical alias for `skip_existing`
    description: >-
      Do not fail if a Python package distribution
      exists in the target package index
    required: false
    # default: 'false' # TODO: uncomment once alias removed
  skip_existing: # DEPRECATED ALIAS; TODO: Remove in v3+
    description: >-
      [DEPRECATED]
      Do not fail if a Python package distribution
      exists in the target package index
    deprecationMessage: >-
      The inputs have been normalized to use kebab-case.
      Use `skip-existing` instead.
    required: false
    default: "false"
  verbose:
    description: Show verbose output.
    required: false
    default: "false"
  print-hash: # Canonical alias for `print_hash`
    description: Show hash values of files to be uploaded
    required: false
    # default: 'false' # TODO: uncomment once alias removed
  print_hash: # DEPRECATED ALIAS; TODO: Remove in v3+
    description: >-
      [DEPRECATED]
      Show hash values of files to be uploaded
    deprecationMessage: >-
      The inputs have been normalized to use kebab-case.
      Use `print-hash` instead.
    required: false
    default: "false"
branding:
  color: yellow
  icon: upload-cloud
runs:
  using: docker
  image: Dockerfile
  args:
    - ${{ inputs.user }}
    - ${{ inputs.password }}
    - ${{ inputs.repository-url }}
    - ${{ inputs.packages-dir }}
    - ${{ inputs.verify-metadata }}
    - ${{ inputs.skip-existing }}
    - ${{ inputs.verbose }}
    - ${{ inputs.print-hash }}
Use `print-hash` instead. required: false default: "false" branding: color: yellow icon: upload-cloud runs: using: docker image: Dockerfile args: - ${{ inputs.user }} - ${{ inputs.password }} - ${{ inputs.repository-url }} - ${{ inputs.packages-dir }} - ${{ inputs.verify-metadata }} - ${{ inputs.skip-existing }} - ${{ inputs.verbose }} - ${{ inputs.print-hash }} github-actions-models-0.14.0/tests/sample-actions/gh-action-sigstore-python.yml000064400000000000000000000131351046102023000257430ustar 00000000000000# https://github.com/sigstore/gh-action-sigstore-python/blob/b3690e3a279c94669b1e9e4e1e29317cdc7a52d5/action.yml # Copyright 2022 The Sigstore Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. name: "gh-action-sigstore-python" author: "Sigstore Authors " description: "Use sigstore-python to sign Python packages" inputs: inputs: description: "the files to sign, whitespace separated" required: true default: "" identity-token: description: "the OIDC identity token to use" required: false default: "" oidc-client-id: description: "the custom OpenID Connect client ID to use during OAuth2" required: false default: "" oidc-client-secret: description: "the custom OpenID Connect client secret to use during OAuth2" required: false default: "" signature: description: "write a single signature to the given file; does not work with multiple input files" required: false default: "" certificate: description: "write a single certificate to the given file; does not work with multiple input files" required: false default: "" bundle: description: "write a single Sigstore bundle to the given file; does not work with multiple input files" required: false default: "" fulcio-url: description: "the Fulcio instance to use (conflicts with `staging`)" required: false default: "" rekor-url: description: "the Rekor instance to use (conflicts with `staging`)" required: false default: "" ctfe: description: "a PEM-encoded public key for the CT log (conflicts with `staging`)" required: false default: "" rekor-root-pubkey: description: "a PEM-encoded root public key for Rekor itself (conflicts with `staging`)" required: false default: "" staging: description: "use sigstore's staging instances, instead of the default production instances" required: false default: false verify: description: "verify the generated signatures after signing" required: false default: false verify-cert-identity: description: | verify the identity in the signing certificate's Subject Alternative Name required if `verify` is enabled; has no effect otherwise. required: false default: "" verify-oidc-issuer: description: | verify the issuer extension of the signing certificate required if `verify` is enabled; has no effect otherwise. 
github-actions-models-0.14.0/tests/sample-actions/setup-python.yml

# https://github.com/actions/setup-python/blob/e9d6f990972a57673cdb72ec29e19d42ba28880f/action.yml
---
name: "Setup Python"
description: "Set up a specific version of Python and add the command-line tools to the PATH."
author: "GitHub"
inputs:
  python-version:
    description: "Version range or exact version of Python or PyPy to use, using SemVer's version range syntax. Reads from .python-version if unset."
  python-version-file:
    description: "File containing the Python version to use. Example: .python-version"
  cache:
    description: "Used to specify a package manager for caching in the default directory. Supported values: pip, pipenv, poetry."
    required: false
  architecture:
    description: "The target architecture (x86, x64) of the Python or PyPy interpreter."
  check-latest:
    description: "Set this option if you want the action to check for the latest available version that satisfies the version spec."
    default: false
  token:
    description: "The token used to authenticate when fetching Python distributions from https://github.com/actions/python-versions. When running this action on github.com, the default value is sufficient. When running on GHES, you can pass a personal access token for github.com if you are experiencing rate limiting."
    default: ${{ github.server_url == 'https://github.com' && github.token || '' }}
  cache-dependency-path:
    description: "Used to specify the path to dependency files. Supports wildcards or a list of file names for caching multiple dependencies."
  update-environment:
    description: "Set this option if you want the action to update environment variables."
    default: true
  allow-prereleases:
    description: "When 'true', a version range passed to 'python-version' input will match prerelease versions if no GA versions are found. Only 'x.y' version range is supported for CPython."
    default: false
outputs:
  python-version:
    description: "The installed Python or PyPy version. Useful when given a version range as input."
  cache-hit:
    description: "A boolean value to indicate a cache entry was found"
  python-path:
    description: "The absolute path to the Python or PyPy executable."
runs:
  using: "node20"
  main: "dist/setup/index.js"
  post: "dist/cache-save/index.js"
  post-if: success()
branding:
  icon: "code"
  color: "yellow"
github-actions-models-0.14.0/tests/sample-dependabot/v2/pip-audit.yml

# https://github.com/pypa/pip-audit/blob/cea0e0276f1208cfcbda0c5d69b4fc0c084747b2/.github/dependabot.yml

version: 2
updates:
  - package-ecosystem: pip
    directory: /
    schedule:
      interval: daily

  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: daily

github-actions-models-0.14.0/tests/sample-dependabot/v2/sigstore-python.yml

# https://github.com/sigstore/sigstore-python/blob/e0168a781b87d059fde05dfdfe0ce82e564ce095/.github/dependabot.yml

version: 2
updates:
  - package-ecosystem: pip
    directory: /
    schedule:
      interval: daily

  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: daily
    open-pull-requests-limit: 99
    rebase-strategy: "disabled"
    groups:
      actions:
        patterns:
          - "*"

  - package-ecosystem: github-actions
    directory: .github/actions/upload-coverage/
    schedule:
      interval: daily
    open-pull-requests-limit: 99
    rebase-strategy: "disabled"
    groups:
      actions:
        patterns:
          - "*"
github-actions-models-0.14.0/tests/sample-workflows/adafruit-circuitpython-run-tests.yml000064400000000000000000000042361046102023000277600ustar 00000000000000
# https://github.com/adafruit/circuitpython/blob/24a8927d0ce1d77addd584bf809ab73cb0386e75/.github/workflows/run-tests.yml
name: Run tests

on:
  workflow_call:
    inputs:
      cp-version:
        required: true
        type: string

jobs:
  run:
    runs-on: ubuntu-24.04
    strategy:
      fail-fast: false
      matrix:
        test: [all, mpy, native, native_mpy]
    env:
      CP_VERSION: ${{ inputs.cp-version }}
      MICROPY_CPYTHON3: python3.12
      MICROPY_MICROPYTHON: ../ports/unix/build-coverage/micropython
      TEST_all:
      TEST_mpy: --via-mpy -d basics float micropython
      TEST_native: --emit native
      TEST_native_mpy: --via-mpy --emit native -d basics float micropython
    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          submodules: false
          show-progress: false
          fetch-depth: 1
      - name: Set up python
        uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - name: Set up submodules
        uses: ./.github/actions/deps/submodules
        with:
          target: tests
      - name: Set up external
        if: matrix.test == 'all'
        uses: ./.github/actions/deps/external
      - name: Set up mpy-cross
        uses: ./.github/actions/mpy_cross
        with:
          cp-version: ${{ inputs.cp-version }}
      - name: Build unix port
        run: make -C ports/unix VARIANT=coverage -j4
      - name: Run tests
        run: ./run-tests.py -j4 ${{ env[format('TEST_{0}', matrix.test)] }}
        working-directory: tests
      - name: Print failure info
        run: ./run-tests.py -j4 --print-failures
        if: failure()
        working-directory: tests
      # Not working after MicroPython v1.23 merge.
      # - name: Build native modules
      #   if: matrix.test == 'all'
      #   run: |
      #     make -C examples/natmod/features1
      #     make -C examples/natmod/features2
      #     make -C examples/natmod/heapq
      #     make -C examples/natmod/random
      #     make -C examples/natmod/re
      # - name: Test native modules
      #   if: matrix.test == 'all'
      #   run: ./run-natmodtests.py extmod/{heapq*,random*,re*}.py
      #   working-directory: tests
github-actions-models-0.14.0/tests/sample-workflows/false-condition.yml000064400000000000000000000002641046102023000243700ustar 00000000000000
# https://github.com/woodruffw/zizmor/issues/209
name: Name
on: [push]
jobs:
  build:
    runs-on: ubuntu-22.04
    steps:
      - name: Step
        if: false
        run: echo
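The `false-condition.yml` fixture above pins down a parsing corner case: a bare `if: false` is a YAML boolean, not a string (see the linked zizmor issue). A sketch of running any workflow fixture through the model, assuming a `Workflow` type with a `jobs` map; the `indexmap` + `serde` dependencies in Cargo.toml suggest an order-preserving `IndexMap`, but both the type path and the field name are assumptions here.

use github_actions_models::workflow::Workflow;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let src = std::fs::read_to_string("tests/sample-workflows/false-condition.yml")?;
    let workflow: Workflow = serde_yaml::from_str(&src)?;
    // The `jobs` field is assumed to be an ordered map keyed by job id.
    println!("parsed {} job(s)", workflow.jobs.len());
    Ok(())
}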
github-actions-models-0.14.0/tests/sample-workflows/gh-action-sigstore-python-selftest.yml000064400000000000000000000261641046102023000301730ustar 00000000000000
# https://github.com/sigstore/gh-action-sigstore-python/blob/b3690e3a279c94669b1e9e4e1e29317cdc7a52d5/.github/workflows/selftest.yml
name: Self-test

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:
  workflow_call:

permissions:
  id-token: write

jobs:
  selftest:
    strategy:
      matrix:
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    runs-on: ${{ matrix.os }}
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        if: ${{ matrix.os != 'ubuntu-latest' }}
        with:
          python-version: "3.x"
      - name: Sign artifact and publish signature
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          internal-be-careful-debug: true
      - name: Check outputs
        shell: bash
        run: |
          [[ -f ./test/artifact.txt.sigstore ]] || exit 1

  selftest-release-signing-artifacts-no-op:
    strategy:
      matrix:
        os:
          - ubuntu-latest
          - macos-latest
          - windows-latest
    runs-on: ${{ matrix.os }}
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        if: ${{ matrix.os != 'ubuntu-latest' }}
        with:
          python-version: "3.x"
      - name: Sign artifact and publish signature
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          # The trigger for this test is not a release, so this has no effect
          # (but does not break the workflow either).
          release-signing-artifacts: true
          internal-be-careful-debug: true
      - name: Check outputs
        shell: bash
        run: |
          [[ -f ./test/artifact.txt.sigstore ]] || exit 1

  selftest-xfail-invalid-inputs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        input:
          # We forbid inputs that look like flags
          - "--this-should-not-work"
          # We fail if the input doesn't exist
          - "/tmp/extremely-nonexistent-file"
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifact and publish signature
        continue-on-error: true
        uses: ./
        id: sigstore-python
        with:
          inputs: ${{ matrix.input }}
          internal-be-careful-debug: true
      - name: Check failure
        env:
          XFAIL: ${{ steps.sigstore-python.outcome == 'failure' }}
          JOB_NAME: ${{ github.job }}
        run: |
          echo "xfail ${JOB_NAME}: ${XFAIL}"

          [[ "${XFAIL}" == "true" ]] || { >&2 echo "expected step to fail"; exit 1; }

  selftest-staging:
    runs-on: ubuntu-latest
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifact and publish signature
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          staging: true
          internal-be-careful-debug: true
      - name: Check outputs
        run: |
          [[ -f ./test/artifact.txt.sigstore ]] || exit 1

  selftest-glob:
    runs-on: ubuntu-latest
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifacts and publish signatures
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/*.txt
          staging: true
          internal-be-careful-debug: true
      - name: Check outputs
        run: |
          [[ -f ./test/artifact.txt.sigstore ]] || exit 1
          [[ -f ./test/artifact1.txt.sigstore ]] || exit 1
          [[ -f ./test/artifact2.txt.sigstore ]] || exit 1

  selftest-xfail-glob-input-expansion:
    runs-on: ubuntu-latest
    env:
      TEST_DIR: test
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifacts and publish signatures
        continue-on-error: true
        uses: ./
        id: sigstore-python
        with:
          # This should fail since we should never directly expand ${TEST_DIR};
          # the user should have to pre-expand it for us.
          inputs: ./${TEST_DIR}/*.txt
          staging: true
          internal-be-careful-debug: true
      - name: Check failure
        env:
          XFAIL: ${{ steps.sigstore-python.outcome == 'failure' }}
          JOB_NAME: ${{ github.job }}
        run: |
          echo "xfail ${JOB_NAME}: ${XFAIL}"

          [[ "${XFAIL}" == "true" ]] || { >&2 echo "expected step to fail"; exit 1; }

  selftest-glob-multiple:
    runs-on: ubuntu-latest
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifacts and publish signatures
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact*.txt ./test/another*.txt ./test/subdir/*.txt
          staging: true
          internal-be-careful-debug: true
      - name: Check outputs
        run: |
          [[ -f ./test/artifact.txt.sigstore ]] || exit 1
          [[ -f ./test/artifact1.txt.sigstore ]] || exit 1
          [[ -f ./test/artifact2.txt.sigstore ]] || exit 1
          [[ -f ./test/another1.txt.sigstore ]] || exit 1
          [[ -f ./test/another2.txt.sigstore ]] || exit 1
          [[ -f ./test/subdir/hello1.txt.sigstore ]] || exit 1
          [[ -f ./test/subdir/hello2.txt.sigstore ]] || exit 1
          [[ -f ./test/subdir/hello3.txt.sigstore ]] || exit 1

  selftest-upload-artifacts:
    runs-on: ubuntu-latest
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifact and publish signature
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          staging: true
          upload-signing-artifacts: true
          internal-be-careful-debug: true
      - uses: actions/download-artifact@v4
        with:
          name: "signing-artifacts-${{ github.job }}"
          path: ./test/uploaded
      - name: Verify presence of uploaded files
        run: |
          [[ -f ./artifact.txt ]] || exit 1
          [[ -f ./artifact.txt.sigstore ]] || exit 1
        working-directory: ./test/uploaded

  selftest-custom-paths:
    runs-on: ubuntu-latest
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifact and publish signature
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          signature: ./test/custom_signature.sig
          certificate: ./test/custom_certificate.crt
          bundle: ./test/custom_bundle.sigstore
          staging: true
          internal-be-careful-debug: true
      - name: Check outputs
        run: |
          [[ -f ./test/custom_signature.sig ]] || exit 1
          [[ -f ./test/custom_certificate.crt ]] || exit 1
          [[ -f ./test/custom_bundle.sigstore ]] || exit 1

  selftest-verify:
    runs-on: ubuntu-latest
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifact and publish signature
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          verify: true
          verify-cert-identity: https://github.com/sigstore/gh-action-sigstore-python/.github/workflows/selftest.yml@${{ github.ref }}
          verify-oidc-issuer: https://token.actions.githubusercontent.com
          staging: true
          internal-be-careful-debug: true

  selftest-xfail-verify-missing-options:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        config:
          # fails if both verify-cert-identity and verify-oidc-issuer are missing
          - verify: true
          # fails if either is missing
          - verify: true
            verify-oidc-issuer: https://token.actions.githubusercontent.com
          - verify: true
            verify-cert-identity: https://github.com/sigstore/gh-action-sigstore-python/.github/workflows/selftest.yml@${{ github.ref }}
          # fails if either option is passed while verification is disabled
          - verify: false
            verify-oidc-issuer: https://token.actions.githubusercontent.com
          - verify: false
            verify-cert-identity: https://github.com/sigstore/gh-action-sigstore-python/.github/workflows/selftest.yml@${{ github.ref }}
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Sign artifact and publish signature
        continue-on-error: true
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          verify: ${{ matrix.config.verify }}
          verify-oidc-issuer: ${{ matrix.config.verify-oidc-issuer }}
          verify-cert-identity: ${{ matrix.config.verify-cert-identity }}
          staging: true
          internal-be-careful-debug: true
      - name: Check failure
        env:
          XFAIL: ${{ steps.sigstore-python.outcome == 'failure' }}
          JOB_NAME: ${{ github.job }}
        run: |
          echo "xfail ${JOB_NAME}: ${XFAIL}"

          [[ "${XFAIL}" == "true" ]] || { >&2 echo "expected step to fail"; exit 1; }

  selftest-identity-token:
    runs-on: ubuntu-latest
    if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
    steps:
      - uses: actions/checkout@v4
      - name: Get OIDC token
        id: get-oidc-token
        run: |
          identity_token=$( \
            curl -H \
              "Authorization: bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" \
              "$ACTIONS_ID_TOKEN_REQUEST_URL&audience=sigstore" \
            | jq -r .value \
          )
          echo "identity-token=$identity_token" >> $GITHUB_OUTPUT
        shell: bash
      - name: Sign artifact and publish signature
        uses: ./
        id: sigstore-python
        with:
          inputs: ./test/artifact.txt
          identity-token: ${{ steps.get-oidc-token.outputs.identity-token }}
          staging: true
          internal-be-careful-debug: true

  all-selftests-pass:
    if: always()
    needs:
      - selftest
      - selftest-release-signing-artifacts-no-op
      - selftest-xfail-invalid-inputs
      - selftest-staging
      - selftest-glob
      - selftest-glob-multiple
      - selftest-upload-artifacts
      - selftest-custom-paths
      - selftest-verify
      - selftest-xfail-verify-missing-options
      - selftest-identity-token
    runs-on: ubuntu-latest
    steps:
      - name: check test jobs
        if: (github.event_name != 'pull_request') || !github.event.pull_request.head.repo.fork
        uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2
        with:
          jobs: ${{ toJSON(needs) }}
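A workflow like the self-test above mixes `uses:` steps and `run:` steps freely, which is the main structural distinction any model of a step has to make. A model-agnostic sketch of that distinction using `serde_yaml::Value` directly, so no crate API is assumed at all:

use serde_yaml::Value;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let src = std::fs::read_to_string(
        "tests/sample-workflows/gh-action-sigstore-python-selftest.yml",
    )?;
    let doc: Value = serde_yaml::from_str(&src)?;
    let jobs = doc["jobs"].as_mapping().expect("workflows have a jobs map");
    for (job_id, job) in jobs {
        // Each step carries either a `uses:` key (action step) or a `run:`
        // key (shell step), never both.
        let steps = job["steps"].as_sequence().map(|s| s.as_slice()).unwrap_or(&[]);
        let uses = steps.iter().filter(|s| s.get("uses").is_some()).count();
        let runs = steps.iter().filter(|s| s.get("run").is_some()).count();
        println!("{:?}: {} uses-steps, {} run-steps", job_id, uses, runs);
    }
    Ok(())
}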
github-actions-models-0.14.0/tests/sample-workflows/git-annex-built-windows.yaml000064400000000000000000000365271046102023000261630ustar 00000000000000
# https://github.com/datalad/git-annex/blob/58898b7a9185a94852ac059c3ff1cc3f2fd6deef/.github/workflows/build-windows.yaml
name: Build git-annex on Windows

on:
  # Trigger the workflow on pull requests
  pull_request:
    paths:
      - ".github/workflows/build-windows.yaml"
      - "patches/*.patch"
  schedule:
    - cron: "30 03 * * *"
  workflow_dispatch:
    inputs:
      commitish:
        description: The upstream commitish to build
      pr:
        description: The number of the PR to build

defaults:
  run:
    shell: bash

env:
  LANG: C.utf-8

jobs:
  build-package:
    runs-on: windows-2019
    outputs:
      build-version: ${{ steps.build-version.outputs.version }}
    steps:
      - name: Checkout this repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Create pending PR status
        if: github.event.inputs.pr != ''
        run: |
          .github/workflows/tools/set-pr-status \
            "${{ github.event.inputs.pr }}" \
            Windows \
            build-package \
            pending
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Setup Haskell
        uses: haskell/actions/setup@v2
        with:
          enable-stack: true
          stack-no-global: true

      - name: Handle long filenames
        run: git config --system core.longpaths true

      - name: Determine git-annex ref to build
        run: |
          . .github/workflows/tools/set-build-commit
          echo "BUILD_COMMIT=$BUILD_COMMIT" >> "$GITHUB_ENV"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          INPUT_PR: ${{ github.event.inputs.pr }}
          INPUT_COMMITISH: ${{ github.event.inputs.commitish }}

      - name: Check out source files
        run: |
          # The goal here is for $BUILD_COMMIT to be the HEAD (necessary for
          # git-annex's version detection to use the correct git commit) with
          # the .github/, clients/, and patches/ trees from master — or
          # whatever ref is being used as the workflow source — also available.
          git reset --soft "$BUILD_COMMIT"
          # Avoid checking out unnecessary files with paths that are invalid on
          # Windows.
          git ls-tree --name-only HEAD | grep -v '^doc$' | xargs git checkout HEAD
          git checkout HEAD doc/license ':(glob)doc/*.mdwn' ':(glob)doc/logo*'
          git checkout "$GITHUB_SHA" -- .github clients patches

      - name: Get build version
        id: build-version
        run: |
          version="$(git describe "$BUILD_COMMIT" | sed -e 's/-/+git/')"
          arch=x64
          echo "Building $version"
          echo "version=${version}_$arch" >> "$GITHUB_OUTPUT"

      - name: Apply local patches
        run: |
          .github/workflows/tools/apply-patches patches ${{ github.event_name }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Download and "install" libmagic for windows
        run: |
          gh release download -R datalad/file-windows -p file-windows-dist.zip
          unzip file-windows-dist.zip
          cp libmagic-1.dll libmagic.dll
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Choose stack config and enable building with magic
        run: |
          # Need to use that LTS ATM on Windows
          # ref: https://github.com/datalad/git-annex/issues/168#issuecomment-1665910564
          cp stack-lts-18.13.yaml stack.yaml
          perl -pli -e 's/magicmime: false/magicmime: true/' stack.yaml

      - name: Set UPGRADE_LOCATION
        run: |
          # This tells git-annex where to upgrade itself from.
          echo "UPGRADE_LOCATION=http://downloads.kitenet.net/git-annex/windows/current/git-annex-installer.exe" >> "$GITHUB_ENV"

      - name: Run stack --version
        run: stack --version

      - name: stack setup
        run: stack setup

      # At this point, stack.yaml.lock exists, so we can activate the cache
      - name: Enable Stack cache
        uses: actions/cache@v4
        with:
          path: 'C:\sr\snapshots'
          key: cache-stack-windows-${{ hashFiles('stack.yaml.lock') }}-${{ hashFiles('git-annex.cabal') }}
          restore-keys: |
            cache-stack-windows-

      - name: Build dependencies
        run: stack build --only-dependencies --extra-include-dirs=$PWD --extra-lib-dirs=$PWD

      - name: Update version info for git rev being built.
        run: |
          mkdir -p dist
          stack ghc --no-haddock Build/BuildVersion.hs
          ./Build/BuildVersion > dist/build-version

      - name: Build git-annex
        run: stack install --no-haddock --local-bin-path .

      - name: Build the installer
        run: |
          stack ghc --no-haddock --package nsis Build/NullSoftInstaller.hs
          TMP="$PWD" ./Build/NullSoftInstaller

      - name: Add version to installer name
        run: |
          mv git-annex-installer.exe \
             git-annex-installer_"${{ steps.build-version.outputs.version }}".exe

      - name: Upload packages
        uses: actions/upload-artifact@v4
        with:
          name: git-annex-windows-installer_${{ steps.build-version.outputs.version }}
          path: |
            git-annex[-_]*.*
            dist/build-version

      - name: Create new release
        if: github.event.inputs.commitish != ''
        run: |
          printf '[DEBUG] INPUT_COMMITISH=%b\n' "$INPUT_COMMITISH"
          if git rev-parse refs/tags/"$INPUT_COMMITISH" &> /dev/null
          then
              echo "[INFO] Building a tag; uploading assets to release ..."
              echo '[DEBUG] BEGIN gh release list'
              gh release list | tee releases.txt | cat -v
              echo '[DEBUG] END gh release list'
              if grep -q "^$INPUT_COMMITISH\b" releases.txt
              then
                  echo "[INFO] Release already exists; uploading assets"
                  gh release upload "$INPUT_COMMITISH" git-annex[-_]*.*
              else
                  echo "[INFO] Creating release"
                  gh release create \
                    --notes "This is an unofficial release build provided by the DataLad team." \
                    "$INPUT_COMMITISH" git-annex[-_]*.*
              fi
          else
              echo "[INFO] Not building a tag; no release to make"
          fi
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          INPUT_COMMITISH: ${{ github.event.inputs.commitish }}

      - name: Set final PR status
        if: always() && github.event.inputs.pr != ''
        run: |
          .github/workflows/tools/set-pr-status \
            "${{ github.event.inputs.pr }}" \
            Windows \
            build-package \
            "${{ job.status }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Send e-mail on failed run
        if: failure() && contains(fromJSON('["schedule", "workflow_dispatch"]'), github.event_name)
        uses: dawidd6/action-send-mail@v3
        with:
          server_address: ${{ secrets.NOTIFY_SMTP_HOST }}
          server_port: ${{ secrets.NOTIFY_SMTP_PORT }}
          username: ${{ secrets.NOTIFY_SMTP_USERNAME }}
          password: ${{ secrets.NOTIFY_SMTP_PASSWORD }}
          from: GitHub Actions Notifications
          to: ${{ secrets.NOTIFY_RECIPIENT }}
          subject: "[${{ github.repository }}] Build on Windows failed!"
          body: |
            A build (via ${{ github.event_name }}) of git-annex for Windows failed!
            See for more information.

  test-annex:
    runs-on: ${{ matrix.os }}
    needs: build-package
    strategy:
      matrix:
        flavor: ["normal", "custom-config1"]
        os: [windows-2019]
      fail-fast: false
    steps:
      - name: Checkout this repository
        uses: actions/checkout@v4

      - name: Create pending PR status
        if: github.event.inputs.pr != ''
        run: |
          .github/workflows/tools/set-pr-status \
            "${{ github.event.inputs.pr }}" \
            Windows \
            "test-annex (${{ matrix.flavor }}, ${{ matrix.os }})" \
            pending
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Download git-annex package
        uses: actions/download-artifact@v4
        with:
          name: git-annex-windows-installer_${{ needs.build-package.outputs.build-version }}

      - name: Install git-annex package
        shell: powershell
        run: |
          ./git-annex-installer_*.exe /S

      - name: Check that magic files are installed
        run: |
          GIT_ANNEX_PATH="$(type -p git-annex)"
          echo "git-annex is at $GIT_ANNEX_PATH"
          GIT_ANNEX_DIR="$(dirname "$GIT_ANNEX_PATH")"
          ls "$GIT_ANNEX_DIR"/*magic* "$GIT_ANNEX_DIR"/*gnurx*
          ls "$GIT_ANNEX_DIR"/../share/misc/*magic*

      - name: Check git-annex version for MagicMime flag
        run: |
          git annex version | grep 'build flags:.*MagicMime'

      - name: Print git-annex version
        run: git annex version

      - name: Run tests
        run: |
          # Do it after we possibly setup HOME
          git config --global user.email "test@github.land"
          git config --global user.name "GitHub Almighty"

          test_opts=( )
          case "${{ matrix.flavor }}" in
            # For git-annex it causes only few temporary directories to be on the crippled FS,
            # while the main ones produced by git annex test reside in CWD, for which we use
            # $HOME
            custom-config1)
              test_opts=( --test-git-config annex.stalldetection=1KB/120s )
              ;;
            *)
              ;;
          esac

          cd $HOME
          export | grep -e crippledfs || :
          timeout 3600 git annex test "${test_opts[@]:-}"

      - name: Set final PR status
        if: always() && github.event.inputs.pr != ''
        run: |
          .github/workflows/tools/set-pr-status \
            "${{ github.event.inputs.pr }}" \
            Windows \
            "test-annex (${{ matrix.flavor }}, ${{ matrix.os }})" \
            "${{ job.status }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Send e-mail on failed run
        if: failure() && contains(fromJSON('["schedule", "workflow_dispatch"]'), github.event_name)
        uses: dawidd6/action-send-mail@v3
        with:
          server_address: ${{ secrets.NOTIFY_SMTP_HOST }}
          server_port: ${{ secrets.NOTIFY_SMTP_PORT }}
          username: ${{ secrets.NOTIFY_SMTP_USERNAME }}
          password: ${{ secrets.NOTIFY_SMTP_PASSWORD }}
          from: GitHub Actions Notifications
          to: ${{ secrets.NOTIFY_RECIPIENT }}
          subject: "[${{ github.repository }}] Tests of Windows build failed!"
          body: |
            The tests for a build (via ${{ github.event_name }}) of git-annex
            for Windows (flavor: ${{ matrix.flavor }}, OS: ${{ matrix.os }}) failed!
            See for more information.

  test-datalad:
    runs-on: windows-2019
    needs: build-package
    strategy:
      matrix:
        version: [master, maint, release]
      fail-fast: false
    steps:
      - name: Checkout this repository
        uses: actions/checkout@v4

      - name: Create pending PR status
        if: github.event.inputs.pr != ''
        run: |
          .github/workflows/tools/set-pr-status \
            "${{ github.event.inputs.pr }}" \
            Windows \
            "test-datalad (${{ matrix.version }})" \
            pending
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Download git-annex package
        uses: actions/download-artifact@v4
        with:
          name: git-annex-windows-installer_${{ needs.build-package.outputs.build-version }}

      - name: Install git-annex package
        shell: powershell
        run: |
          ./git-annex-installer_*.exe /S

      - name: Define test host alias
        shell: cmd
        run: |
          echo. >> %SYSTEMROOT%\System32\drivers\etc\hosts
          echo.127.0.0.1 datalad-test >> %SYSTEMROOT%\System32\drivers\etc\hosts
          echo.127.0.0.1 datalad-test2 >> %SYSTEMROOT%\System32\drivers\etc\hosts

      - name: OpenSSH server setup
        shell: powershell
        run: |
          mkdir downloads
          Invoke-WebRequest -Uri https://github.com/PowerShell/Win32-OpenSSH/releases/download/v7.6.1.0p1-Beta/OpenSSH-Win32.zip -OutFile downloads\openssh.zip
          7z x -o"downloads" downloads\openssh.zip

      - name: Install
        shell: cmd
        run: powershell.exe -ExecutionPolicy Bypass -File downloads\OpenSSH-Win32\install-sshd.ps1

      - name: Configure service
        shell: cmd
        run: powershell.exe New-NetFirewallRule -Name sshd -DisplayName 'OpenSSH Server (sshd)' -Enabled True -Direction Inbound -Protocol TCP -Action Allow -LocalPort 22

      - name: Keys in default place
        run: ssh-keygen -f ~/.ssh/id_rsa -N ""

      - name: Authorize access with these keys
        shell: cmd
        run: |
          copy %USERPROFILE%\.ssh\id_rsa.pub %USERPROFILE%\.ssh\authorized_keys

      - name: Configure SSH
        run: |
          (
            echo Host localhost
            echo StrictHostKeyChecking no
            echo Host datalad-test
            echo StrictHostKeyChecking no
            echo Host datalad-test2
            echo StrictHostKeyChecking no
          ) > "$USERPROFILE"/.ssh/config

      - name: Fire up service
        run: net start sshd

      - name: Test login
        run: |
          ssh -v localhost exit
          ssh datalad-test exit
          ssh datalad-test2 exit

      - name: Enable SSH tests
        run: echo DATALAD_TESTS_SSH=1 >> "$GITHUB_ENV"

      - name: Set up environment
        run: |
          git config --global user.email "test@github.land"
          git config --global user.name "GitHub Almighty"

      - name: Set up Python 3.8
        uses: actions/setup-python@v5
        with:
          python-version: 3.8

      - name: Install ${{ matrix.version }} Datalad
        run: |
          if [ "${{ matrix.version }}" = "release" ]; then
            commitish="$(gh api --jq .tag_name repos/datalad/datalad/releases/latest)"
          else
            commitish="${{ matrix.version }}"
          fi
          python -m pip install --upgrade pip
          pip install git+https://github.com/datalad/datalad@${commitish}
        env:
          # Authorize so that we don't run up against low API rate limits
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Install nose et al (just in case!)
        run: pip install nose pytest vcrpy mock

      - name: WTF!?
        run: datalad wtf

      # needed for ssh certs under ubuntu and tox.ini everywhere
      - name: Checkout datalad
        uses: actions/checkout@v4
        with:
          repository: datalad/datalad
          path: datalad
          fetch-depth: 1

      - name: Run datalad tests
        run: |
          mkdir -p __testhome__
          cd __testhome__
          python -m pytest -c ../datalad/tox.ini -s -v --pyargs datalad

      - name: Set final PR status
        if: always() && github.event.inputs.pr != ''
        run: |
          .github/workflows/tools/set-pr-status \
            "${{ github.event.inputs.pr }}" \
            Windows \
            "test-datalad (${{ matrix.version }})" \
            "${{ job.status }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

# vim:set et sts=2:
github-actions-models-0.14.0/tests/sample-workflows/guacsec-guac-ci.yml000064400000000000000000000220061046102023000242300ustar 00000000000000
# https://github.com/guacsec/guac/blob/85868eadbf6a287f9bf5f47d355ff944080d3845/.github/workflows/ci.yaml
#
# Copyright 2022 The GUAC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: ci

on:
  workflow_dispatch: # testing only, trigger manually to test it works
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
    types:
      - opened
      - synchronize
      - reopened

permissions:
  contents: read

jobs:
  test-integration:
    runs-on: ubuntu-latest
    name: CI for integration tests
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # tag=v3
      - name: setup-go
        uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # tag=v3.2.1
        with:
          go-version: "1.21"
      - uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
      - name: Install atlas
        uses: ariga/setup-atlas@d52cd13fed38eca914fa57071155a4644fd6f820 # v0.2
      - name: Setup the project
        run: go mod download
      - name: Run backends
        shell: bash
        run: |
          set -euo pipefail
          cd internal/testing/backend
          docker compose up -d
          sleep 10
          echo "backends started"
      - name: Run integration tests
        env:
          ENT_TEST_DATABASE_URL: "postgresql://guac:guac@localhost/guac?sslmode=disable"
        run: make integration-test

  test-unit:
    runs-on: ubuntu-latest
    name: CI for unit tests
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # tag=v3
      - name: setup-go
        uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # tag=v3.2.1
        with:
          go-version: "1.21"
      - uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
      - name: Install atlas
        uses: ariga/setup-atlas@d52cd13fed38eca914fa57071155a4644fd6f820 # v0.2
      - name: Setup the project
        run: go mod download
      - name: Run tests
        run: make test

  static-analysis:
    name: Static Analysis
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # tag=v3
      - name: setup-go
        uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # tag=v3.2.1
        with:
          go-version: "1.21"
      - name: Install atlas
        uses: ariga/setup-atlas@d52cd13fed38eca914fa57071155a4644fd6f820 # v0.2
      - name: Install formatter
        run: go install golang.org/x/tools/cmd/goimports@latest
      - name: Check format
        run: make fmt
      - name: Check that all generated code is up to date
        run: make generated_up_to_date

  lint:
    name: Lint
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # tag=v3
      - name: setup-go
        uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # tag=v5.1.0
        with:
          go-version: "1.21"
      - name: Install atlas
        uses: ariga/setup-atlas@d52cd13fed38eca914fa57071155a4644fd6f820 # v0.2
      - name: golangci-lint
        uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # tag=v3.2.0
        with:
          only-new-issues: true
          skip-cache: true
      - name: Check markdown format
        run: make format
      - name: Check that all linted text is up to date
        run: make generated_up_to_date
      - name: Run atlas Lint
        run: make atlas-lint

  end-to-end:
    name: E2E
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:15
        env:
          POSTGRES_USER: guac
          POSTGRES_PASSWORD: guac
          POSTGRES_DB: guac
        ports:
          - 5432:5432
        options: >-
          --health-cmd "pg_isready -U guac -d guac"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed
        with:
          go-version: "~1.21"
      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: "3.10"
      - name: Install PostgreSQL client tools
        run: |
          sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
          wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
          sudo apt-get update
          sudo apt-get install -y postgresql-client-15
      - name: Set up NATS Server with JetStream
        run: |
          docker run -d --name nats-server -p 4222:4222 -p 8222:8222 nats:2.9.17 -js
      - name: Wait for PostgreSQL to be ready
        run: |
          until pg_isready -h localhost -p 5432 -U guac -d guac; do
            echo "Waiting for PostgreSQL to be ready..."
            sleep 5
          done
      - name: Run e2e tests
        run: |
          ./internal/testing/e2e/e2e
        env:
          POSTGRES_USER: guac
          POSTGRES_PASSWORD: guac
          POSTGRES_DB: guac
          POSTGRES_HOST: localhost
          POSTGRES_PORT: 5432
          PGPASSWORD: guac
          GUAC_DIR: /home/runner/work/guac/guac

  tilt-ci:
    name: Run 'tilt ci'
    runs-on:
      labels: ubuntu-latest
    timeout-minutes: 30
    steps:
      - name: Install tools
        shell: bash
        run: |
          sudo apt-get install -y git uuid-runtime

          # tilt -- https://raw.githubusercontent.com/tilt-dev/tilt/master/scripts/install.sh
          case $(uname -m) in
            aarch64) ARCH=arm64;;
            armv7l) ARCH=arm;;
            *) ARCH=$(uname -m);;
          esac
          VERSION=0.32.0
          curl -fsSL https://github.com/tilt-dev/tilt/releases/download/v$VERSION/tilt.$VERSION.linux.$ARCH.tar.gz | tar -xzvC /usr/local/bin tilt

          # helm
          case $(uname -m) in
            aarch64) ARCH=arm64;;
            armv7l) ARCH=arm;;
            x86_64) ARCH=amd64;;
            *) ARCH=$(uname -m);;
          esac
          VERSION=3.12.0
          curl -fsSL https://get.helm.sh/helm-v$VERSION-linux-$ARCH.tar.gz | tar --strip-components=1 -xzvC /usr/local/bin linux-$ARCH/helm

          # ctlptl - https://github.com/tilt-dev/ctlptl/blob/main/INSTALL.md
          CTLPTL_VERSION="0.8.19"
          curl -fsSL https://github.com/tilt-dev/ctlptl/releases/download/v$CTLPTL_VERSION/ctlptl.$CTLPTL_VERSION.linux.x86_64.tar.gz | sudo tar -xzv -C /usr/local/bin ctlptl

          # kind - https://kind.sigs.k8s.io/docs/user/quick-start/#installing-from-release-binaries
          # For AMD64 / x86_64
          [ $(uname -m) = x86_64 ] && curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.19.0/kind-linux-amd64
          # For ARM64
          [ $(uname -m) = aarch64 ] && curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.19.0/kind-linux-arm64
          chmod +x ./kind
          sudo mv ./kind /usr/local/bin/kind
      - name: Install GoReleaser
        uses: goreleaser/goreleaser-action@286f3b13b1b49da4ac219696163fb8c1c93e1200 # v6.0.0
        with:
          install-only: true
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # tag=v3
      - name: setup-go
        uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # tag=v3.2.1
        with:
          go-version: "1.21"
      - uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
      - name: Setup the project
        run: go mod download
      - name: Setup kind cluster
        shell: bash
        run: |
          ctlptl create cluster kind --registry=ctlptl-registry
      - name: Run 'tilt ci'
        shell: bash
        run: |
          tilt ci
      - name: Diag after failure
        if: ${{ failure() }}
        shell: bash
        run: |
          echo "K8S CLUSTER STATUS"
          kubectl get all
          echo ""
          for pod in $(kubectl get pod | awk '$1 != "NAME" { print $1; }')
          do
            echo ""
            echo "=== DIAG POD ${pod} ==="
            echo ""
            kubectl describe "pod/${pod#pod/}" | sed 's,^, ,'
          done
github-actions-models-0.14.0/tests/sample-workflows/homebrew-core-automerge-triggers.yml000064400000000000000000000013121046102023000276570ustar 00000000000000
# https://github.com/Homebrew/homebrew-core/blob/ac74cb3f7dbdb2da2200886d5e740de124bab861/.github/workflows/automerge-triggers.yml
name: Trigger automerge

on:
  pull_request_review:
    types:
      - submitted
  pull_request_target:
    types:
      - unlabeled
      - ready_for_review

jobs:
  check:
    if: >
      github.repository_owner == 'Homebrew' &&
      ((github.event_name == 'pull_request_review' && github.event.review.state == 'approved') ||
       (github.event_name == 'pull_request_target' &&
        (github.event.action == 'ready_for_review' || github.event.label.name == 'automerge-skip')))
    runs-on: ubuntu-latest
    steps:
      - run: true # This suffices to trigger `automerge.yml`.
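The job-level condition in the fixture above is written as a YAML folded scalar (`if: >`), so a parser delivers it to a model as one string value; the line breaks and indentation in the file are presentation only. A small sketch showing what actually reaches a model, again with no crate API assumed:

use serde_yaml::Value;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let src = std::fs::read_to_string(
        "tests/sample-workflows/homebrew-core-automerge-triggers.yml",
    )?;
    let doc: Value = serde_yaml::from_str(&src)?;
    // After folding, `if` is a plain string; a model only needs the field
    // to accept a string (or a boolean, per the false-condition fixture).
    let cond = doc["jobs"]["check"]["if"].as_str().expect("condition is a string");
    println!("condition as the model sees it: {cond:?}");
    Ok(())
}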
github-actions-models-0.14.0/tests/sample-workflows/homebrew-core-dispatch-rebottle.yml000064400000000000000000000243551046102023000274700ustar 00000000000000
# https://github.com/Homebrew/homebrew-core/blob/8b4101be35fb7053e2026edcf1f7d218a5ac3379/.github/workflows/dispatch-rebottle.yml#L4
name: Dispatch rebottle (for all currently bottled OS versions)

run-name: Rebuild bottles of ${{ inputs.formula }}

on:
  workflow_dispatch:
    inputs:
      formula:
        description: Formula name
        required: true
      reason:
        description: Reason for rebottling
        required: true
      timeout:
        description: "Build timeout (in minutes, default: 60 minutes)"
        type: number
        default: 60
        required: false
      issue:
        description: Issue number, where comment on failure would be posted
        type: number
        required: false
      upload:
        description: "Upload built bottles? (default: false)"
        type: boolean
        default: false
        required: false
      fail-fast:
        description: "Fail immediately on a single OS version failure? (default: true)"
        type: boolean
        default: true
        required: false

# Intentionally the same as dispatch-build-bottle
concurrency: bottle-${{ github.event.inputs.formula }}

permissions:
  contents: read

env:
  HOMEBREW_DEVELOPER: 1
  HOMEBREW_GITHUB_ACTIONS: 1
  HOMEBREW_NO_AUTO_UPDATE: 1
  HOMEBREW_NO_INSTALL_FROM_API: 1
  HOMEBREW_NO_BUILD_ERROR_ISSUES: 1
  RUN_URL: ${{github.event.repository.html_url}}/actions/runs/${{github.run_id}}
  DISPATCH_REBOTTLE_SENDER: ${{ github.event.sender.login }}
  DISPATCH_REBOTTLE_FORMULA: ${{ inputs.formula }}
  DISPATCH_REBOTTLE_TIMEOUT: ${{ inputs.timeout }}
  DISPATCH_REBOTTLE_ISSUE: ${{ inputs.issue }}
  DISPATCH_REBOTTLE_UPLOAD: ${{ inputs.upload }}
  DISPATCH_REBOTTLE_REASON: ${{ inputs.reason }}

jobs:
  setup:
    runs-on: ubuntu-22.04
    container:
      image: ghcr.io/homebrew/ubuntu22.04:master
    outputs:
      runners: ${{steps.determine-runners.outputs.runners}}
    steps:
      - name: Set up Homebrew
        id: set-up-homebrew
        uses: Homebrew/actions/setup-homebrew@master
        with:
          core: true
          cask: false
          test-bot: false
      - name: Determine runners
        id: determine-runners
        run: brew determine-rebottle-runners "${DISPATCH_REBOTTLE_FORMULA}" "${DISPATCH_REBOTTLE_TIMEOUT}"

  bottle:
    permissions:
      contents: read
    needs: setup
    strategy:
      matrix:
        include: ${{fromJson(needs.setup.outputs.runners)}}
      fail-fast: ${{inputs.fail-fast}}
    runs-on: ${{matrix.runner}}
    container: ${{matrix.container}}
    timeout-minutes: ${{fromJson(inputs.timeout)}}
    defaults:
      run:
        shell: /bin/bash -e {0}
        working-directory: ${{matrix.workdir || github.workspace}}
    env:
      GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
      BOTTLES_DIR: ${{ matrix.workdir || github.workspace }}/bottles
    steps:
      - name: ${{inputs.formula}}
        id: print_details
        run: |
          echo sender="${DISPATCH_REBOTTLE_SENDER}"
          echo formula="${DISPATCH_REBOTTLE_FORMULA}"
          echo timeout="${DISPATCH_REBOTTLE_TIMEOUT}"
          echo issue="${DISPATCH_REBOTTLE_ISSUE}"
          echo upload="${DISPATCH_REBOTTLE_UPLOAD}"
          echo reason="${DISPATCH_REBOTTLE_REASON}"

      - name: Pre-test steps
        uses: Homebrew/actions/pre-build@master
        with:
          bottles-directory: ${{ env.BOTTLES_DIR }}

      - run: |
          brew test-bot --only-formulae --only-json-tab --skip-online-checks \
            --skip-dependents \
            "${DISPATCH_REBOTTLE_FORMULA}"
        working-directory: ${{ env.BOTTLES_DIR }}
        env:
          HOMEBREW_GITHUB_API_TOKEN: ${{secrets.GITHUB_TOKEN}}

      - name: Post-build steps
        if: always()
        uses: Homebrew/actions/post-build@master
        with:
          runner: ${{ matrix.runner }}
          bottles-directory: ${{ env.BOTTLES_DIR }}
          logs-directory: ${{ env.BOTTLES_DIR }}/logs

  upload:
    permissions:
      issues: write # for Homebrew/actions/post-comment
      contents: write # for Homebrew/actions/git-try-push
      packages: write # for brew pr-upload
      pull-requests: write # for gh pr
      attestations: write # for actions/attest-build-provenance
      id-token: write # for actions/attest-build-provenance
    runs-on: ubuntu-22.04
    needs: bottle
    if: inputs.upload
    container:
      image: ghcr.io/homebrew/ubuntu22.04:master
    defaults:
      run:
        shell: bash
    env:
      HOMEBREW_SIMULATE_MACOS_ON_LINUX: 1
      GH_REPO: ${{github.repository}}
      GH_NO_UPDATE_NOTIFIER: 1
      GH_PROMPT_DISABLED: 1
      BOTTLE_BRANCH: ${{github.actor}}/dispatch/${{inputs.formula}}/${{github.run_id}}
      BOTTLES_DIR: ${{ github.workspace }}/bottles
    steps:
      - name: Set up Homebrew
        id: set-up-homebrew
        uses: Homebrew/actions/setup-homebrew@master
        with:
          core: true
          cask: false
          test-bot: false

      - name: Download bottles from GitHub Actions
        uses: actions/download-artifact@v4
        with:
          pattern: bottles_*
          path: ${{ env.BOTTLES_DIR }}
          merge-multiple: true

      - name: Setup git
        id: git-user-config
        uses: Homebrew/actions/git-user-config@master
        with:
          username: ${{ (github.actor != 'github-actions[bot]' && github.actor) || 'BrewTestBot' }}

      - name: Set up commit signing
        uses: Homebrew/actions/setup-commit-signing@master
        with:
          signing_key: ${{ secrets.BREWTESTBOT_GPG_SIGNING_SUBKEY }}

      - name: Generate build provenance
        uses: actions/attest-build-provenance@v1
        with:
          subject-path: ${{ env.BOTTLES_DIR }}/*.tar.gz

      - name: Checkout branch for bottle commit
        working-directory: ${{steps.set-up-homebrew.outputs.repository-path}}
        run: git checkout -b "$BOTTLE_BRANCH" origin/master

      - name: Upload bottles to GitHub Packages
        id: upload
        env:
          HOMEBREW_GITHUB_PACKAGES_USER: brewtestbot
          HOMEBREW_GITHUB_PACKAGES_TOKEN: ${{secrets.HOMEBREW_CORE_GITHUB_PACKAGES_TOKEN}}
          HOMEBREW_GPG_PASSPHRASE: ${{ secrets.BREWTESTBOT_GPG_SIGNING_SUBKEY_PASSPHRASE }}
          BREWTESTBOT_NAME_EMAIL: "BrewTestBot <1589480+BrewTestBot@users.noreply.github.com>"
          HOMEBREW_CORE_PATH: ${{steps.set-up-homebrew.outputs.repository-path}}
        working-directory: ${{ env.BOTTLES_DIR }}
        run: |
          brew pr-upload --verbose --committer="$BREWTESTBOT_NAME_EMAIL" --root-url="https://ghcr.io/v2/homebrew/core" --debug
          echo "title=$(git -C "$HOMEBREW_CORE_PATH" log -1 --format='%s' "$BOTTLE_BRANCH")" >> "$GITHUB_OUTPUT"
          echo "head_sha=$(git -C "$HOMEBREW_CORE_PATH" rev-parse HEAD)" >> "$GITHUB_OUTPUT"

      - name: Push commits
        uses: Homebrew/actions/git-try-push@master
        with:
          token: ${{secrets.GITHUB_TOKEN}}
          directory: ${{steps.set-up-homebrew.outputs.repository-path}}
          branch: ${{env.BOTTLE_BRANCH}}
        env:
          GIT_COMMITTER_NAME: BrewTestBot
          GIT_COMMITTER_EMAIL: 1589480+BrewTestBot@users.noreply.github.com
          HOMEBREW_GPG_PASSPHRASE: ${{ secrets.BREWTESTBOT_GPG_SIGNING_SUBKEY_PASSPHRASE }}

      - name: Open PR with bottle commit
        id: create-pr
        uses: Homebrew/actions/create-pull-request@master
        with:
          token: ${{secrets.HOMEBREW_GITHUB_PUBLIC_REPO_TOKEN}}
          base: ${{github.ref}}
          head: ${{env.BOTTLE_BRANCH}}
          title: ${{steps.upload.outputs.title}}
          body: |
            Created by [`dispatch-rebottle.yml`](${{env.RUN_URL}})

            -----

            ${{env.DISPATCH_REBOTTLE_REASON}}
          labels: CI-published-bottle-commits
          reviewers: ${{github.actor}}

      - name: Enable automerge
        env:
          GH_TOKEN: ${{secrets.HOMEBREW_GITHUB_PUBLIC_REPO_TOKEN}}
          NODE_ID: ${{steps.create-pr.outputs.node_id}}
          SHA: ${{steps.upload.outputs.head_sha}}
          MUTATION: |-
            mutation ($input: EnablePullRequestAutoMergeInput!) {
              enablePullRequestAutoMerge(input: $input) {
                clientMutationId
              }
            }
        run: |
          gh api graphql \
            --field "input[pullRequestId]=$NODE_ID" \
            --field "input[expectedHeadOid]=$SHA" \
            --raw-field query="$MUTATION"

      - name: Approve PR
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PR: ${{steps.create-pr.outputs.number}}
        run: |
          gh api \
            --method POST \
            --header "Accept: application/vnd.github+json" \
            --header "X-GitHub-Api-Version: 2022-11-28" \
            "/repos/$GITHUB_REPOSITORY/pulls/$PR/reviews" \
            --field "event=APPROVE"

      - name: Wait until PR is merged
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PR: ${{ steps.create-pr.outputs.number }}
        run: |
          # Hold the `concurrency` lock for up to another 10 minutes while the PR has not yet been merged.
          sleep 300

          attempt=0
          max_attempts=5
          sleep_time=10
          while (( attempt < max_attempts ))
          do
            if jq --exit-status .merged_at
            then
              break
            fi < <(
              # We could use `gh pr view`, but that uses 2 API calls.
              gh api \
                --header "Accept: application/vnd.github+json" \
                --header "X-GitHub-Api-Version: 2022-11-28" \
                "/repos/$GITHUB_REPOSITORY/pulls/$PR"
            )
            sleep "$sleep_time"
            sleep_time=$(( sleep_time * 2 ))
            attempt=$(( attempt + 1 ))
          done

  comment:
    permissions:
      issues: write # for Homebrew/actions/post-comment
      pull-requests: write # for Homebrew/actions/post-comment
    needs: [bottle, upload]
    if: failure() && inputs.issue > 0
    runs-on: ubuntu-latest
    steps:
      - name: Post comment on failure
        uses: Homebrew/actions/post-comment@master
        with:
          token: ${{secrets.GITHUB_TOKEN}}
          issue: ${{inputs.issue}}
          body: ":x: @${{github.actor}} bottle request for ${{inputs.formula}} [failed](${{env.RUN_URL}})."
          bot_body: ":x: Bottle request for ${{inputs.formula}} [failed](${{env.RUN_URL}})."
          bot: BrewTestBot
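The `workflow_dispatch` trigger above declares typed inputs (`number`, `boolean`) alongside untyped ones, which is the other half of what a workflow model has to represent besides plain string inputs. A model-agnostic sketch listing each input's declared type:

use serde_yaml::Value;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let src = std::fs::read_to_string(
        "tests/sample-workflows/homebrew-core-dispatch-rebottle.yml",
    )?;
    let doc: Value = serde_yaml::from_str(&src)?;
    let inputs = doc["on"]["workflow_dispatch"]["inputs"]
        .as_mapping()
        .expect("dispatch inputs are a map");
    for (name, spec) in inputs {
        // Inputs without an explicit `type:` (like `formula`) default to strings.
        let ty = spec.get("type").and_then(Value::as_str).unwrap_or("string");
        println!("{:?}: {}", name, ty);
    }
    Ok(())
}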
github-actions-models-0.14.0/tests/sample-workflows/jazzband-tablib-docs-lint.yml000064400000000000000000000015251046102023000262430ustar 00000000000000
# https://raw.githubusercontent.com/jazzband/tablib/dcab406c553fc8b3c2e0aef955e9e8adea0590d8/.github/workflows/docs-lint.yml
name: Docs and lint

on: [push, pull_request, workflow_dispatch]

env:
  FORCE_COLOR: 1
  PIP_DISABLE_PIP_VERSION_CHECK: 1

permissions:
  contents: read

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        env:
          - TOXENV: docs
          - TOXENV: lint
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"
          cache: pip
          cache-dependency-path: "pyproject.toml"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade tox
      - name: Tox
        run: tox
        env: ${{ matrix.env }}
github-actions-models-0.14.0/tests/sample-workflows/letsencrypt-boulder-boulder-ci.yml000064400000000000000000000132051046102023000273420ustar 00000000000000
# https://github.com/letsencrypt/boulder/blob/e182d889b220421caefbf384b36467f771b5f8d3/.github/workflows/boulder-ci.yml
# Boulder CI test suite workflow
name: Boulder CI

# Controls when the action will run.
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
    branches:
      - main
      - release-branch-*
  pull_request:
    branches:
      - "**"
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
permissions:
  contents: read

jobs:
  # Main test jobs. This looks like a single job, but the matrix
  # items will multiply it. For example every entry in the
  # BOULDER_TOOLS_TAG list will run with every test. If there were two
  # tags and 5 tests there would be 10 jobs run.
  b:
    # The type of runner that the job will run on
    runs-on: ubuntu-20.04
    strategy:
      # When set to true, GitHub cancels all in-progress jobs if any matrix job fails. Default: true
      fail-fast: false
      # Test matrix.
      matrix:
        # Add additional docker image tags here and all tests will be run with the additional image.
        BOULDER_TOOLS_TAG:
          - go1.23.1_2024-09-05
        # Tests command definitions. Use the entire "docker compose" command you want to run.
        tests:
          # Run ./test.sh --help for a description of each of the flags.
          - "./t.sh --lints --generate"
          - "./t.sh --integration"
          # Testing Config Changes:
          # Config changes that have landed in main but not yet been applied to
          # production can be made in `test/config-next/.json`.
          #
          # Testing DB Schema Changes:
          # Database migrations in `sa/_db-next/migrations` are only performed
          # when `docker compose` is called using `-f docker-compose.yml -f
          # docker-compose.next.yml`.
          - "./tn.sh --integration"
          - "./t.sh --unit --enable-race-detection"
          - "./tn.sh --unit --enable-race-detection"
          - "./t.sh --start-py"
    env:
      # This sets the docker image tag for the boulder-tools repository to
      # use in tests. It will be set appropriately for each tag in the list
      # defined in the matrix.
      BOULDER_TOOLS_TAG: ${{ matrix.BOULDER_TOOLS_TAG }}
    # Sequence of tasks that will be executed as part of the job.
    steps:
      # Checks out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Docker Login
        # You may pin to the exact commit or the version.
        # uses: docker/login-action@f3364599c6aa293cdc2b8391b1b56d0c30e45c8a
        uses: docker/login-action@v3.3.0
        with:
          # Username used to log against the Docker registry
          username: ${{ secrets.DOCKER_USERNAME}}
          # Password or personal access token used to log against the Docker registry
          password: ${{ secrets.DOCKER_PASSWORD}}
          # Log out from the Docker registry at the end of a job
          logout: true
        continue-on-error: true

      # Print the env variable being used to pull the docker image. For
      # informational use.
      - name: Print BOULDER_TOOLS_TAG
        run: echo "Using BOULDER_TOOLS_TAG ${BOULDER_TOOLS_TAG}"

      # Pre-pull the docker containers before running the tests.
      - name: docker compose pull
        run: docker compose pull

      # Run the test matrix. This will run
      - name: "Run Test: ${{ matrix.tests }}"
        run: ${{ matrix.tests }}

  govulncheck:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
    steps:
      # Checks out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Setup Go
        uses: actions/setup-go@v5
        with:
          # When Go produces a security release, we want govulncheck to run
          # against the most recently released Go version.
          check-latest: true
          go-version: "stable"

      - name: Run govulncheck
        run: go run golang.org/x/vuln/cmd/govulncheck@latest ./...

  vendorcheck:
    runs-on: ubuntu-20.04
    strategy:
      # When set to true, GitHub cancels all in-progress jobs if any matrix job fails. Default: true
      fail-fast: false
      matrix:
        go-version: ["1.22.5"]
    steps:
      # Checks out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Setup Go ${{ matrix.go-version }}
        uses: actions/setup-go@v5
        with:
          go-version: ${{ matrix.go-version }}

      - name: Verify vendor
        shell: bash
        run: |
          go mod tidy
          go mod vendor
          git diff --exit-code

  # This is a utility build job to detect if the status of any of the
  # above jobs have failed and fail if so. It is needed so there can be
  # one static job name that can be used to determine success of the job
  # in GitHub branch protection.
  # It does not block on the result of govulncheck so that a new vulnerability
  # disclosure does not prevent any other PRs from being merged.
  boulder_ci_test_matrix_status:
    permissions:
      contents: none
    if: ${{ always() }}
    runs-on: ubuntu-latest
    name: Boulder CI Test Matrix
    needs:
      - b
      - vendorcheck
    steps:
      - name: Check boulder ci test matrix status
        if: ${{ needs.b.result != 'success' || needs.vendorcheck.result != 'success' }}
        run: exit 1
github-actions-models-0.14.0/tests/sample-workflows/pip-api-test.yml000064400000000000000000000062051046102023000236270ustar 00000000000000
# https://github.com/di/pip-api/blob/60691ed6bdc0c213253593de869bff1cf9195b81/.github/workflows/test.yml
name: Test

on: [push, pull_request]

concurrency:
  group: >-
    ${{ github.workflow }}-${{ github.ref_type }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

env:
  dists-artifact-name: python-package-distributions
  sdist-artifact-name-wildcard: pip-api-*.tar.gz

jobs:
  lint:
    name: Lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup python
        uses: actions/setup-python@v5
      - name: Install tox
        run: python -m pip install tox
      - name: Run linting
        run: python -m tox -e lint

  build-sdist:
    name: 📦 Build the source distribution
    runs-on: ubuntu-latest
    steps:
      - name: Grab the src from GH
        uses: actions/checkout@v4
      - name: Install `pypa/build` PEP 517 front-end
        run: python -m pip install 'build ~= 0.10.0'
      - name: 📦 Build an sdist
        run: python -m build --sdist
      - name: Verify that the artifact with expected name got created
        run: >-
          ls -1 dist/${{ env.sdist-artifact-name-wildcard }}
      - name: Store the distribution package
        uses: actions/upload-artifact@v3
        with:
          name: ${{ env.dists-artifact-name }}
          # NOTE: Exact expected file names are specified here
          # NOTE: as a safety measure — if anything weird ends
          # NOTE: up being in this dir or not all dists will be
          # NOTE: produced, this will fail the workflow.
          path: |
            dist/${{ env.sdist-artifact-name-wildcard }}
          retention-days: 15

  build-matrix:
    name: Build the test matrix
    needs: lint
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup python
        uses: actions/setup-python@v5
      - name: Install tox
        run: python -m pip install tox
      - id: set-matrix
        run: >-
          echo "matrix=$(python generate_matrix.py)" >> "${GITHUB_OUTPUT}"

  test:
    name: ${{ matrix.toxenv }}
    needs:
      - build-matrix
      - build-sdist
    runs-on: ubuntu-latest
    strategy:
      matrix: ${{ fromJson(needs.build-matrix.outputs.matrix) }}
    steps:
      - name: Retrieve the project source from an sdist inside the GHA artifact
        uses: re-actors/checkout-python-sdist@release/v1
        with:
          source-tarball-name: ${{ env.sdist-artifact-name-wildcard }}
          workflow-artifact-name: ${{ env.dists-artifact-name }}
      - name: Setup python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install tox
        run: python -m pip install tox
      - name: Run tests
        run: python -m tox -e ${{ matrix.toxenv }}

  check:
    if: always()
    needs:
      - test
    runs-on: ubuntu-latest
    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
github-actions-models-0.14.0/tests/sample-workflows/pip-audit-ci.yml000064400000000000000000000012401046102023000235720ustar 00000000000000
# https://github.com/pypa/pip-audit/blob/1fd67af0653a8e66b9470adab2e408a435632f19/.github/workflows/ci.yml
name: CI

on:
  push:
    branches:
      - main
  pull_request:
  schedule:
    - cron: "0 12 * * *"

jobs:
  test:
    strategy:
      matrix:
        python:
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
          - "3.12"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4.1.1
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}
          cache: "pip"
          cache-dependency-path: pyproject.toml
      - name: test
        run: make test PIP_AUDIT_EXTRA=test
github-actions-models-0.14.0/tests/sample-workflows/pip-audit-scorecards.yml000064400000000000000000000036551046102023000253370ustar 00000000000000
# https://github.com/pypa/pip-audit/blob/1fd67af0653a8e66b9470adab2e408a435632f19/.github/workflows/scorecards.yml
name: Scorecards supply-chain security
on:
  # Only the default branch is supported.
  branch_protection_rule:
  schedule:
    - cron: "19 4 * * 0"
  push:
    branches: ["main"]

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecards analysis
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload the results to code-scanning dashboard.
      security-events: write
      # Used to receive a badge. (Upcoming feature)
      id-token: write

    steps:
      - name: "Checkout code"
        uses: actions/checkout@v4.1.1 # tag=v3.0.0
        with:
          persist-credentials: false

      - name: "Run analysis"
        uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # tag=v2.3.1
        with:
          results_file: results.sarif
          results_format: sarif
          # Publish the results for public repositories to enable scorecard badges. For more details, see
          # https://github.com/ossf/scorecard-action#publishing-results.
          # For private repositories, `publish_results` will automatically be set to `false`, regardless
          # of the value entered here.
          publish_results: true

      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # tag=v3.1.3
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 5

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@cdcdbb579706841c47f7063dda365e292e5cad7a # tag=v2.13.4
        with:
          sarif_file: results.sarif
github-actions-models-0.14.0/tests/sample-workflows/pwn-requests.yml000064400000000000000000000011651046102023000237660ustar 00000000000000
# from: https://securitylab.github.com/resources/github-actions-preventing-pwn-requests/
# INSECURE. Provided as an example only.
on: pull_request_target

jobs:
  build:
    name: Build and test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - uses: actions/setup-node@v1
      - run: |
          npm install
          npm build

      - uses: completely/fakeaction@v2
        with:
          arg1: ${{ secrets.supersecret }}

      - uses: fakerepo/comment-on-pr@v1
        with:
          message: |
            Thank you!
github-actions-models-0.14.0/tests/sample-workflows/pyca-cryptography-ci.yml000064400000000000000000000534171046102023000253720ustar 00000000000000
# https://github.com/pyca/cryptography/blob/4a42c1c961c678d784de763d82794527c2374f2f/.github/workflows/ci.yml
name: CI
on:
  pull_request: {}
  push:
    branches:
      - main
      - '*.*.x'
    tags:
      - '*.*'
      - '*.*.*'

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

env:
  CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
  CARGO_INCREMENTAL: 0

jobs:
  linux:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        PYTHON:
          - {VERSION: "3.12", NOXSESSION: "flake"}
          - {VERSION: "3.12", NOXSESSION: "rust"}
          - {VERSION: "3.12", NOXSESSION: "docs", OPENSSL: {TYPE: "openssl", VERSION: "3.2.0"}}
          - {VERSION: "pypy-3.9", NOXSESSION: "tests-nocoverage"}
          - {VERSION: "pypy-3.10", NOXSESSION: "tests-nocoverage"}
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "openssl", VERSION: "1.1.1w"}}
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "openssl", VERSION: "3.0.12"}}
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "openssl", VERSION: "3.1.4"}}
          - {VERSION: "3.12", NOXSESSION: "tests-ssh", OPENSSL: {TYPE: "openssl", VERSION: "3.2.0"}}
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "openssl", VERSION: "3.2.0", CONFIG_FLAGS: "no-engine no-rc2 no-srtp no-ct no-psk"}}
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "openssl", VERSION: "3.2.0", CONFIG_FLAGS: "no-legacy", NO_LEGACY: "1"}}
          - {VERSION: "3.12", NOXSESSION: "tests", NOXARGS: "--enable-fips=1", OPENSSL: {TYPE: "openssl", CONFIG_FLAGS: "enable-fips", VERSION: "3.1.4"}}
          - {VERSION: "3.12", NOXSESSION: "tests", NOXARGS: "--enable-fips=1", OPENSSL: {TYPE: "openssl", CONFIG_FLAGS: "enable-fips", VERSION: "3.2.0"}}
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "libressl", VERSION: "3.7.3"}}
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "libressl", VERSION: "3.8.2"}}
          - {VERSION: "3.12", NOXSESSION: "tests-randomorder"}
          # Latest commit on the BoringSSL master branch, as of Dec 22, 2023.
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "boringssl", VERSION: "b6e0eba6e62333652290514e51b75b966b27b27c"}}
          # Latest commit on the OpenSSL master branch, as of Jan 02, 2024.
          - {VERSION: "3.12", NOXSESSION: "tests", OPENSSL: {TYPE: "openssl", VERSION: "94be985cbcc1f0a5cf4f172d4a8d06c5c623122b"}}
          # Builds with various Rust versions. Includes MSRV and next
          # potential future MSRV:
          # 1.64 - maturin, workspace inheritance
          # 1.65 - Generic associated types (GATs)
          - {VERSION: "3.12", NOXSESSION: "rust-noclippy,tests", RUST: "1.63.0"}
          - {VERSION: "3.12", NOXSESSION: "rust,tests", RUST: "1.64.0"}
          - {VERSION: "3.12", NOXSESSION: "rust,tests", RUST: "beta"}
          - {VERSION: "3.12", NOXSESSION: "rust,tests", RUST: "nightly"}
    timeout-minutes: 15
    steps:
      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
        timeout-minutes: 3
        with:
          persist-credentials: false
      - name: Setup python
        id: setup-python
        uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
        with:
          python-version: ${{ matrix.PYTHON.VERSION }}
          cache: pip
          cache-dependency-path: ci-constraints-requirements.txt
        timeout-minutes: 3
      - name: Setup rust
        uses: dtolnay/rust-toolchain@1482605bfc5719782e1267fd0c0cc350fe7646b8
        with:
          toolchain: ${{ matrix.PYTHON.RUST }}
          components: rustfmt,clippy
        if: matrix.PYTHON.RUST
      - run: rustup component add llvm-tools-preview
        if: matrix.PYTHON.NOXSESSION != 'flake' && matrix.PYTHON.NOXSESSION != 'docs'
      - name: Clone test vectors
        timeout-minutes: 2
        uses: ./.github/actions/fetch-vectors
        if: matrix.PYTHON.NOXSESSION != 'flake' && matrix.PYTHON.NOXSESSION != 'docs' && matrix.PYTHON.NOXSESSION != 'rust'
      - name: Compute config hash and set config vars
        run: |
          DEFAULT_CONFIG_FLAGS="shared no-ssl2 no-ssl3"
          CONFIG_FLAGS="$DEFAULT_CONFIG_FLAGS $CONFIG_FLAGS"
          OPENSSL_HASH=$(echo "${{ matrix.PYTHON.OPENSSL.TYPE }}-${{ matrix.PYTHON.OPENSSL.VERSION }}-$CONFIG_FLAGS" | sha1sum | sed 's/ .*$//')
          echo "CONFIG_FLAGS=${CONFIG_FLAGS}" >> $GITHUB_ENV
          echo "OPENSSL_HASH=${OPENSSL_HASH}" >> $GITHUB_ENV
          echo "OSSL_INFO=${{ matrix.PYTHON.OPENSSL.TYPE }}-${{ matrix.PYTHON.OPENSSL.VERSION }}-${CONFIG_FLAGS}" >> $GITHUB_ENV
          echo "OSSL_PATH=${{ github.workspace }}/osslcache/${{ matrix.PYTHON.OPENSSL.TYPE }}-${{ matrix.PYTHON.OPENSSL.VERSION }}-${OPENSSL_HASH}" >> $GITHUB_ENV
        env:
          CONFIG_FLAGS: ${{ matrix.PYTHON.OPENSSL.CONFIG_FLAGS }}
        if: matrix.PYTHON.OPENSSL
      - name: Load OpenSSL cache
        uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
        id: ossl-cache
        timeout-minutes: 2
        with:
          path: ${{ github.workspace }}/osslcache
          # When altering the openssl build process you may need to increment
          # the value on the end of this cache key so that you can prevent it
          # from fetching the cache and skipping the build step.
          key: ${{ matrix.PYTHON.OPENSSL.TYPE }}-${{ matrix.PYTHON.OPENSSL.VERSION }}-${{ env.OPENSSL_HASH }}-9
        if: matrix.PYTHON.OPENSSL
      - name: Build custom OpenSSL/LibreSSL
        run: .github/workflows/build_openssl.sh
        env:
          TYPE: ${{ matrix.PYTHON.OPENSSL.TYPE }}
          VERSION: ${{ matrix.PYTHON.OPENSSL.VERSION }}
        if: matrix.PYTHON.OPENSSL && steps.ossl-cache.outputs.cache-hit != 'true'
      - name: Set CFLAGS/LDFLAGS
        run: |
          echo "OPENSSL_DIR=${OSSL_PATH}" >> $GITHUB_ENV
          echo "CFLAGS=${CFLAGS} -Werror=implicit-function-declaration" >> $GITHUB_ENV
          echo "RUSTFLAGS=-Clink-arg=-Wl,-rpath=${OSSL_PATH}/lib -Clink-arg=-Wl,-rpath=${OSSL_PATH}/lib64" >> $GITHUB_ENV
        if: matrix.PYTHON.OPENSSL
      - name: Cache rust and pip
        uses: ./.github/actions/cache
        timeout-minutes: 2
        with:
          # We have both the Python version from the matrix and from the
          # setup-python step because the latter doesn't distinguish
          # pypy3-3.8 and pypy3-3.9 -- both of them show up as 7.3.11.
          key: ${{ matrix.PYTHON.VERSION }}-${{ steps.setup-python.outputs.python-version }}-${{ matrix.PYTHON.NOXSESSION }}-${{ env.OPENSSL_HASH }}
      - run: python -m pip install -c ci-constraints-requirements.txt 'nox' 'tomli; python_version < "3.11"'
      - name: Create nox environment
        run: |
          nox -v --install-only
        env:
          NOXSESSION: ${{ matrix.PYTHON.NOXSESSION }}
          CARGO_TARGET_DIR: ${{ format('{0}/src/rust/target/', github.workspace) }}
      - name: Tests
        run: |
          nox --no-install --  --color=yes --wycheproof-root=wycheproof --x509-limbo-root=x509-limbo ${{ matrix.PYTHON.NOXARGS }}
        env:
          NOXSESSION: ${{ matrix.PYTHON.NOXSESSION }}
          COLUMNS: 80
          CRYPTOGRAPHY_OPENSSL_NO_LEGACY: ${{ matrix.PYTHON.OPENSSL.NO_LEGACY }}
          CARGO_TARGET_DIR: ${{ format('{0}/src/rust/target/', github.workspace) }}
      - uses: ./.github/actions/upload-coverage

  distros:
    runs-on: ${{ matrix.IMAGE.RUNNER }}
    container: ghcr.io/pyca/cryptography-runner-${{ matrix.IMAGE.IMAGE }}
    strategy:
      fail-fast: false
      matrix:
        IMAGE:
          - {IMAGE: "rhel8", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "rhel8-fips", NOXSESSION: "tests", RUNNER: "ubuntu-latest", FIPS: true}
          - {IMAGE: "buster", NOXSESSION: "tests-nocoverage", RUNNER: "ubuntu-latest"}
          - {IMAGE: "bullseye", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "bookworm", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "trixie", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "sid", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "ubuntu-focal", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "ubuntu-jammy", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "ubuntu-rolling", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "fedora", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "alpine", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "centos-stream9", NOXSESSION: "tests", RUNNER: "ubuntu-latest"}
          - {IMAGE: "centos-stream9-fips", NOXSESSION: "tests", RUNNER: "ubuntu-latest", FIPS: true}
          - {IMAGE: "ubuntu-jammy:aarch64", NOXSESSION: "tests", RUNNER: [self-hosted, Linux, ARM64]}
          - {IMAGE: "alpine:aarch64", NOXSESSION: "tests", RUNNER: [self-hosted, Linux, ARM64]}
    timeout-minutes: 15
    env:
      RUSTUP_HOME: /root/.rustup
    steps:
      - name: Ridiculous alpine workaround for actions support on arm64
        run: |
          # This modifies /etc/os-release so the JS actions
          # from GH can't detect that it's on alpine:aarch64. It will
          # then use a glibc nodejs, which works fine when gcompat
          # is installed in the container (which it is)
          sed -i "s:ID=alpine:ID=NotpineForGHA:" /etc/os-release
        if: matrix.IMAGE.IMAGE == 'alpine:aarch64'

      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
        timeout-minutes: 3
        with:
          persist-credentials: false
      - name: Cache rust and pip
        uses: ./.github/actions/cache
        timeout-minutes: 2
        with:
          key: ${{ matrix.IMAGE.IMAGE }}
      - name: Clone test vectors
        timeout-minutes: 2
        uses: ./.github/actions/fetch-vectors
      # When run in a docker container the home directory doesn't have the same owner as the
      # apparent user so pip refuses to create a cache dir
      - name: create pip cache dir
        run: mkdir -p "${HOME}/.cache/pip"
      - run: |
          echo "OPENSSL_FORCE_FIPS_MODE=1" >> $GITHUB_ENV
        if: matrix.IMAGE.FIPS
      - run: /venv/bin/python -m pip install -c ci-constraints-requirements.txt 'nox' 'tomli; python_version < "3.11"'
      - run: '/venv/bin/nox -v --install-only'
        env:
          CARGO_TARGET_DIR: ${{ format('{0}/src/rust/target/', github.workspace) }}
          # OPENSSL_ENABLE_SHA1_SIGNATURES is for CentOS 9 Stream
          OPENSSL_ENABLE_SHA1_SIGNATURES: 1
          NOXSESSION: ${{ matrix.IMAGE.NOXSESSION }}
      - run: '/venv/bin/nox --no-install --  --color=yes --wycheproof-root="wycheproof" --x509-limbo-root="x509-limbo"'
        env:
          COLUMNS: 80
          # OPENSSL_ENABLE_SHA1_SIGNATURES is for CentOS 9 Stream
          OPENSSL_ENABLE_SHA1_SIGNATURES: 1
          NOXSESSION: ${{ matrix.IMAGE.NOXSESSION }}
      - uses: ./.github/actions/upload-coverage

  macos:
    runs-on: ${{ matrix.RUNNER.OS }}
    strategy:
      fail-fast: false
      matrix:
        RUNNER:
          - {OS: 'macos-13', ARCH: 'x86_64'}
          - {OS: [self-hosted, macos, ARM64, tart], ARCH: 'arm64'}
        PYTHON:
          - {VERSION: "3.7", NOXSESSION: "tests-nocoverage"}
          - {VERSION: "3.12", NOXSESSION: "tests"}
        exclude:
          # We only test latest Python on arm64.
py37 won't work since there's no universal2 binary - PYTHON: {VERSION: "3.7", NOXSESSION: "tests-nocoverage"} RUNNER: {OS: [self-hosted, macos, ARM64, tart], ARCH: 'arm64'} timeout-minutes: 15 steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 timeout-minutes: 3 with: persist-credentials: false - name: Cache rust and pip uses: ./.github/actions/cache timeout-minutes: 2 with: key: ${{ matrix.PYTHON.NOXSESSION }}-${{ matrix.PYTHON.VERSION }} - name: Setup python uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: ${{ matrix.PYTHON.VERSION }} architecture: 'x64' # we force this right now so that it will install the universal2 on arm64 cache: pip cache-dependency-path: ci-constraints-requirements.txt timeout-minutes: 3 - run: rustup component add llvm-tools-preview - run: python -m pip install -c ci-constraints-requirements.txt 'nox' 'tomli; python_version < "3.11"' - name: Clone test vectors timeout-minutes: 2 uses: ./.github/actions/fetch-vectors - uses: dawidd6/action-download-artifact@e7466d1a7587ed14867642c2ca74b5bcc1e19a2d # v3.0.0 with: repo: pyca/infra workflow: build-macos-openssl.yml branch: main workflow_conclusion: success name: openssl-macos-universal2 path: "../openssl-macos-universal2/" github_token: ${{ secrets.GITHUB_TOKEN }} - name: Build nox environment run: | OPENSSL_DIR=$(readlink -f ../openssl-macos-universal2/) \ OPENSSL_STATIC=1 \ CFLAGS="-Werror -Wno-error=deprecated-declarations -Wno-error=incompatible-pointer-types-discards-qualifiers -Wno-error=unused-function -mmacosx-version-min=10.12" \ nox -v --install-only env: NOXSESSION: ${{ matrix.PYTHON.NOXSESSION }} CARGO_TARGET_DIR: ${{ format('{0}/src/rust/target/', github.workspace) }} - name: Tests run: nox --no-install -- --color=yes --wycheproof-root=wycheproof --x509-limbo-root=x509-limbo env: NOXSESSION: ${{ matrix.PYTHON.NOXSESSION }} COLUMNS: 80 - uses: ./.github/actions/upload-coverage windows: runs-on: windows-latest strategy: fail-fast: false matrix: WINDOWS: - {ARCH: 'x86', WINDOWS: 'win32'} - {ARCH: 'x64', WINDOWS: 'win64'} PYTHON: - {VERSION: "3.7", NOXSESSION: "tests-nocoverage"} - {VERSION: "3.12", NOXSESSION: "tests"} timeout-minutes: 15 steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 timeout-minutes: 3 with: persist-credentials: false - name: Setup python id: setup-python uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: ${{ matrix.PYTHON.VERSION }} architecture: ${{ matrix.WINDOWS.ARCH }} cache: pip cache-dependency-path: ci-constraints-requirements.txt timeout-minutes: 3 - run: rustup component add llvm-tools-preview - name: Cache rust and pip uses: ./.github/actions/cache timeout-minutes: 2 with: key: ${{ matrix.PYTHON.NOXSESSION }}-${{ matrix.WINDOWS.ARCH }}-${{ steps.setup-python.outputs.python-version }} - run: python -m pip install -c ci-constraints-requirements.txt "nox" "tomli; python_version < '3.11'" - uses: dawidd6/action-download-artifact@e7466d1a7587ed14867642c2ca74b5bcc1e19a2d # v3.0.0 with: repo: pyca/infra workflow: build-windows-openssl.yml branch: main workflow_conclusion: success name: "openssl-${{ matrix.WINDOWS.WINDOWS }}" path: "C:/openssl-${{ matrix.WINDOWS.WINDOWS }}/" github_token: ${{ secrets.GITHUB_TOKEN }} - name: Configure run: | echo "OPENSSL_DIR=C:/openssl-${{ matrix.WINDOWS.WINDOWS }}" >> $GITHUB_ENV shell: bash - name: Clone test vectors timeout-minutes: 2 uses: ./.github/actions/fetch-vectors - name: Build nox 
environment run: nox -v --install-only env: NOXSESSION: ${{ matrix.PYTHON.NOXSESSION }} CARGO_TARGET_DIR: ${{ format('{0}/src/rust/target/', github.workspace) }} - name: Tests run: nox --no-install -- --color=yes --wycheproof-root=wycheproof --x509-limbo-root=x509-limbo env: NOXSESSION: ${{ matrix.PYTHON.NOXSESSION }} COLUMNS: 80 CARGO_TARGET_DIR: ${{ format('{0}/src/rust/target/', github.workspace) }} - uses: ./.github/actions/upload-coverage linux-downstream: runs-on: ubuntu-latest strategy: fail-fast: false matrix: DOWNSTREAM: - paramiko - pyopenssl - pyopenssl-release - twisted - aws-encryption-sdk - dynamodb-encryption-sdk - certbot - certbot-josepy - mitmproxy - scapy PYTHON: - '3.12' name: "Downstream tests for ${{ matrix.DOWNSTREAM }}" timeout-minutes: 15 steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 timeout-minutes: 3 with: persist-credentials: false - name: Cache rust and pip uses: ./.github/actions/cache timeout-minutes: 2 - name: Setup python uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: ${{ matrix.PYTHON }} cache: pip cache-dependency-path: ci-constraints-requirements.txt timeout-minutes: 3 - run: ./.github/downstream.d/${{ matrix.DOWNSTREAM }}.sh install - run: pip install . setuptools env: CARGO_TARGET_DIR: ${{ format('{0}/src/rust/target/', github.workspace) }} # cryptography main has a version of "(X+1).0.0.dev1" where X is the # most recently released major version. A package used by a downstream # may depend on cryptography <=X. If you use entrypoints stuff, this can # lead to runtime errors due to version incompatibilities. Rename the # dist-info directory to pretend to be an older version to "solve" this. - run: | import json import pkg_resources import shutil import urllib.request d = pkg_resources.get_distribution("cryptography") with urllib.request.urlopen("https://pypi.org/pypi/cryptography/json") as r: latest_version = json.load(r)["info"]["version"] new_path = d.egg_info.replace(d.version, latest_version) shutil.move(d.egg_info, new_path) shell: python - run: ./.github/downstream.d/${{ matrix.DOWNSTREAM }}.sh run all-green: # https://github.community/t/is-it-possible-to-require-all-github-actions-tasks-to-pass-without-enumerating-them/117957/4?u=graingert runs-on: ubuntu-latest needs: [linux, distros, macos, windows, linux-downstream] if: ${{ always() }} timeout-minutes: 3 steps: - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 timeout-minutes: 3 with: persist-credentials: false - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2 with: jobs: ${{ toJSON(needs) }} - name: Setup python if: ${{ always() }} uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: '3.12' cache: pip cache-dependency-path: ci-constraints-requirements.txt timeout-minutes: 3 - run: pip install -c ci-constraints-requirements.txt coverage[toml] if: ${{ always() }} - name: Download coverage data if: ${{ always() }} uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 with: name: coverage-data - name: Combine coverage and fail if it's <100%. if: ${{ always() }} id: combinecoverage run: | set +e python -m coverage combine echo "## Python Coverage" >> $GITHUB_STEP_SUMMARY python -m coverage report -m --fail-under=100 > COV_REPORT COV_EXIT_CODE=$? cat COV_REPORT if [ $COV_EXIT_CODE -ne 0 ]; then echo "🚨 Python Coverage failed. 
Under 100" | tee -a $GITHUB_STEP_SUMMARY fi echo '```' >> $GITHUB_STEP_SUMMARY cat COV_REPORT >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY exit $COV_EXIT_CODE - name: Combine rust coverage and fail if it's <100%. if: ${{ always() }} id: combinerustcoverage run: | set +e sudo apt-get install -y lcov RUST_COVERAGE_OUTPUT=$(lcov $(for f in *.lcov; do echo --add-tracefile "$f"; done) -o combined.lcov | grep lines) echo "## Rust Coverage" >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY echo $RUST_COVERAGE_OUTPUT >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY if ! echo "$RUST_COVERAGE_OUTPUT" | grep "100.0%"; then echo "🚨 Rust Coverage failed. Under 100" | tee -a $GITHUB_STEP_SUMMARY exit 1 fi - name: Create rust coverage HTML run: genhtml combined.lcov -o rust-coverage if: ${{ failure() && steps.combinerustcoverage.outcome == 'failure' }} - name: Create coverage HTML run: python -m coverage html if: ${{ failure() && steps.combinecoverage.outcome == 'failure' }} - name: Upload HTML report. uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0 with: name: _html-report path: htmlcov if-no-files-found: ignore if: ${{ failure() && steps.combinecoverage.outcome == 'failure' }} - name: Upload rust HTML report. uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0 with: name: _html-rust-report path: rust-coverage if-no-files-found: ignore if: ${{ failure() && steps.combinerustcoverage.outcome == 'failure' }} github-actions-models-0.14.0/tests/sample-workflows/pypi-attestations-release.yml000064400000000000000000000017761046102023000264420ustar 00000000000000# https://github.com/trailofbits/pypi-attestations/blob/b5920dee025c93b2bfefbcccc6acc7eab7b8a18e/.github/workflows/release.yml on: release: types: - published name: release permissions: # Trusted Publishing + attestations id-token: write attestations: write jobs: pypi: name: upload release to PyPI runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version-file: pyproject.toml cache: "pip" cache-dependency-path: pyproject.toml - name: deps run: python -m pip install -U setuptools build wheel - name: build run: python -m build - name: publish uses: pypa/gh-action-pypi-publish@release/v1 - name: attest uses: actions/attest@v1 with: subject-path: | ./dist/*.tar.gz ./dist/*.whl predicate-type: "https://docs.pypi.org/attestations/publish/v1" predicate: "{}" github-actions-models-0.14.0/tests/sample-workflows/rnpgp-rnp-centos-and-fedora.yml000064400000000000000000000430561046102023000265320ustar 00000000000000# https://github.com/rnpgp/rnp/blob/ddcbaa932f01a349969e9689de5bccd485090d02/.github/workflows/centos-and-fedora.yml name: centos-and-fedora on: push: branches: - main - "release/**" paths-ignore: - "/*.sh" - "/.*" - "/_*" - "Brewfile" - "docs/**" - "**.adoc" - "**.md" - "**.nix" - "flake.lock" - "version.txt" - ".github/workflows/*.yml" - "!.github/workflows/centos-and-fedora.yml" pull_request: paths-ignore: - "/*.sh" - "/.*" - "/_*" - "Brewfile" - "docs/**" - "**.adoc" - "**.md" - "**.nix" - "flake.lock" - "version.txt" concurrency: group: "${{ github.workflow }}-${{ github.job }}-${{ github.head_ref || github.ref_name }}" cancel-in-progress: true env: CORES: 2 RNP_LOG_CONSOLE: 1 CODECOV_TOKEN: dbecf176-ea3f-4832-b743-295fd71d0fad jobs: tests: name: ${{ matrix.image.name }} [CC ${{ matrix.env.CC }}; backend ${{ matrix.image.backend }} ${{ matrix.image.botan_ver }}; gpg ${{ matrix.image.gpg_ver }}; build ${{ 
matrix.env.BUILD_MODE }}; SM2 ${{ matrix.image.sm2 }}; IDEA ${{ matrix.image.idea }}] runs-on: ubuntu-latest timeout-minutes: 120 strategy: fail-fast: false matrix: env: - { CC: gcc, CXX: g++, BUILD_MODE: normal, SHARED_LIBS: on } # normal --> Release build; sanitize --> Debug build so theoretically test conditions are different # - { CC: clang, CXX: clang++, BUILD_MODE: normal } - { CC: clang, CXX: clang++, BUILD_MODE: sanitize, SHARED_LIBS: on } # All containers have gpg stable and lts installed # centos-9-amd64 has botan 2.19.3 installed # fedora-39-amd64 has botan 2.19.4 installed # Any other version has to be built explicitly! # Please refer to https://github.com/rnpgp/rnp-ci-containers#readme for more image details image: - { name: "CentOS 9", container: "centos-9-amd64", backend: "Botan", botan_ver: "system", gpg_ver: "system", } - { name: "CentOS 9", container: "centos-9-amd64", backend: "Botan", botan_ver: "system", sm2: Off, gpg_ver: "lts", } - { name: "Fedora 39", container: "fedora-39-amd64", backend: "Botan", botan_ver: "system", gpg_ver: "system", } - { name: "Fedora 40", container: "fedora-40-amd64", backend: "Botan", botan_ver: "system", gpg_ver: "system", } - { name: "Fedora 40", container: "fedora-40-amd64", backend: "Botan", botan_ver: "3.1.1", gpg_ver: "system", } - { name: "Fedora 40", container: "fedora-40-amd64", backend: "Botan", botan_ver: "head", gpg_ver: "system", } - { name: "Fedora 40", container: "fedora-40-amd64", backend: "Botan", botan_ver: "3.3.0", pqc: On, gpg_ver: "system", } - { name: "CentOS 9", container: "centos-9-amd64", backend: "OpenSSL", gpg_ver: "lts", } - { name: "Fedora 39", container: "fedora-39-amd64", backend: "OpenSSL", gpg_ver: "system", } - { name: "Fedora 40", container: "fedora-40-amd64", backend: "OpenSSL", gpg_ver: "system", } - { name: "RHEL 8", container: "redhat-8-ubi", backend: "OpenSSL", gpg_ver: "system", } - { name: "RHEL 9", container: "redhat-9-ubi", backend: "OpenSSL", gpg_ver: "system", } include: # Coverage report for Botan 2.x backend - image: { name: "CentOS 9 Coverage", container: "centos-9-amd64", gpg_ver: stable, backend: Botan, botan_ver: "system", } env: { CC: gcc, CXX: g++, BUILD_MODE: coverage, SHARED_LIBS: on } # Coverage report for Botan 3.x backend - image: { name: "Fedora 40 Coverage", container: "fedora-40-amd64", gpg_ver: stable, backend: Botan, botan_ver: "3.3.0", } env: { CC: gcc, CXX: g++, BUILD_MODE: coverage, SHARED_LIBS: on } # Coverage report for OpenSSL 3.0 backend - image: { name: "Fedora 40 Coverage", container: "fedora-40-amd64", gpg_ver: stable, backend: OpenSSL, } env: { CC: gcc, CXX: g++, BUILD_MODE: coverage, SHARED_LIBS: on } # Coverage report for OpenSSL 3.0 backend with disabled algos - image: { name: "Fedora 40 Coverage", container: "fedora-40-amd64", gpg_ver: stable, backend: OpenSSL, idea: Off, sm2: Off, two: Off, blow: Off, rmd: Off, bp: Off, } env: { CC: gcc, CXX: g++, BUILD_MODE: coverage, SHARED_LIBS: on } # Coverage report for Botan backend with disabled algos - image: { name: "Fedora 40 Coverage", container: "fedora-40-amd64", gpg_ver: stable, backend: Botan, idea: Off, sm2: Off, two: Off, blow: Off, rmd: Off, bp: Off, } env: { CC: gcc, CXX: g++, BUILD_MODE: coverage, SHARED_LIBS: on } # Coverage report for OpenSSL 1.1.1 backend within RHEL 8 - image: { name: "RHEL 8 Coverage", container: "redhat-8-ubi", gpg_ver: stable, backend: OpenSSL, } env: { CC: gcc, CXX: g++, BUILD_MODE: coverage, SHARED_LIBS: on } # Coverage report for PQC - not running yet due to very low coverage #- image:
{ name: 'Fedora 40 PQC Coverage', container: 'fedora-40-amd64', gpg_ver: stable, backend: Botan, botan_ver: '3.3.0', pqc: On } # env: { CC: gcc, CXX: g++, BUILD_MODE: coverage, SHARED_LIBS: off } container: ghcr.io/rnpgp/ci-rnp-${{ matrix.image.container }} env: ${{ matrix.env }} steps: - name: Checkout uses: actions/checkout@v4 with: submodules: true - name: Setup environment run: | set -o errexit -o pipefail -o noclobber -o nounset /opt/tools/tools.sh select_crypto_backend_for_gha ${{ matrix.image.backend }} /opt/tools/tools.sh select_gpg_version_for_gha ${{ matrix.image.gpg_ver }} /opt/tools/tools.sh select_botan_version_for_gha ${{ matrix.image.botan_ver }} echo "ENABLE_SM2=${{ matrix.image.sm2 }}" >> $GITHUB_ENV echo "ENABLE_IDEA=${{ matrix.image.idea }}" >> $GITHUB_ENV echo "ENABLE_TWOFISH=${{ matrix.image.two }}" >> $GITHUB_ENV echo "ENABLE_BLOWFISH=${{ matrix.image.blow }}" >> $GITHUB_ENV echo "ENABLE_RIPEMD160=${{ matrix.image.rmd }}" >> $GITHUB_ENV echo "ENABLE_BRAINPOOL=${{ matrix.image.bp }}" >> $GITHUB_ENV echo "ENABLE_PQC=${{ matrix.image.pqc }}" >> $GITHUB_ENV echo CORES="$(nproc --all)" >> $GITHUB_ENV useradd rnpuser printf "\nrnpuser\tALL=(ALL)\tNOPASSWD:\tALL" > /etc/sudoers.d/rnpuser printf "\nrnpuser\tsoft\tnproc\tunlimited\n" > /etc/security/limits.d/30-rnpuser.conf # Need to build HEAD version since it is always different - name: Build gpg head if: matrix.image.gpg_ver == 'head' run: /opt/tools/tools.sh build_and_install_gpg head - name: Build botan head if: matrix.image.botan_ver == 'head' # Botan's head renamed the curve25519 module to x25519; however, this hasn't made it into the 3.5.0 release yet run: | sed -i 's/curve25519/x25519/g' /opt/tools/botan3-modules /opt/tools/botan3-pqc-modules /opt/tools/tools.sh build_and_install_botan head - name: Configure run: | set -o errexit -o pipefail -o noclobber -o nounset [[ "${{ env.BUILD_MODE }}" = "coverage" ]] && cov_opt=(-DENABLE_COVERAGE=yes) [[ "${{ env.BUILD_MODE }}" = "sanitize" ]] && san_opt=(-DENABLE_SANITIZERS=yes) [ -n "$ENABLE_SM2" ] && sm2_opt=(-DENABLE_SM2="$ENABLE_SM2") [ -n "$ENABLE_IDEA" ] && idea_opt=(-DENABLE_IDEA="$ENABLE_IDEA") [ -n "$ENABLE_TWOFISH" ] && two_opt=(-DENABLE_TWOFISH="$ENABLE_TWOFISH") [ -n "$ENABLE_BLOWFISH" ] && blow_opt=(-DENABLE_BLOWFISH="$ENABLE_BLOWFISH") [ -n "$ENABLE_RIPEMD160" ] && rmd_opt=(-DENABLE_RIPEMD160="$ENABLE_RIPEMD160") [ -n "$ENABLE_BRAINPOOL" ] && bp_opt=(-DENABLE_BRAINPOOL="$ENABLE_BRAINPOOL") [ -n "$ENABLE_PQC" ] && pqc_opt=(-DENABLE_PQC="$ENABLE_PQC" -DENABLE_CRYPTO_REFRESH="$ENABLE_PQC") cmake -B build \ -DBUILD_SHARED_LIBS=${{ env.SHARED_LIBS }} \ -DDOWNLOAD_GTEST=ON \ -DCMAKE_BUILD_TYPE=Release \ -DCRYPTO_BACKEND=${{ matrix.image.backend }} \ ${sm2_opt:-} ${idea_opt:-} ${two_opt:-} ${blow_opt:-} ${rmd_opt:-} ${bp_opt:-} ${pqc_opt[@]:-} ${cov_opt:-} ${san_opt:-} .
- name: Build run: cmake --build build --parallel ${{ env.CORES }} - name: Test run: | mkdir -p "build/Testing/Temporary" cp "cmake/CTestCostData.txt" "build/Testing/Temporary" export PATH="$PWD/build/src/lib:$PATH" chown -R rnpuser:rnpuser $PWD exec su rnpuser -c "ctest --parallel ${{ env.CORES }} --test-dir build --output-on-failure" - name: Coverage if: env.BUILD_MODE == 'coverage' run: | curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --keyring trustedkeys.gpg --import # One-time step curl -Os https://uploader.codecov.io/latest/linux/codecov curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig gpgv codecov.SHA256SUM.sig codecov.SHA256SUM shasum -a 256 -c codecov.SHA256SUM chmod +x codecov find "build" -type f -name '*.gcno' -exec gcov -p {} + ./codecov - name: Install if: env.BUILD_MODE != 'coverage' && env.SHARED_LIBS == 'on' run: cmake --install build - name: Checkout shell test framework if: env.BUILD_MODE != 'coverage' && env.SHARED_LIBS == 'on' uses: actions/checkout@v4 with: repository: kward/shunit2 path: ci/tests/shunit2 - name: Run additional ci tests if: env.BUILD_MODE != 'coverage' && env.SHARED_LIBS == 'on' run: RNP_INSTALL=/usr/local ci/tests/ci-tests.sh package-source: runs-on: ubuntu-latest container: ghcr.io/rnpgp/ci-rnp-${{ matrix.image.container }} timeout-minutes: 30 # needs: tests strategy: fail-fast: false matrix: image: - { name: "CentOS 9", container: "centos-9-amd64" } - { name: "Fedora 39", container: "fedora-39-amd64" } - { name: "Fedora 40", container: "fedora-40-amd64" } name: Package ${{ matrix.image.name }} SRPM steps: - name: Install rpm tools run: yum -y install rpm-build - name: Checkout uses: actions/checkout@v4 with: submodules: true - name: Configure run: cmake -B build -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF - name: Package SRPM run: cpack -B build/SRPM -G RPM --config build/CPackSourceConfig.cmake - name: Upload SRPM uses: actions/upload-artifact@v4 with: name: "SRPM ${{ matrix.image.name }}" path: "build/SRPM/*.src.rpm" retention-days: 5 - name: Stash packaging tests uses: actions/upload-artifact@v4 with: name: "tests-${{ matrix.image.name }}" path: "ci/tests/**" retention-days: 1 package: runs-on: ubuntu-latest container: ghcr.io/rnpgp/ci-rnp-${{ matrix.image.container }} timeout-minutes: 30 needs: package-source strategy: fail-fast: false matrix: image: - { name: "CentOS 9", container: "centos-9-amd64" } - { name: "Fedora 39", container: "fedora-39-amd64" } - { name: "Fedora 40", container: "fedora-40-amd64" } name: Package ${{ matrix.image.name }} RPM steps: - name: Install rpm tools run: yum -y install rpm-build - name: Download SRPM uses: actions/download-artifact@v4 with: name: "SRPM ${{ matrix.image.name }}" path: ~/rpmbuild/SRPMS - name: Extract SRPM run: | rpm -i -v ~/rpmbuild/SRPMS/*.src.rpm tar xzf ~/rpmbuild/SOURCES/*.tar.gz --strip 1 -C ~/rpmbuild/SOURCES - name: Build rnp run: | cmake ~/rpmbuild/SOURCES -B ~/rpmbuild/SOURCES/BUILD -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF \ -DCMAKE_INSTALL_PREFIX=/usr cmake --build ~/rpmbuild/SOURCES/BUILD --config Release - name: Package rpm run: cpack -G RPM -B ~/rpmbuild/SOURCES/RPMS --config ~/rpmbuild/SOURCES/BUILD/CPackConfig.cmake - name: Upload Artifact uses: actions/upload-artifact@v4 with: name: "RPM ${{ matrix.image.name}}" path: "~/rpmbuild/SOURCES/RPMS/*.rpm" retention-days: 5 # The main purpose of this step is to test the RPMS in a pristine environment (as for the 
end user). # ci-scripts are deliberately not used, as they recreate the development environment, # and this is something we proudly reject here rpm-tests: runs-on: ubuntu-latest needs: package container: ${{ matrix.image.container }} timeout-minutes: 30 strategy: fail-fast: false matrix: image: - { name: "CentOS 9", container: "quay.io/centos/centos:stream9" } # Fedora 39 is disabled since it has a cmake issue which prevents man pages from being packaged. # Please see the package step for the error message. #- { name: 'Fedora 39', container: 'fedora:39' } - { name: "Fedora 40", container: "fedora:40" } name: RPM test on ${{ matrix.image.name }} steps: - name: Install prerequisites run: yum -y install sudo wget binutils # Fedora 39/40 packages depend on botan.so.19, which comes from a Fedora package that is available by default # CentOS 9 depends on botan.so.19 and needs the EPEL9 repo to be installed - name: Install epel-release if: matrix.image.container == 'quay.io/centos/centos:stream9' run: | sudo dnf -y install 'dnf-command(config-manager)' sudo dnf config-manager --set-enabled crb sudo dnf -y install epel-release - name: Install xargs if: matrix.image.container == 'fedora:39' run: sudo yum -y install findutils - name: Download rnp rpms uses: actions/download-artifact@v4 with: name: "RPM ${{ matrix.image.name}}" - name: Checkout shell test framework uses: actions/checkout@v4 with: repository: kward/shunit2 path: ci/tests/shunit2 - name: Unstash tests if: matrix.image.container != 'centos:7' uses: actions/download-artifact@v4 with: name: tests-${{ matrix.image.name }} path: ci/tests - name: Run rpm tests # RPM tests # - no source checkout or upload [we get only test scripts from the previous step using GHA artifacts] # - no environment set up with rnp scripts # - no dependency setup; we test that yum can install whatever is required run: | chmod +x ci/tests/rpm-tests.sh ci/tests/rpm-tests.sh - name: Run symbol visibility tests run: | chmod +x ci/tests/ci-tests.sh sudo yum -y localinstall librnp0-0*.*.rpm librnp0-devel-0*.*.rpm rnp0-0*.*.rpm ci/tests/ci-tests.sh sudo yum -y erase $(rpm -qa | grep rnp) - name: Setup minimalistic build environment run: | sudo yum -y install make gcc gcc-c++ zlib-devel bzip2-devel botan2-devel mkdir cmake wget https://github.com/Kitware/CMake/releases/download/v3.12.0/cmake-3.12.0-Linux-x86_64.sh -O cmake/cmake.sh sudo sh cmake/cmake.sh --skip-license --prefix=/usr/local # el8, el9, fr35, fr36 provide json-c-devel (version 12+) - name: Setup json-c run: sudo yum -y install json-c-devel - name: Run packaging tests run: | chmod +x ci/tests/pk-tests.sh ci/tests/pk-tests.sh github-actions-models-0.14.0/tests/sample-workflows/runs-on-expr.yml000064400000000000000000000004161046102023000236660ustar 00000000000000name: runs-on-expr on: [push] jobs: check-bats-version: runs-on: ${{ matrix.runner }} steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: "14" - run: npm install -g bats - run: bats -v github-actions-models-0.14.0/tests/sample-workflows/runs-on-group-only.yml000064400000000000000000000006231046102023000250230ustar 00000000000000# https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#choosing-runners-in-a-group name: learn-github-actions on: [push] jobs: check-bats-version: runs-on: group: ubuntu-runners steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: "14" - run: npm install -g bats - run: bats -v
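The two runner fixtures above exercise the crate's `RunsOn` model from opposite ends: `runs-on-expr.yml` uses an expression, while `runs-on-group-only.yml` uses the runner-group mapping form. As a hedged sketch (not part of the crate's test suite), the group form would deserialize roughly as follows, reusing only the `Workflow`, `Job`, `RunsOn`, and `LoE` APIs already exercised in `tests/test_workflow.rs` and assuming `serde_yaml` as a dependency; the exact shape of the group variant is left unmatched on purpose:

use github_actions_models::{
    common::expr::LoE,
    workflow::{job::RunsOn, Job, Workflow},
};

fn main() {
    // The group-only fixture from above, inlined to keep the sketch self-contained.
    let yaml = r#"
name: learn-github-actions
on: [push]
jobs:
  check-bats-version:
    runs-on:
      group: ubuntu-runners
    steps:
      - run: bats -v
"#;
    let workflow: Workflow = serde_yaml::from_str(yaml).unwrap();
    let Some(Job::NormalJob(job)) = workflow.jobs.get("check-bats-version") else {
        panic!("expected a normal job");
    };
    // A `runs-on: { group: ... }` parses as a literal, but not as the plain
    // label-list (`RunsOn::Target`) form; the precise group variant is an
    // assumption here -- consult `workflow::job::RunsOn` for its shape.
    match &job.runs_on {
        LoE::Literal(RunsOn::Target(labels)) => println!("label form: {labels:?}"),
        other => println!("group or expression form: {other:?}"),
    }
}

Matching on `LoE` first keeps expression-valued `runs-on:` values (as in `runs-on-expr.yml`) from being conflated with concrete runner targets.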
github-actions-models-0.14.0/tests/sample-workflows/scalar-trigger-type.yml000064400000000000000000000003551046102023000252000ustar 00000000000000# repro case for https://github.com/woodruffw/github-actions-models/issues/12 name: issue-12 on: repository_dispatch: types: some-external-type jobs: dummy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 github-actions-models-0.14.0/tests/test_action.rs000064400000000000000000000030271046102023000201350ustar 00000000000000use std::{env, path::Path}; use github_actions_models::{ action::{Action, Runs}, common::If, }; fn load_action(name: &str) -> Action { let action_path = Path::new(env!("CARGO_MANIFEST_DIR")) .join("tests/sample-actions") .join(name); let action_contents = std::fs::read_to_string(action_path).unwrap(); serde_yaml::from_str(&action_contents).unwrap() } #[test] fn test_load_all() { let sample_actions = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/sample-actions"); for sample_action in std::fs::read_dir(sample_actions).unwrap() { let sample_action = sample_action.unwrap().path(); let action_contents = std::fs::read_to_string(sample_action).unwrap(); serde_yaml::from_str::(&action_contents).unwrap(); } } #[test] fn test_setup_python() { let setup_python = load_action("setup-python.yml"); assert_eq!(setup_python.name, "Setup Python"); assert_eq!( setup_python.description.unwrap(), "Set up a specific version of Python and add the command-line tools to the PATH." ); assert_eq!(setup_python.author.unwrap(), "GitHub"); assert_eq!(setup_python.inputs.len(), 9); assert_eq!(setup_python.outputs.len(), 3); let Runs::JavaScript(runs) = setup_python.runs else { unreachable!(); }; assert_eq!(runs.using, "node20"); assert_eq!(runs.main, "dist/setup/index.js"); assert_eq!(runs.post.unwrap(), "dist/cache-save/index.js"); assert_eq!(runs.post_if.unwrap(), If::Expr("success()".into())); } github-actions-models-0.14.0/tests/test_dependabot_v2.rs000064400000000000000000000044411046102023000213750ustar 00000000000000use std::path::Path; use github_actions_models::dependabot::v2::{ Dependabot, Interval, PackageEcosystem, RebaseStrategy, }; use indexmap::IndexSet; fn load_dependabot(name: &str) -> Dependabot { let workflow_path = Path::new(env!("CARGO_MANIFEST_DIR")) .join("tests/sample-dependabot/v2") .join(name); let dependabot_contents = std::fs::read_to_string(workflow_path).unwrap(); serde_yaml::from_str(&dependabot_contents).unwrap() } #[test] fn test_load_all() { let sample_configs = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/sample-dependabot/v2"); for sample_config in std::fs::read_dir(sample_configs).unwrap() { let sample_workflow = sample_config.unwrap().path(); let contents = std::fs::read_to_string(sample_workflow).unwrap(); serde_yaml::from_str::(&contents).unwrap(); } } #[test] fn test_contents() { let dependabot = load_dependabot("sigstore-python.yml"); assert_eq!(dependabot.version, 2); assert_eq!(dependabot.updates.len(), 3); let pip = &dependabot.updates[0]; assert_eq!(pip.package_ecosystem, PackageEcosystem::Pip); assert_eq!(pip.directory, "/"); assert_eq!(pip.schedule.interval, Interval::Daily); assert_eq!(pip.open_pull_requests_limit, 5); // default let github_actions = &dependabot.updates[1]; assert_eq!( github_actions.package_ecosystem, PackageEcosystem::GithubActions ); assert_eq!(github_actions.directory, "/"); assert_eq!(github_actions.open_pull_requests_limit, 99); assert_eq!(github_actions.rebase_strategy, RebaseStrategy::Disabled); assert_eq!(github_actions.groups.len(), 1); assert_eq!( 
github_actions.groups["actions"].patterns, IndexSet::from(["*".to_string()]) ); let github_actions = &dependabot.updates[2]; assert_eq!( github_actions.package_ecosystem, PackageEcosystem::GithubActions ); assert_eq!(github_actions.directory, ".github/actions/upload-coverage/"); assert_eq!(github_actions.open_pull_requests_limit, 99); assert_eq!(github_actions.rebase_strategy, RebaseStrategy::Disabled); assert_eq!(github_actions.groups.len(), 1); assert_eq!( github_actions.groups["actions"].patterns, IndexSet::from(["*".to_string()]) ); } github-actions-models-0.14.0/tests/test_workflow.rs000064400000000000000000000053011046102023000205270ustar 00000000000000use std::{env, path::Path}; use github_actions_models::{ common::expr::{ExplicitExpr, LoE}, workflow::{ event::OptionalBody, job::{RunsOn, StepBody}, Job, Trigger, Workflow, }, }; fn load_workflow(name: &str) -> Workflow { let workflow_path = Path::new(env!("CARGO_MANIFEST_DIR")) .join("tests/sample-workflows") .join(name); let workflow_contents = std::fs::read_to_string(workflow_path).unwrap(); serde_yaml::from_str(&workflow_contents).unwrap() } #[test] fn test_load_all() { let sample_workflows = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/sample-workflows"); for sample_action in std::fs::read_dir(sample_workflows).unwrap() { let sample_workflow = sample_action.unwrap().path(); let workflow_contents = std::fs::read_to_string(sample_workflow).unwrap(); serde_yaml::from_str::(&workflow_contents).unwrap(); } } #[test] fn test_pip_audit_ci() { let workflow = load_workflow("pip-audit-ci.yml"); assert!( matches!(workflow.on, Trigger::Events(events) if matches!(events.pull_request, OptionalBody::Default)) ); let test_job = &workflow.jobs["test"]; let Job::NormalJob(test_job) = test_job else { panic!("expected normal job"); }; assert_eq!(test_job.name, None); assert_eq!( test_job.runs_on, LoE::Literal(RunsOn::Target(vec!["ubuntu-latest".to_string()])) ); assert_eq!(test_job.steps.len(), 3); let StepBody::Uses { uses, with } = &test_job.steps[0].body else { panic!("expected uses step"); }; assert_eq!(uses, "actions/checkout@v4.1.1"); assert!(with.is_empty()); let StepBody::Uses { uses, with } = &test_job.steps[1].body else { panic!("expected uses step"); }; assert_eq!(uses, "actions/setup-python@v5"); assert_eq!(with["python-version"].to_string(), "${{ matrix.python }}"); assert_eq!(with["cache"].to_string(), "pip"); assert_eq!(with["cache-dependency-path"].to_string(), "pyproject.toml"); let StepBody::Run { run, working_directory, shell, env: LoE::Literal(env), } = &test_job.steps[2].body else { panic!("expected run step"); }; assert_eq!(run, "make test PIP_AUDIT_EXTRA=test"); assert!(working_directory.is_none()); assert!(shell.is_none()); assert!(env.is_empty()); } #[test] fn test_runs_on_expr() { let workflow = load_workflow("runs-on-expr.yml"); let job = workflow.jobs.get("check-bats-version").unwrap(); let Job::NormalJob(job) = job else { panic!() }; assert_eq!( job.runs_on, LoE::Expr(ExplicitExpr::from_curly("${{ matrix.runner }}").unwrap()) ); }
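Taken together, these tests pin down the crate's core read path: deserialize a `Workflow` with `serde_yaml`, match each `Job` variant, and inspect each step's `StepBody`. A minimal downstream consumer might look like the following hedged sketch (not shipped with the crate; the fixture path and the defensive catch-all arm are assumptions, since only the `Uses` and `Run` step bodies are exercised in the tests above):

use github_actions_models::workflow::{job::StepBody, Job, Workflow};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Any workflow file will do; the pyca-cryptography fixture is a good stress test.
    let contents = std::fs::read_to_string("tests/sample-workflows/pyca-cryptography-ci.yml")?;
    let workflow: Workflow = serde_yaml::from_str(&contents)?;
    if let Some(Job::NormalJob(job)) = workflow.jobs.get("linux") {
        for step in &job.steps {
            match &step.body {
                StepBody::Uses { uses, .. } => println!("uses: {uses}"),
                StepBody::Run { run, .. } => println!("run: {run}"),
                // Defensive: only the two variants above appear in the tests;
                // any additional step-body variants would land here.
                _ => println!("other step body"),
            }
        }
    }
    Ok(())
}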