pax_global_header00006660000000000000000000000064144074014770014522gustar00rootroot0000000000000052 comment=c5d8db18cd90807e53e5fa5f8bd8e4eb58570c30 protoc-gen-star-2.0.3/000077500000000000000000000000001440740147700145505ustar00rootroot00000000000000protoc-gen-star-2.0.3/.github/000077500000000000000000000000001440740147700161105ustar00rootroot00000000000000protoc-gen-star-2.0.3/.github/workflows/000077500000000000000000000000001440740147700201455ustar00rootroot00000000000000protoc-gen-star-2.0.3/.github/workflows/pre-commit.yml000066400000000000000000000004671440740147700227530ustar00rootroot00000000000000name: pre-commit on: push: branches: - master pull_request: branches: - master jobs: pre-commit: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-go@v2 with: go-version: '1.17' - uses: pre-commit/action@v3.0.0 protoc-gen-star-2.0.3/.github/workflows/test.yml000066400000000000000000000020431440740147700216460ustar00rootroot00000000000000on: push: branches: - master pull_request: branches: - master jobs: test: runs-on: ubuntu-latest env: GOPATH: ${{ github.workspace }} GOBIN: ${{ github.workspace }}/bin defaults: run: working-directory: ${{ env.GOPATH }}/src/github.com/lyft/protoc-gen-star name: protoc version 3.17.0 steps: - uses: actions/checkout@v2 with: fetch-depth: 1 path: ${{ env.GOPATH }}/src/github.com/lyft/protoc-gen-star - name: Set Up Go uses: actions/setup-go@v2 with: go-version: '1.17' - run: mkdir -p $GOPATH/bin - run: wget "https://github.com/protocolbuffers/protobuf/releases/download/v3.17.0/protoc-3.17.0-linux-x86_64.zip" -O /tmp/protoc.zip - run: unzip /tmp/protoc.zip -d /tmp - run: sudo mv /tmp/bin/protoc /usr/local/bin/protoc - run: sudo mv /tmp/include/google /usr/local/include/google - name: Generate Testdata run: make testdata - name: Run Tests run: make tests protoc-gen-star-2.0.3/.gitignore000066400000000000000000000001701440740147700165360ustar00rootroot00000000000000vendor/ bin/ cover.* testdata/fdset.bin 
testdata/generated/ **/*.pb.go **/code_generator_request.pb.bin .idea/ .vscode/ protoc-gen-star-2.0.3/.golangci.yml000066400000000000000000000005701440740147700171360ustar00rootroot00000000000000linters: disable-all: true enable: - deadcode - goconst - gocyclo - gofmt - goimports - gosimple - govet - ineffassign - misspell - revive - structcheck - typecheck - unconvert - unparam - unused - varcheck issues: max-per-linter: 0 max-same-issues: 0 run: build-tags: - integration deadline: 5m protoc-gen-star-2.0.3/.pre-commit-config.yaml000066400000000000000000000002511440740147700210270ustar00rootroot00000000000000default_language_version: python: python3.8 repos: - repo: https://github.com/golangci/golangci-lint rev: v1.42.1 hooks: - id: golangci-lint protoc-gen-star-2.0.3/CODE_OF_CONDUCT.md000066400000000000000000000024711440740147700173530ustar00rootroot00000000000000# Code of Conduct All Lyft open source projects are governed by the Contributor Covenant version 1.4 (http://contributor-covenant.org/version/1/4/code_of_conduct.md). All contributors and participants agree to abide by its terms. To report violations, send an email to oss-coc@lyft.com. Reports can be as short as a notification with a link, but more information will help us understand what is happening. You can include: * Your contact information (e.g. 
your github username), if you want to identify yourself * Your account of the incident: * Where and when it happened * A description of the unacceptable behavior * Who was involved and who saw it happen * Whether the incident is ongoing * Any additional information that will help us fully understand the problem, such as previous incidents or special circumstances * Links to public records of the incident, if any * Screenshots showing what happened ## Attribution * Governance section adapted from [Contributor Covenant's CODE_OF_CONDUCT.md](https://github.com/ContributorCovenant/contributor_covenant/blob/master/CODE_OF_CONDUCT.md), which is licensed under the MIT license. * Report information adapted from [Wikimedia Foundation's Code of Conduct draft](https://www.mediawiki.org/wiki/Code_of_Conduct/Draft), which is licensed under the CC BY-SA 3.0 license.protoc-gen-star-2.0.3/LICENSE000066400000000000000000000261351440740147700155640ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.protoc-gen-star-2.0.3/Makefile000066400000000000000000000067001440740147700162130ustar00rootroot00000000000000# the name of this package PKG := $(shell go list .) PROTOC_VER := $(shell protoc --version | cut -d' ' -f2) .PHONY: bootstrap bootstrap: testdata # set up the project for development .PHONY: quick quick: testdata # runs all tests without the race detector or coverage ifeq ($(PROTOC_VER), 3.17.0) go test $(PKGS) --tags=proto3_presence else go test $(PKGS) endif .PHONY: tests tests: testdata # runs all tests against the package with race detection and coverage percentage ifeq ($(PROTOC_VER), 3.17.0) go test -race -cover ./... --tags=proto3_presence else go test -race -cover ./... endif .PHONY: cover cover: testdata # runs all tests against the package, generating a coverage report and opening it in the browser ifeq ($(PROTOC_VER), 3.17.0) go test -race -covermode=atomic -coverprofile=cover.out ./... --tags=proto3_presence || true else go test -race -covermode=atomic -coverprofile=cover.out ./... 
|| true endif go tool cover -html cover.out -o cover.html open cover.html .PHONY: docs docs: # starts a doc server and opens a browser window to this package (sleep 2 && open http://localhost:6060/pkg/$(PKG)/) & godoc -http=localhost:6060 .PHONY: testdata testdata: testdata-graph testdata-go testdata/generated testdata/fdset.bin # generate all testdata .PHONY: testdata-graph testdata-graph: bin/protoc-gen-debug # parses the proto file sets in testdata/graph and renders binary CodeGeneratorRequest set -e; for subdir in `find ./testdata/graph -mindepth 1 -maxdepth 1 -type d`; do \ protoc -I ./testdata/graph \ --plugin=protoc-gen-debug=./bin/protoc-gen-debug \ --debug_out="$$subdir:$$subdir" \ `find $$subdir -name "*.proto"`; \ done testdata/generated: protoc-gen-go bin/protoc-gen-example go install google.golang.org/protobuf/cmd/protoc-gen-go rm -rf ./testdata/generated && mkdir -p ./testdata/generated # generate the official go code, must be one directory at a time set -e; for subdir in `find ./testdata/protos -mindepth 1 -type d`; do \ files=`find $$subdir -maxdepth 1 -name "*.proto"`; \ [ ! 
-z "$$files" ] && \ protoc -I ./testdata/protos \ --go_out="$$GOPATH/src" \ $$files; \ done # generate using our demo plugin, don't need to go directory at a time set -e; for subdir in `find ./testdata/protos -mindepth 1 -maxdepth 1 -type d`; do \ protoc -I ./testdata/protos \ --plugin=protoc-gen-example=./bin/protoc-gen-example \ --example_out="paths=source_relative:./testdata/generated" \ `find $$subdir -name "*.proto"`; \ done testdata/fdset.bin: @protoc -I ./testdata/protos \ -o ./testdata/fdset.bin \ --include_imports \ testdata/protos/**/*.proto .PHONY: testdata-go testdata-go: protoc-gen-go bin/protoc-gen-debug # generate go-specific testdata cd lang/go && $(MAKE) \ testdata-names \ testdata-packages \ testdata-outputs ifeq ($(PROTOC_VER), 3.17.0) cd lang/go && $(MAKE) \ testdata-presence endif .PHONY: protoc-gen-go protoc-gen-go: go install google.golang.org/protobuf/cmd/protoc-gen-go bin/protoc-gen-example: # creates the demo protoc plugin for demonstrating uses of PG* go build -o ./bin/protoc-gen-example ./testdata/protoc-gen-example bin/protoc-gen-debug: # creates the protoc-gen-debug protoc plugin for output ProtoGeneratorRequest messages go build -o ./bin/protoc-gen-debug ./protoc-gen-debug .PHONY: clean clean: rm -rf bin rm -rf testdata/generated set -e; for f in `find . -name *.pb.bin`; do \ rm $$f; \ done set -e; for f in `find . -name *.pb.go`; do \ rm $$f; \ done protoc-gen-star-2.0.3/README.md000066400000000000000000000523751440740147700160430ustar00rootroot00000000000000# protoc-gen-star (PG*) [![Build Status](https://travis-ci.org/lyft/protoc-gen-star.svg?branch=master)](https://travis-ci.org/lyft/protoc-gen-star) [![GoDoc](https://godoc.org/github.com/lyft/protoc-gen-star?status.svg)](https://godoc.org/github.com/lyft/protoc-gen-star) **!!! 
THIS PROJECT IS A WORK-IN-PROGRESS | THE API SHOULD BE CONSIDERED UNSTABLE !!!** _PG* is a protoc plugin library for efficient proto-based code generation_ ```go package main import "github.com/lyft/protoc-gen-star/v2" func main() { pgs.Init(pgs.DebugEnv("DEBUG")). RegisterModule(&myPGSModule{}). RegisterPostProcessor(&myPostProcessor{}). Render() } ``` ## Features ### Documentation While this README seeks to describe many of the nuances of `protoc` plugin development and using PG*, the true documentation source is the code itself. The Go language is self-documenting and provides tools for easily reading through it and viewing examples. The docs can be viewed on [GoDoc](https://godoc.org/github.com/lyft/protoc-gen-star) or locally by running `make docs`, which will start a `godoc` server and open them in the default browser. ### Roadmap - [x] Interface-based and fully-linked dependency graph with access to raw descriptors - [x] Built-in context-aware debugging capabilities - [x] Exhaustive, near 100% unit test coverage - [x] End-to-end testable via overrideable IO & Interface based API - [x] [`Visitor`][visitor] pattern and helpers for efficiently walking the dependency graph - [x] [`BuildContext`][context] to facilitate complex generation - [x] Parsed, typed command-line [`Parameters`][params] access - [x] Extensible `ModuleBase` for quickly creating `Modules` and facilitating code generation - [x] Configurable post-processing (eg, gofmt) of generated files - [x] Support processing proto files from multiple packages - [x] Load comments (via SourceCodeInfo) from proto files into gathered AST for easy access - [x] Language-specific helper subpackages for handling common, nuanced generation tasks - [ ] Load plugins/modules at runtime using Go shared libraries ### Examples [`protoc-gen-example`][pge], can be found in the `testdata` directory. It includes two `Module` implementations using a variety of the features available. 
It's `protoc` execution is included in the `testdata/generated` [Makefile][make] target. Examples are also accessible via the documentation by running `make docs`. ## How It Works ### The `protoc` Flow Because the process is somewhat confusing, this section will cover the entire flow of how proto files are converted to generated code, using a hypothetical PG* plugin: `protoc-gen-myplugin`. A typical execution looks like this: ```sh protoc \ -I . \ --myplugin_out="foo=bar:../generated" \ ./pkg/*.proto ``` `protoc`, the PB compiler, is configured using a set of flags (documented under `protoc -h`) and handed a set of files as arguments. In this case, the `I` flag can be specified multiple times and is the lookup path it uses for imported dependencies in a proto file. By default, the official descriptor protos are already included. `myplugin_out` tells `protoc` to use the `protoc-gen-myplugin` protoc-plugin. These plugins are automatically resolved from the system's `PATH` environment variable, or can be explicitly specified with another flag. The official protoc-plugins (eg, `protoc-gen-python`) are already registered with `protoc`. The flag's value is specific to the particular plugin, with the exception of the `:../generated` suffix. This suffix indicates the root directory in which `protoc` will place the generated files from that package (relative to the current working directory). This generated output directory is _not_ propagated to `protoc-gen-myplugin`, however, so it needs to be duplicated in the left-hand side of the flag. PG* supports this via an `output_path` parameter. `protoc` parses the passed in proto files, ensures they are syntactically correct, and loads any imported dependencies. It converts these files and the dependencies into descriptors (which are themselves PB messages) and creates a `CodeGeneratorRequest` (yet another PB). `protoc` serializes this request and then executes each configured protoc-plugin, sending the payload via `stdin`. 
`protoc-gen-myplugin` starts up, receiving the request payload, which it unmarshals. There are two phases to a PG*-based protoc-plugin. First, PG* unmarshals the `CodeGeneratorRequest` received from `protoc`, and creates a fully connected abstract syntax tree (AST) of each file and all its contained entities. Any parameters specified for this plugin are also parsed for later consumption. When this step is complete, PG* then executes any registered `Modules`, handing it the constructed AST. `Modules` can be written to generate artifacts (eg, files) or just performing some form of validation over the provided graph without any other side effects. `Modules` provide the great flexibility in terms of operating against the PBs. Once all `Modules` are run, PG* writes any custom artifacts to the file system or serializes generator-specific ones into a `CodeGeneratorResponse` and sends the data to its `stdout`. `protoc` receives this payload, unmarshals it, and persists any requested files to disk after all its plugins have returned. This whole flow looks something like this: ``` foo.proto → protoc → CodeGeneratorRequest → protoc-gen-myplugin → CodeGeneratorResponse → protoc → foo.pb.go ``` The PG* library hides away nearly all of this complexity required to implement a protoc-plugin! ### Modules PG* `Modules` are handed a complete AST for those files that are targeted for generation as well as all dependencies. A `Module` can then add files to the protoc `CodeGeneratorResponse` or write files directly to disk as `Artifacts`. PG* provides a `ModuleBase` struct to simplify developing modules. Out of the box, it satisfies the interface for a `Module`, only requiring the creation of `Name` and `Execute` methods. `ModuleBase` is best used as an anonyomous embedded field of a wrapping `Module` implementation. 
A minimal module would look like the following: ```go // ReportModule creates a report of all the target messages generated by the // protoc run, writing the file into the /tmp directory. type reportModule struct { *pgs.ModuleBase } // New configures the module with an instance of ModuleBase func New() pgs.Module { return &reportModule{&pgs.ModuleBase{}} } // Name is the identifier used to identify the module. This value is // automatically attached to the BuildContext associated with the ModuleBase. func (m *reportModule) Name() string { return "reporter" } // Execute is passed the target files as well as its dependencies in the pkgs // map. The implementation should return a slice of Artifacts that represent // the files to be generated. In this case, "/tmp/report.txt" will be created // outside of the normal protoc flow. func (m *reportModule) Execute(targets map[string]pgs.File, pkgs map[string]pgs.Package) []pgs.Artifact { buf := &bytes.Buffer{} for _, f := range targets { m.Push(f.Name().String()).Debug("reporting") fmt.Fprintf(buf, "--- %v ---", f.Name()) for i, msg := range f.AllMessages() { fmt.Fprintf(buf, "%03d. %v\n", i, msg.Name()) } m.Pop() } m.OverwriteCustomFile( "/tmp/report.txt", buf.String(), 0644, ) return m.Artifacts() } ``` `ModuleBase` exposes a PG* [`BuildContext`][context] instance, already prefixed with the module's name. Calling `Push` and `Pop` allows adding further information to error and debugging messages. Above, each file from the target package is pushed onto the context before logging the "reporting" debug message. The base also provides helper methods for adding or overwriting both protoc-generated and custom files. The above execute method creates a custom file at `/tmp/report.txt` specifying that it should overwrite an existing file with that name. If it instead called `AddCustomFile` and the file existed, no file would have been generated (though a debug message would be logged out). 
Similar methods exist for adding generator files, appends, and injections. Likewise, methods such as `AddCustomTemplateFile` allows for `Templates` to be rendered instead. After all modules have been executed, the returned `Artifacts` are either placed into the `CodeGenerationResponse` payload for protoc or written out to the file system. For testing purposes, the file system has been abstracted such that a custom one (such as an in-memory FS) can be provided to the PG* generator with the `FileSystem` `InitOption`. #### Post Processing `Artifacts` generated by `Modules` sometimes require some mutations prior to writing to disk or sending in the response to protoc. This could range from running `gofmt` against Go source or adding copyright headers to all generated source files. To simplify this task in PG*, a `PostProcessor` can be utilized. A minimal looking `PostProcessor` implementation might look like this: ```go // New returns a PostProcessor that adds a copyright comment to the top // of all generated files. func New(owner string) pgs.PostProcessor { return copyrightPostProcessor{owner} } type copyrightPostProcessor struct { owner string } // Match returns true only for Custom and Generated files (including templates). func (cpp copyrightPostProcessor) Match(a pgs.Artifact) bool { switch a := a.(type) { case pgs.GeneratorFile, pgs.GeneratorTemplateFile, pgs.CustomFile, pgs.CustomTemplateFile: return true default: return false } } // Process attaches the copyright header to the top of the input bytes func (cpp copyrightPostProcessor) Process(in []byte) (out []byte, err error) { cmt := fmt.Sprintf("// Copyright © %d %s. All rights reserved\n", time.Now().Year(), cpp.owner) return append([]byte(cmt), in...), nil } ``` The `copyrightPostProcessor` struct satisfies the `PostProcessor` interface by implementing the `Match` and `Process` methods. After PG* recieves all `Artifacts`, each is handed in turn to each registered processor's `Match` method. 
In the above case, we return `true` if the file is a part of the targeted Artifact types. If `true` is returned, `Process` is immediately called with the rendered contents of the file. This method mutates the input, returning the modified value to out or an error if something goes wrong. Above, the notice is prepended to the input. PostProcessors are registered with PG* similar to `Modules`: ```go g := pgs.Init(pgs.IncludeGo()) g.RegisterModule(some.NewModule()) g.RegisterPostProcessor(copyright.New("PG* Authors")) ``` ## Protocol Buffer AST While `protoc` ensures that all the dependencies required to generate a proto file are loaded in as descriptors, it's up to the protoc-plugins to recognize the relationships between them. To get around this, PG* uses constructs an abstract syntax tree (AST) of all the `Entities` loaded into the plugin. This AST is provided to every `Module` to facilitate code generation. ### Hierarchy The hierarchy generated by the PG* `gatherer` is fully linked, starting at a top-level `Package` down to each individual `Field` of a `Message`. The AST can be represented with the following digraph:

A `Package` describes a set of `Files` loaded within the same namespace. As would be expected, a `File` represents a single proto file, which contains any number of `Message`, `Enum` or `Service` entities. An `Enum` describes an integer-based enumeration type, containing each individual `EnumValue`. A `Service` describes a set of RPC `Methods`, which in turn refer to their input and output `Messages`. A `Message` can contain other nested `Messages` and `Enums` as well as each of its `Fields`. For non-scalar types, a `Field` may also reference its `Message` or `Enum` type. As a mechanism for achieving union types, a `Message` can also contain `OneOf` entities that refer to some of its `Fields`. ### Visitor Pattern The structure of the AST can be fairly complex and unpredictable. Likewise, `Module's` are typically concerned with only a subset of the entities in the graph. To separate the `Module's` algorithm from understanding and traversing the structure of the AST, PG* implements the `Visitor` pattern to decouple the two. Implementing this interface is straightforward and can greatly simplify code generation. Two base `Visitor` structs are provided by PG* to simplify developing implementations. First, the `NilVisitor` returns an instance that short-circuits execution for all Entity types. This is useful when certain branches of the AST are not interesting to code generation. For instance, if the `Module` is only concerned with `Services`, it can use a `NilVisitor` as an anonymous field and only implement the desired interface methods: ```go // ServiceVisitor logs out each Method's name type serviceVisitor struct { pgs.Visitor pgs.DebuggerCommon } func New(d pgs.DebuggerCommon) pgs.Visitor { return serviceVistor{ Visitor: pgs.NilVisitor(), DebuggerCommon: d, } } // Passthrough Packages, Files, and Services. All other methods can be // ignored since Services can only live in Files and Files can only live in a // Package. 
func (v serviceVisitor) VisitPackage(pgs.Package) (pgs.Visitor, error) { return v, nil } func (v serviceVisitor) VisitFile(pgs.File) (pgs.Visitor, error) { return v, nil } func (v serviceVisitor) VisitService(pgs.Service) (pgs.Visitor, error) { return v, nil } // VisitMethod logs out ServiceName#MethodName for m. func (v serviceVisitor) VisitMethod(m pgs.Method) (pgs.Visitor, error) { v.Logf("%v#%v", m.Service().Name(), m.Name()) return nil, nil } ``` If access to deeply nested `Nodes` is desired, a `PassthroughVisitor` can be used instead. Unlike `NilVisitor` and as the name suggests, this implementation passes through all nodes instead of short-circuiting on the first unimplemented interface method. Setup of this type as an anonymous field is a bit more complex but avoids implementing each method of the interface explicitly: ```go type fieldVisitor struct { pgs.Visitor pgs.DebuggerCommon } func New(d pgs.DebuggerCommon) pgs.Visitor { v := &fieldVisitor{DebuggerCommon: d} v.Visitor = pgs.PassThroughVisitor(v) return v } func (v *fieldVisitor) VisitField(f pgs.Field) (pgs.Visitor, error) { v.Logf("%v.%v", f.Message().Name(), f.Name()) return nil, nil } ``` Walking the AST with any `Visitor` is straightforward: ```go v := visitor.New(d) err := pgs.Walk(v, pkg) ``` All `Entity` types and `Package` can be passed into `Walk`, allowing for starting a `Visitor` lower than the top-level `Package` if desired. ## Build Context `Modules` registered with the PG* `Generator` are initialized with an instance of `BuildContext` that encapsulates contextual paths, debugging, and parameter information. ### Output Paths The `BuildContext's` `OutputPath` method returns the output directory that the PG* plugin is targeting. This path is also initially `.` but refers to the directory in which `protoc` is executed. This default behavior can be overridden by providing an `output_path` in the flag. 
The `OutputPath` can be used to create file names for `Artifacts`, using `JoinPath(name ...string)` which is essentially an alias for `filepath.Join(ctx.OutputPath(), name...)`. Manually tracking directories relative to the `OutputPath` can be tedious, especially if the names are dynamic. Instead, a `BuildContext` can manage these, via `PushDir` and `PopDir`. ```go ctx.OutputPath() // foo ctx.JoinPath("fizz", "buzz.go") // foo/fizz/buzz.go ctx = ctx.PushDir("bar/baz") ctx.OutputPath() // foo/bar/baz ctx.JoinPath("quux.go") // foo/bar/baz/quux.go ctx = ctx.PopDir() ctx.OutputPath() // foo ``` `ModuleBase` wraps these methods to mutate their underlying `BuildContexts`. Those methods should be used instead of the ones on the contained `BuildContext` directly. ### Debugging The `BuildContext` exposes a `DebuggerCommon` interface which provides utilities for logging, error checking, and assertions. `Log` and the formatted `Logf` print messages to `os.Stderr`, typically prefixed with the `Module` name. `Debug` and `Debugf` behave the same, but only print if enabled via the `DebugMode` or `DebugEnv` `InitOptions`. `Fail` and `Failf` immediately stops execution of the protoc-plugin and causes `protoc` to fail generation with the provided message. `CheckErr` and `Assert` also fail with the provided messages if an error is passed in or if an expression evaluates to false, respectively. Additional contextual prefixes can be provided by calling `Push` and `Pop` on the `BuildContext`. This behavior is similar to `PushDir` and `PopDir` but only impacts log messages. `ModuleBase` wraps these methods to mutate their underlying `BuildContexts`. Those methods should be used instead of the ones on the contained `BuildContext` directly. ### Parameters The `BuildContext` also provides access to the pre-processed `Parameters` from the specified protoc flag. The only PG*-specific key expected is "output_path", which is utilized by a module's `BuildContext` for its `OutputPath`. 
PG* permits mutating the `Parameters` via the `MutateParams` `InitOption`. By passing in a `ParamMutator` function here, these KV pairs can be modified or verified before the PGG workflow begins. ## Language-Specific Subpackages While implemented in Go, PG* seeks to be language agnostic in what it can do. Therefore, beyond the pre-generated base descriptor types, PG* has no dependencies on the protoc-gen-go (PGG) package. However, there are many nuances that each language's protoc-plugin introduces that can be generalized. For instance, PGG package naming, import paths, and output paths are a complex interaction of the proto package name, the `go_package` file option, and parameters passed to protoc. While PG*'s core API should not be overloaded with many language-specific methods, subpackages can be provided that can operate on `Parameters` and `Entities` to derive the appropriate results. PG* currently implements the [pgsgo](https://godoc.org/github.com/lyft/protoc-gen-star/v2/lang/go/) subpackage to provide these utilities to plugins targeting the Go language. Future subpackages are planned to support a variety of languages. ## PG* Development & Make Targets PG* seeks to provide all the tools necessary to rapidly and ergonomically extend and build on top of the Protocol Buffer IDL. Whether the goal is to modify the official protoc-gen-go output or create entirely new files and packages, this library should offer a user-friendly wrapper around the complexities of the PB descriptors and the protoc-plugin workflow. ### Setup PG* can be installed and developed like any standard Go module: ```sh go get -u github.com/lyft/protoc-gen-star/v2 ``` ### Linting & Static Analysis To avoid style nits and also to enforce some best practices for Go packages, PG* requires passing `golint`, `go vet`, and `go fmt -s` for all code changes. ```sh make lint ``` ### Testing PG* strives to have near 100% code coverage by unit tests. 
Most unit tests are run in parallel to catch potential race conditions. There are three ways of running unit tests, each taking longer than the next but providing more insight into test coverage: ```sh # run code generation for the data used by the tests make testdata # run unit tests without race detection or code coverage reporting make quick # run unit tests with race detection and code coverage make tests # run unit tests with race detection and generates a code coverage report, opening in a browser make cover ``` #### protoc-gen-debug PG* comes with a specialized protoc-plugin, `protoc-gen-debug`. This plugin captures the CodeGeneratorRequest from a protoc execution and saves the serialized PB to disk. These files can be used as inputs to prevent calling protoc from tests. ### Documentation Go is a self-documenting language, and provides a built in utility to view locally: `godoc`. The following command starts a godoc server and opens a browser window to this package's documentation. If you see a 404 or unavailable page initially, just refresh. ```sh make docs ``` ### Demo PG* comes with a "kitchen sink" example: [`protoc-gen-example`][pge]. This protoc plugin built on top of PG* prints out the target package's AST as a tree to stderr. This provides an end-to-end way of validating each of the nuanced types and nesting in PB descriptors: ```sh # create the example PG*-based plugin make bin/protoc-gen-example # run protoc-gen-example against the demo protos make testdata/generated ``` #### CI PG* uses [TravisCI][travis] to validate all code changes. Please view the [configuration][travis.yml] for what tests are involved in the validation. 
[pgg]: https://github.com/golang/protobuf/tree/master/protoc-gen-go [pge]: https://github.com/lyft/protoc-gen-star/tree/master/testdata/protoc-gen-example [travis]: https://travis-ci.com/lyft/protoc-gen-star [travis.yml]: https://github.com/lyft/protoc-gen-star/tree/master/.travis.yml [module]: https://github.com/lyft/protoc-gen-star/blob/master/module.go [pb]: https://developers.google.com/protocol-buffers/ [context]: https://github.com/lyft/protoc-gen-star/tree/master/build_context.go [visitor]: https://github.com/lyft/protoc-gen-star/tree/master/node.go [params]: https://github.com/lyft/protoc-gen-star/tree/master/parameters.go [make]: https://github.com/lyft/protoc-gen-star/blob/master/Makefile [single]: https://github.com/golang/protobuf/pull/40 protoc-gen-star-2.0.3/artifact.go000066400000000000000000000240551440740147700167020ustar00rootroot00000000000000package pgs import ( "bytes" "errors" "io" "os" "path/filepath" "strings" "google.golang.org/protobuf/proto" plugin_go "google.golang.org/protobuf/types/pluginpb" ) // An Artifact describes the output for a Module. Typically this is the creation // of a file either directly against the file system or via protoc. type Artifact interface { artifact() } // A Template to use for rendering artifacts. Either text/template or // html/template Template types satisfy this interface. type Template interface { Execute(w io.Writer, data interface{}) error } // GeneratorArtifact describes an Artifact that uses protoc for code generation. // GeneratorArtifacts must be valid UTF8. To create binary files, use one of // the "custom" Artifact types. type GeneratorArtifact interface { Artifact // ProtoFile converts the GeneratorArtifact to a CodeGeneratorResponse_File, // which is handed to protoc to actually write the file to disk. An error is // returned if Artifact cannot be converted. 
ProtoFile() (*plugin_go.CodeGeneratorResponse_File, error) } // TemplateArtifact contains the shared logic used by Artifacts that render // their contents using a Template. type TemplateArtifact struct { // The Template to use for rendering. Either text/template or html/template // Template types are supported. Template Template // Data is arbitrary data passed into the Template's Execute method. Data interface{} } func (ta TemplateArtifact) render() (string, error) { buf := &bytes.Buffer{} if err := ta.Template.Execute(buf, ta.Data); err != nil { return "", err } return buf.String(), nil } // A GeneratorFile Artifact describes a file to be generated using protoc. type GeneratorFile struct { GeneratorArtifact // Name of the file to generate, relative to the protoc-plugin's generation // output directory. Name string // Contents are the body of the file. Contents string // Overwrite specifies whether or not this file should replace another file // with the same name if a prior Plugin or Module has created one. Overwrite bool } // ProtoFile satisfies the GeneratorArtifact interface. An error is returned if // the name field is not a path relative to and within the protoc-plugin's // generation output directory. func (f GeneratorFile) ProtoFile() (*plugin_go.CodeGeneratorResponse_File, error) { name, err := cleanGeneratorFileName(f.Name) if err != nil { return nil, err } return &plugin_go.CodeGeneratorResponse_File{ Name: proto.String(name), Content: proto.String(f.Contents), }, nil } // A GeneratorTemplateFile describes a file to be generated using protoc from // a Template. type GeneratorTemplateFile struct { GeneratorArtifact TemplateArtifact // Name of the file to generate, relative to the protoc-plugin's generation // output directory. Name string // Overwrite specifies whether or not this file should replace another file // with the same name if a prior Plugin or Module has created one. Overwrite bool } // ProtoFile satisfies the GeneratorArtifact interface. 
An error is returned if // the name field is not a path relative to and within the protoc-plugin's // generation output directory or if there is an error executing the Template. func (f GeneratorTemplateFile) ProtoFile() (*plugin_go.CodeGeneratorResponse_File, error) { name, err := cleanGeneratorFileName(f.Name) if err != nil { return nil, err } content, err := f.render() if err != nil { return nil, err } return &plugin_go.CodeGeneratorResponse_File{ Name: proto.String(name), Content: proto.String(content), }, nil } // A GeneratorAppend Artifact appends content to the end of the specified protoc // generated file. This Artifact can only be used if another Module generates a // file with the same name. type GeneratorAppend struct { GeneratorArtifact // Filename of the file to append to, relative to the protoc-plugin's generation // output directory. FileName string // Contents to be appended to the file Contents string } // ProtoFile satisfies the GeneratorArtifact interface. An error is returned if // the name field is not a path relative to and within the protoc-plugin's // generation output directory. func (f GeneratorAppend) ProtoFile() (*plugin_go.CodeGeneratorResponse_File, error) { if _, err := cleanGeneratorFileName(f.FileName); err != nil { return nil, err } return &plugin_go.CodeGeneratorResponse_File{ Content: proto.String(f.Contents), }, nil } // A GeneratorTemplateAppend appends content to a protoc-generated file from a // Template. See GeneratorAppend for limitations. type GeneratorTemplateAppend struct { GeneratorArtifact TemplateArtifact // Filename of the file to append to, relative to the protoc-plugin's generation // output directory. FileName string } // ProtoFile satisfies the GeneratorArtifact interface. An error is returned if // the name field is not a path relative to and within the protoc-plugin's // generation output directory or if there is an error executing the Template. 
func (f GeneratorTemplateAppend) ProtoFile() (*plugin_go.CodeGeneratorResponse_File, error) { if _, err := cleanGeneratorFileName(f.FileName); err != nil { return nil, err } content, err := f.render() if err != nil { return nil, err } return &plugin_go.CodeGeneratorResponse_File{ Content: proto.String(content), }, nil } // A GeneratorInjection Artifact inserts content into a protoc-generated file // at the specified insertion point. The target file does not need to be // generated by this protoc-plugin but must be generated by a prior plugin // executed by protoc. type GeneratorInjection struct { GeneratorArtifact // Filename of the file to inject into, relative to the protoc-plugin's // generation output directory. FileName string // The name of the insertion point to inject into InsertionPoint string // Contents to be inject into the file Contents string } // ProtoFile satisfies the GeneratorArtifact interface. An error is returned if // the name field is not a path relative to and within the protoc-plugin's // generation output directory. func (f GeneratorInjection) ProtoFile() (*plugin_go.CodeGeneratorResponse_File, error) { name, err := cleanGeneratorFileName(f.FileName) if err != nil { return nil, err } return &plugin_go.CodeGeneratorResponse_File{ Name: proto.String(name), InsertionPoint: proto.String(f.InsertionPoint), Content: proto.String(f.Contents), }, nil } // A GeneratorTemplateInjection Artifact inserts content rendered from a // Template into protoc-generated file at the specified insertion point. The // target file does not need to be generated by this protoc-plugin but must be // generated by a prior plugin executed by protoc. type GeneratorTemplateInjection struct { GeneratorArtifact TemplateArtifact // Filename of the file to inject into, relative to the protoc-plugin's // generation output directory. FileName string // The name of the insertion point to inject into InsertionPoint string } // ProtoFile satisfies the GeneratorArtifact interface. 
An error is returned if // the name field is not a path relative to and within the protoc-plugin's // generation output directory or if there is an error executing the Template. func (f GeneratorTemplateInjection) ProtoFile() (*plugin_go.CodeGeneratorResponse_File, error) { name, err := cleanGeneratorFileName(f.FileName) if err != nil { return nil, err } content, err := f.render() if err != nil { return nil, err } return &plugin_go.CodeGeneratorResponse_File{ Name: proto.String(name), InsertionPoint: proto.String(f.InsertionPoint), Content: proto.String(content), }, nil } // CustomFile Artifacts are files generated directly against the file system, // and do not use protoc for the generation. CustomFiles should be used over // GeneratorFiles when custom permissions need to be set (such as executable // scripts or read-only configs) or when the file needs to be created outside // of the protoc-plugin's generation output directory. type CustomFile struct { Artifact // Name of the file to generate. If relative, the file is created relative to // the directory in which protoc is executed. If absolute, the file is // created as specified. Name string // Contents are the body of the file. Contents string // Perms are the file permission to generate the file with. Note that the // umask of the process will be applied against these permissions. Perms os.FileMode // Overwrite indicates if an existing file on disk should be overwritten by // this file. Overwrite bool } // CustomTemplateFile Artifacts are files generated from a Template directly // against the file system, and do not use protoc for the generation. // CustomFiles should be used over GeneratorFiles when custom permissions need // to be set (such as executable scripts or read-only configs) or when the file // needs to be created outside of the protoc-plugin's generation output // directory. type CustomTemplateFile struct { Artifact TemplateArtifact // Name of the file to generate. 
If relative, the file is created relative to // the directory in which protoc is executed. If absolute, the file is // created as specified. Name string // Perms are the file permission to generate the file with. Note that the // umask of the process will be applied against these permissions. Perms os.FileMode // Overwrite indicates if an existing file on disk should be overwritten by // this file. Overwrite bool } func cleanGeneratorFileName(name string) (string, error) { if filepath.IsAbs(name) { return "", errors.New("generator file names must be relative paths") } if name = filepath.ToSlash(filepath.Clean(name)); name == "." || strings.HasPrefix(name, "..") { return "", errors.New("generator file names must not contain . or .. within them") } return name, nil } // GeneratorError Artifacts are strings describing errors that happened in the // code generation, but have not been fatal. They'll be used to populate the // CodeGeneratorResponse's `error` field. Since that field is a string, multiple // GeneratorError Artifacts will be concatenated. type GeneratorError struct { Artifact Message string } protoc-gen-star-2.0.3/artifact_test.go000066400000000000000000000060441440740147700177370ustar00rootroot00000000000000package pgs import ( "testing" "text/template" "github.com/stretchr/testify/assert" ) var ( badArtifactTpl = template.Must(template.New("bad").Parse("{{ .NonExistentField }}")) artifactTpl = template.Must(template.New("foo").Parse("{{ .
}}")) ) const fName = "foo" func TestGeneratorFile_ProtoFile(t *testing.T) { t.Parallel() f := GeneratorFile{ Name: "..", Contents: "bar", } pb, err := f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.Name = fName pb, err = f.ProtoFile() assert.NoError(t, err) assert.Equal(t, f.Name, pb.GetName()) assert.Equal(t, f.Contents, pb.GetContent()) } func TestGeneratorTemplateFile_ProtoFile(t *testing.T) { t.Parallel() f := GeneratorTemplateFile{ Name: ".", TemplateArtifact: TemplateArtifact{ Template: badArtifactTpl, Data: "bar", }, } pb, err := f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.Name = fName pb, err = f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.Template = artifactTpl pb, err = f.ProtoFile() assert.NoError(t, err) assert.Equal(t, f.Name, pb.GetName()) assert.Equal(t, "bar", pb.GetContent()) } func TestGeneratorAppend_ProtoFile(t *testing.T) { t.Parallel() f := GeneratorAppend{ FileName: ".", Contents: "bar", } pb, err := f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.FileName = fName pb, err = f.ProtoFile() assert.NoError(t, err) assert.Empty(t, pb.GetName()) assert.Equal(t, f.Contents, pb.GetContent()) } func TestGeneratorTemplateAppend_ProtoFile(t *testing.T) { t.Parallel() f := GeneratorTemplateAppend{ FileName: "/tmp", TemplateArtifact: TemplateArtifact{ Template: badArtifactTpl, Data: "bar", }, } pb, err := f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.FileName = fName pb, err = f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.Template = artifactTpl pb, err = f.ProtoFile() assert.NoError(t, err) assert.Empty(t, pb.GetName()) assert.Equal(t, "bar", pb.GetContent()) } func TestGeneratorInjection_ProtoFile(t *testing.T) { t.Parallel() f := GeneratorInjection{ FileName: "..", Contents: "bar", InsertionPoint: "baz", } pb, err := f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.FileName = fName pb, err = f.ProtoFile() assert.NoError(t, err) assert.Equal(t, f.FileName, pb.GetName()) assert.Equal(t, 
f.Contents, pb.GetContent()) assert.Equal(t, f.InsertionPoint, pb.GetInsertionPoint()) } func TestGeneratorTemplateInjection_ProtoFile(t *testing.T) { t.Parallel() f := GeneratorTemplateInjection{ FileName: ".", InsertionPoint: "baz", TemplateArtifact: TemplateArtifact{ Template: badArtifactTpl, Data: "bar", }, } pb, err := f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.FileName = fName pb, err = f.ProtoFile() assert.Error(t, err) assert.Nil(t, pb) f.Template = artifactTpl pb, err = f.ProtoFile() assert.NoError(t, err) assert.Equal(t, f.FileName, pb.GetName()) assert.Equal(t, "bar", pb.GetContent()) assert.Equal(t, f.InsertionPoint, pb.GetInsertionPoint()) } protoc-gen-star-2.0.3/ast.go000066400000000000000000000263271440740147700157000ustar00rootroot00000000000000package pgs import ( descriptor "google.golang.org/protobuf/types/descriptorpb" plugin_go "google.golang.org/protobuf/types/pluginpb" ) // AST encapsulates the entirety of the input CodeGeneratorRequest from protoc, // parsed to build the Entity graph used by PG*. type AST interface { // Targets returns a map of the files specified in the protoc execution. For // all Entities contained in these files, BuildTarget will return true. Targets() map[string]File // Packages returns all the imported packages (including those for the target // Files). This is limited to just the files that were imported by target // protos, either directly or transitively. Packages() map[string]Package // Lookup allows getting an Entity from the graph by its fully-qualified name // (FQN). The FQN uses dot notation of the form ".{package}.{entity}", or the // input path for Files. 
Lookup(name string) (Entity, bool) } type graph struct { d Debugger targets map[string]File packages map[string]Package entities map[string]Entity extensions []Extension } func (g *graph) Targets() map[string]File { return g.targets } func (g *graph) Packages() map[string]Package { return g.packages } func (g *graph) Lookup(name string) (Entity, bool) { e, ok := g.entities[name] return e, ok } // ProcessDescriptors is deprecated; use ProcessCodeGeneratorRequest instead func ProcessDescriptors(debug Debugger, req *plugin_go.CodeGeneratorRequest) AST { return ProcessCodeGeneratorRequest(debug, req) } // ProcessCodeGeneratorRequest converts a CodeGeneratorRequest from protoc into a fully // connected AST entity graph. An error is returned if the input is malformed. func ProcessCodeGeneratorRequest(debug Debugger, req *plugin_go.CodeGeneratorRequest) AST { g := &graph{ d: debug, targets: make(map[string]File, len(req.GetFileToGenerate())), packages: make(map[string]Package), entities: make(map[string]Entity), extensions: []Extension{}, } for _, f := range req.GetFileToGenerate() { g.targets[f] = nil } for _, f := range req.GetProtoFile() { pkg := g.hydratePackage(f) pkg.addFile(g.hydrateFile(pkg, f)) } for _, e := range g.extensions { e.addType(g.hydrateFieldType(e)) extendee := g.mustSeen(e.Descriptor().GetExtendee()).(Message) e.setExtendee(extendee) if extendee != nil { extendee.addExtension(e) } } return g } // ProcessCodeGeneratorRequestBidirectional has the same functionality as // ProcessCodeGeneratorRequest, but builds the AST so that files, messages, // and enums have references to any files or messages that directly or // transitively depend on them. 
func ProcessCodeGeneratorRequestBidirectional(debug Debugger, req *plugin_go.CodeGeneratorRequest) AST { g := ProcessCodeGeneratorRequest(debug, req) for _, pkg := range g.Packages() { for _, f := range pkg.Files() { for _, m := range f.AllMessages() { for _, field := range m.Fields() { assignDependent(field.Type(), m) } } } } return g } // ProcessFileDescriptorSet converts a FileDescriptorSet from protoc into a // fully connected AST entity graph. An error is returned if the input is // malformed or missing dependencies. To generate a self-contained // FileDescriptorSet, run the following command: // // protoc -o path/to/fdset.bin --include_imports $PROTO_FILES // // The emitted AST will have no values in the Targets map, but Packages will be // populated. If used for testing purposes, the Targets map can be manually // populated. func ProcessFileDescriptorSet(debug Debugger, fdset *descriptor.FileDescriptorSet) AST { req := plugin_go.CodeGeneratorRequest{ProtoFile: fdset.File} return ProcessCodeGeneratorRequest(debug, &req) } // ProcessFileDescriptorSetBidirectional has the same functionality as // ProcessFileDescriptorSet, but builds the AST so that files, messages, // and enums have references to any files or messages that directly or // transitively depend on them. func ProcessFileDescriptorSetBidirectional(debug Debugger, fdset *descriptor.FileDescriptorSet) AST { req := plugin_go.CodeGeneratorRequest{ProtoFile: fdset.File} return ProcessCodeGeneratorRequestBidirectional(debug, &req) } func (g *graph) hydratePackage(f *descriptor.FileDescriptorProto) Package { lookup := f.GetPackage() if pkg, exists := g.packages[lookup]; exists { return pkg } p := &pkg{fd: f} g.packages[lookup] = p return p } func (g *graph) hydrateFile(pkg Package, f *descriptor.FileDescriptorProto) File { fl := &file{ pkg: pkg, desc: f, } if pkg := f.GetPackage(); pkg != "" { fl.fqn = "." 
+ pkg } else { fl.fqn = "" } g.add(fl) for _, dep := range f.GetDependency() { // the AST is built in topological order so a file's dependencies are always hydrated first d := g.mustSeen(dep).(File) fl.addFileDependency(d) d.addDependent(fl) } if _, fl.buildTarget = g.targets[f.GetName()]; fl.buildTarget { g.targets[f.GetName()] = fl } enums := f.GetEnumType() fl.enums = make([]Enum, 0, len(enums)) for _, e := range enums { fl.addEnum(g.hydrateEnum(fl, e)) } exts := f.GetExtension() fl.defExts = make([]Extension, 0, len(exts)) for _, ext := range exts { e := g.hydrateExtension(fl, ext) fl.addDefExtension(e) } msgs := f.GetMessageType() fl.msgs = make([]Message, 0, len(f.GetMessageType())) for _, msg := range msgs { fl.addMessage(g.hydrateMessage(fl, msg)) } srvs := f.GetService() fl.srvs = make([]Service, 0, len(srvs)) for _, sd := range srvs { fl.addService(g.hydrateService(fl, sd)) } for _, m := range fl.AllMessages() { for _, me := range m.MapEntries() { for _, fld := range me.Fields() { fld.addType(g.hydrateFieldType(fld)) } } for _, fld := range m.Fields() { fld.addType(g.hydrateFieldType(fld)) } } g.hydrateSourceCodeInfo(fl, f) return fl } func (g *graph) hydrateSourceCodeInfo(f File, fd *descriptor.FileDescriptorProto) { for _, loc := range fd.GetSourceCodeInfo().GetLocation() { info := sci{desc: loc} path := loc.GetPath() if len(path) == 1 { switch path[0] { case syntaxPath: f.addSourceCodeInfo(info) case packagePath: f.addPackageSourceCodeInfo(info) default: continue } } if e := f.childAtPath(path); e != nil { e.addSourceCodeInfo(info) } } } func (g *graph) hydrateEnum(p ParentEntity, ed *descriptor.EnumDescriptorProto) Enum { e := &enum{ desc: ed, parent: p, } e.fqn = fullyQualifiedName(p, e) g.add(e) vals := ed.GetValue() e.vals = make([]EnumValue, 0, len(vals)) for _, vd := range vals { e.addValue(g.hydrateEnumValue(e, vd)) } return e } func (g *graph) hydrateEnumValue(e Enum, vd *descriptor.EnumValueDescriptorProto) EnumValue { ev := &enumVal{ desc: 
vd, enum: e, } ev.fqn = fullyQualifiedName(e, ev) g.add(ev) return ev } func (g *graph) hydrateService(f File, sd *descriptor.ServiceDescriptorProto) Service { s := &service{ desc: sd, file: f, } s.fqn = fullyQualifiedName(f, s) g.add(s) for _, md := range sd.GetMethod() { s.addMethod(g.hydrateMethod(s, md)) } return s } func (g *graph) hydrateMethod(s Service, md *descriptor.MethodDescriptorProto) Method { m := &method{ desc: md, service: s, } m.fqn = fullyQualifiedName(s, m) g.add(m) m.in = g.mustSeen(md.GetInputType()).(Message) m.out = g.mustSeen(md.GetOutputType()).(Message) return m } func (g *graph) hydrateMessage(p ParentEntity, md *descriptor.DescriptorProto) Message { m := &msg{ desc: md, parent: p, } m.fqn = fullyQualifiedName(p, m) g.add(m) for _, ed := range md.GetEnumType() { m.addEnum(g.hydrateEnum(m, ed)) } m.preservedMsgs = make([]Message, len(md.GetNestedType())) for i, nmd := range md.GetNestedType() { nm := g.hydrateMessage(m, nmd) if nm.IsMapEntry() { m.addMapEntry(nm) } else { m.addMessage(nm) } m.preservedMsgs[i] = nm } for _, od := range md.GetOneofDecl() { m.addOneOf(g.hydrateOneOf(m, od)) } for _, fd := range md.GetField() { fld := g.hydrateField(m, fd) m.addField(fld) if idx := fld.Descriptor().OneofIndex; idx != nil { m.oneofs[*idx].addField(fld) } } exts := md.GetExtension() m.defExts = make([]Extension, 0, len(exts)) for _, ext := range md.GetExtension() { e := g.hydrateExtension(m, ext) m.addDefExtension(e) } return m } func (g *graph) hydrateField(m Message, fd *descriptor.FieldDescriptorProto) Field { f := &field{ desc: fd, msg: m, } f.fqn = fullyQualifiedName(f.msg, f) g.add(f) return f } func (g *graph) hydrateOneOf(m Message, od *descriptor.OneofDescriptorProto) OneOf { o := &oneof{ desc: od, msg: m, } o.fqn = fullyQualifiedName(m, o) g.add(o) return o } func (g *graph) hydrateExtension(parent ParentEntity, fd *descriptor.FieldDescriptorProto) Extension { ext := &ext{ parent: parent, } ext.desc = fd ext.fqn = 
fullyQualifiedName(parent, ext) g.add(ext) g.extensions = append(g.extensions, ext) return ext } func (g *graph) hydrateFieldType(fld Field) FieldType { s := &scalarT{fld: fld} switch { case s.ProtoType() == GroupT: g.d.Fail("group types are deprecated and unsupported. Use an embedded message instead.") return nil case s.ProtoLabel() == Repeated: return g.hydrateRepeatedFieldType(s) case s.ProtoType() == EnumT: return g.hydrateEnumFieldType(s) case s.ProtoType() == MessageT: return g.hydrateEmbedFieldType(s) default: return s } } func (g *graph) hydrateEnumFieldType(s *scalarT) FieldType { return &enumT{ scalarT: s, enum: g.mustSeen(s.fld.Descriptor().GetTypeName()).(Enum), } } func (g *graph) hydrateEmbedFieldType(s *scalarT) FieldType { return &embedT{ scalarT: s, msg: g.mustSeen(s.fld.Descriptor().GetTypeName()).(Message), } } func (g *graph) hydrateRepeatedFieldType(s *scalarT) FieldType { r := &repT{ scalarT: s, } r.el = &scalarE{ typ: r, ptype: r.ProtoType(), } switch s.ProtoType() { case EnumT: r.el = &enumE{ scalarE: r.el.(*scalarE), enum: g.mustSeen(s.fld.Descriptor().GetTypeName()).(Enum), } case MessageT: m := g.mustSeen(s.fld.Descriptor().GetTypeName()).(Message) if m.IsMapEntry() { return g.hydrateMapFieldType(r, m) } r.el = &embedE{ scalarE: r.el.(*scalarE), msg: m, } } return r } func (g *graph) hydrateMapFieldType(r *repT, m Message) FieldType { mt := &mapT{repT: r} mt.key = m.Fields()[0].Type().toElem() mt.key.setType(mt) mt.el = m.Fields()[1].Type().toElem() mt.el.setType(mt) return mt } func (g *graph) mustSeen(fqn string) Entity { if existing, seen := g.entities[fqn]; seen { return existing } g.d.Failf("expected entity %q has not been hydrated", fqn) return nil } func (g *graph) add(e Entity) { g.entities[g.resolveFQN(e)] = e } func (g *graph) resolveFQN(e Entity) string { if f, ok := e.(File); ok { return f.Name().String() } return e.FullyQualifiedName() } func assignDependent(ft FieldType, parent Message) { if ft.IsEnum() { 
ft.Enum().addDependent(parent) } else if ft.IsEmbed() { ft.Embed().addDependent(parent) } else if ft.IsRepeated() || ft.IsMap() { if ft.Element().IsEnum() { ft.Element().Enum().addDependent(parent) } else if ft.Element().IsEmbed() { ft.Element().Embed().addDependent(parent) } if ft.IsMap() { if ft.Key().IsEnum() { ft.Key().Enum().addDependent(parent) } else if ft.Key().IsEmbed() { ft.Key().Embed().addDependent(parent) } } } } var _ AST = (*graph)(nil) protoc-gen-star-2.0.3/ast_test.go000066400000000000000000000302641440740147700167320ustar00rootroot00000000000000package pgs import ( "io/ioutil" "path/filepath" "testing" descriptor "google.golang.org/protobuf/types/descriptorpb" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" plugin_go "google.golang.org/protobuf/types/pluginpb" ) func readCodeGenReq(t *testing.T, dir string) *plugin_go.CodeGeneratorRequest { filename := filepath.Join("testdata", "graph", dir, "code_generator_request.pb.bin") data, err := ioutil.ReadFile(filename) require.NoError(t, err, "unable to read CDR at %q", filename) req := &plugin_go.CodeGeneratorRequest{} err = proto.Unmarshal(data, req) require.NoError(t, err, "unable to unmarshal CDR data at %q", filename) return req } func readFileDescSet(t *testing.T, filename string) *descriptor.FileDescriptorSet { data, err := ioutil.ReadFile(filename) require.NoError(t, err, "unable to read FDS at %q", filename) fdset := &descriptor.FileDescriptorSet{} err = proto.Unmarshal(data, fdset) require.NoError(t, err, "unable to unmarshal FDS data at %q", filename) return fdset } func buildGraph(t *testing.T, dir string) AST { d := InitMockDebugger() ast := ProcessCodeGeneratorRequest(d, readCodeGenReq(t, dir)) require.False(t, d.Failed(), "failed to build graph (see previous log statements)") return ast } func TestGraph_FDSet(t *testing.T) { fdset := readFileDescSet(t, "testdata/fdset.bin") d := InitMockDebugger() ast := 
ProcessFileDescriptorSet(d, fdset) require.False(t, d.Failed(), "failed to build graph from FDSet") msg, found := ast.Lookup(".kitchen.Sink") assert.True(t, found) assert.Implements(t, (*Message)(nil), msg) } func TestGraph_Messages(t *testing.T) { t.Parallel() g := buildGraph(t, "messages") tests := []struct { lookup string fldCt int isMap, isRepeated, isEmbed, isEnum bool }{ { lookup: ".graph.messages.Scalars", fldCt: 15, }, { lookup: ".graph.messages.Embedded", fldCt: 6, isEmbed: true, }, { lookup: ".graph.messages.Enums", fldCt: 6, isEnum: true, }, { lookup: ".graph.messages.Repeated", fldCt: 13, isRepeated: true, }, { lookup: ".graph.messages.Maps", fldCt: 13, isMap: true, }, { lookup: ".graph.messages.Recursive", fldCt: 1, isEmbed: true, }, } for _, test := range tests { tc := test t.Run(tc.lookup, func(t *testing.T) { t.Parallel() ent, ok := g.Lookup(tc.lookup) require.True(t, ok, "unknown entity lookup") msg, ok := ent.(Message) require.True(t, ok, "entity is not a message") flds := msg.Fields() assert.Len(t, flds, tc.fldCt, "unexpected number of fields on the message") for _, fld := range flds { t.Run(fld.Name().String(), func(t *testing.T) { typ := fld.Type() assert.Equal(t, tc.isMap, typ.IsMap(), "should not be a map") assert.Equal(t, tc.isRepeated, typ.IsRepeated(), "should not be repeated") assert.Equal(t, tc.isEmbed, typ.IsEmbed(), "should not be embedded") assert.Equal(t, tc.isEnum, typ.IsEnum(), "should not be an enum") }) } }) } t.Run("oneof", func(t *testing.T) { t.Parallel() ent, ok := g.Lookup(".graph.messages.OneOfs") require.True(t, ok) msg, ok := ent.(Message) require.True(t, ok) flds := msg.Fields() oneofFlds := msg.OneOfFields() notOneofFlds := msg.NonOneOfFields() assert.Len(t, flds, 3) assert.Len(t, oneofFlds, 1) assert.Len(t, notOneofFlds, 2) oneofs := msg.OneOfs() require.Len(t, oneofs, 1) oo := oneofs[0] require.Len(t, oo.Fields(), 1) assert.Equal(t, int32(2), oo.Fields()[0].Descriptor().GetNumber()) assert.Equal(t, oneofFlds, 
oo.Fields()) }) } func TestGraph_Services(t *testing.T) { t.Parallel() g := buildGraph(t, "services") t.Run("empty", func(t *testing.T) { t.Parallel() ent, ok := g.Lookup(".graph.services.Empty") require.True(t, ok) svc, ok := ent.(Service) require.True(t, ok) assert.Empty(t, svc.Methods()) }) t.Run("unary", func(t *testing.T) { t.Parallel() ent, ok := g.Lookup(".graph.services.Unary") require.True(t, ok) svc, ok := ent.(Service) require.True(t, ok) mtds := svc.Methods() assert.Len(t, mtds, 2) for _, mtd := range mtds { assert.False(t, mtd.ClientStreaming(), mtd.FullyQualifiedName()) assert.False(t, mtd.ServerStreaming(), mtd.FullyQualifiedName()) } }) t.Run("streaming", func(t *testing.T) { t.Parallel() ent, ok := g.Lookup(".graph.services.Streaming") require.True(t, ok) svc, ok := ent.(Service) require.True(t, ok) mtds := svc.Methods() assert.Len(t, mtds, 3) tests := []struct{ client, server bool }{ {true, false}, {false, true}, {true, true}, } for i, mtd := range mtds { assert.Equal(t, tests[i].client, mtd.ClientStreaming(), mtd.FullyQualifiedName()) assert.Equal(t, tests[i].server, mtd.ServerStreaming(), mtd.FullyQualifiedName()) } }) } func TestGraph_SourceCodeInfo(t *testing.T) { t.Parallel() g := buildGraph(t, "info") tests := map[string]string{ "Info": "root message", "Info.Before": "before message", "Info.BeforeEnum.BEFORE": "before enum value", "Info.field": "field", "Info.Middle": "middle message", "Info.Middle.inner": "inner field", "Info.other_field": "other field", "Info.After": "after message", "Info.AfterEnum": "after enum", "Info.AfterEnum.AFTER": "after enum value", "Info.OneOf": "oneof", "Info.oneof_field": "oneof field", "Enum": "root enum comment", "Enum.ROOT": "root enum value", "Service": "service", "Service.Method": "method", } for lookup, expected := range tests { t.Run(lookup, func(t *testing.T) { lo := ".graph.info." 
+ lookup ent, ok := g.Lookup(lo) require.True(t, ok, "cannot find entity: %s", lo) info := ent.SourceCodeInfo() require.NotNil(t, info, "source code info is nil") assert.Contains(t, info.LeadingComments(), expected, "invalid leading comment") }) } t.Run("file", func(t *testing.T) { f, ok := g.Targets()["info/info.proto"] require.True(t, ok, "cannot find file") info := f.SyntaxSourceCodeInfo() require.NotNil(t, info, "no source code info on syntax") assert.Contains(t, info.LeadingComments(), "syntax") assert.Equal(t, info, f.SourceCodeInfo(), "SourceCodeInfo should return SyntaxSourceCodeInfo") info = f.PackageSourceCodeInfo() require.NotNil(t, info, "no source code info on package") assert.Contains(t, info.LeadingComments(), "package") }) } func TestGraph_MustSeen(t *testing.T) { t.Parallel() md := InitMockDebugger() g := &graph{ d: md, entities: make(map[string]Entity), } f := dummyFile() g.add(f) assert.Equal(t, f, g.mustSeen(g.resolveFQN(f))) assert.Nil(t, g.mustSeen(".foo.bar.baz")) assert.True(t, md.Failed()) } func TestGraph_HydrateFieldType_Group(t *testing.T) { t.Parallel() md := InitMockDebugger() g := &graph{d: md} f := dummyField() f.Descriptor().Type = GroupT.ProtoPtr() assert.Nil(t, g.hydrateFieldType(f)) assert.True(t, md.Failed()) } func TestGraph_Packageless(t *testing.T) { t.Parallel() g := buildGraph(t, "packageless") tests := []struct { name string entityIFace interface{} }{ {".RootMessage", (*Message)(nil)}, {".RootEnum", (*Enum)(nil)}, {".RootMessage.field", (*Field)(nil)}, {".RootEnum.VALUE", (*EnumValue)(nil)}, {".RootMessage.NestedMsg", (*Message)(nil)}, {".RootMessage.NestedEnum", (*Enum)(nil)}, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { ent, ok := g.Lookup(tc.name) assert.True(t, ok) assert.NotNil(t, ent) assert.Implements(t, tc.entityIFace, ent) }) } } func TestGraph_Extensions(t *testing.T) { t.Parallel() g := buildGraph(t, "extensions") assert.NotNil(t, g) ent, ok := g.Lookup("extensions/ext/data.proto") 
assert.True(t, ok) assert.NotNil(t, ent.(File).DefinedExtensions()) assert.Len(t, ent.(File).DefinedExtensions(), 6) ent, ok = g.Lookup("extensions/everything.proto") assert.True(t, ok) assert.NotNil(t, ent.(File).DefinedExtensions()) assert.Len(t, ent.(File).Imports(), 4) ent, ok = g.Lookup(".extensions.Request") assert.True(t, ok) assert.NotNil(t, ent.(Message).DefinedExtensions()) assert.Len(t, ent.(Message).DefinedExtensions(), 1) ent, ok = g.Lookup(".google.protobuf.MessageOptions") assert.True(t, ok) assert.NotNil(t, ent.(Message).Extensions()) assert.Len(t, ent.(Message).Extensions(), 1) } func TestGraph_Bidirectional(t *testing.T) { t.Parallel() fdset := readFileDescSet(t, "testdata/fdset.bin") d := InitMockDebugger() ast := ProcessFileDescriptorSetBidirectional(d, fdset) require.False(t, d.Failed(), "failed to build graph from FDSet") t.Run("nested", func(t *testing.T) { t.Parallel() finish, ok := ast.Lookup(".kitchen.Sink.Material.Finish") require.True(t, ok) deps := finish.(Enum).Dependents() kitchen, ok := ast.Lookup(".kitchen.Kitchen") require.True(t, ok) sink, ok := ast.Lookup(".kitchen.Sink") require.True(t, ok) require.Len(t, deps, 3) assert.Contains(t, deps, finish.(Enum).Parent()) assert.Contains(t, deps, sink) assert.Contains(t, deps, kitchen) }) t.Run("files", func(t *testing.T) { t.Parallel() timestamp, ok := ast.Lookup("google/protobuf/timestamp.proto") require.True(t, ok) deps := timestamp.(File).Dependents() sinkProto, ok := ast.Lookup("kitchen/sink.proto") require.True(t, ok) kitchenProto, ok := ast.Lookup("kitchen/kitchen.proto") require.True(t, ok) assert.Len(t, deps, 2) assert.Contains(t, deps, sinkProto) assert.Contains(t, deps, kitchenProto) }) } func TestGraph_Bidirectional_Messages_Enums(t *testing.T) { t.Parallel() d := InitMockDebugger() graph := ProcessCodeGeneratorRequestBidirectional(d, readCodeGenReq(t, "messages")) require.False(t, d.Failed(), "failed to build graph (see previous log statements)") t.Run("repeated", func(t 
*testing.T) { t.Parallel() beforeRepMsg, ok := graph.Lookup(".graph.messages.BeforeRepMsg") require.True(t, ok) repeated, ok := graph.Lookup(".graph.messages.Repeated") require.True(t, ok) deps := beforeRepMsg.(Message).Dependents() require.Len(t, deps, 1) assert.Contains(t, deps, repeated) beforeRepEnum, ok := graph.Lookup(".graph.messages.BeforeRepEnum") require.True(t, ok) deps = beforeRepEnum.(Enum).Dependents() require.Len(t, deps, 1) assert.Contains(t, deps, repeated) }) t.Run("message cycle", func(t *testing.T) { t.Parallel() recursiveMsg, ok := graph.Lookup(".graph.messages.Recursive") require.True(t, ok) assert.Empty(t, recursiveMsg.(Message).Dependents()) }) t.Run("maps", func(t *testing.T) { t.Parallel() beforeMapMsg, ok := graph.Lookup(".graph.messages.BeforeMapMsg") require.True(t, ok) maps, ok := graph.Lookup(".graph.messages.Maps") require.True(t, ok) deps := beforeMapMsg.(Message).Dependents() require.Len(t, deps, 1) assert.Contains(t, deps, maps) beforeMapEnum, ok := graph.Lookup(".graph.messages.BeforeMapEnum") require.True(t, ok) deps = beforeMapEnum.(Enum).Dependents() require.Len(t, deps, 1) assert.Contains(t, deps, maps) }) } func TestGraph_Bidirectional_Recursive(t *testing.T) { t.Parallel() d := InitMockDebugger() graph := ProcessCodeGeneratorRequestBidirectional(d, readCodeGenReq(t, "messages")) require.False(t, d.Failed(), "failed to build graph (see previous log statements)") tests := []struct { fqn string expected []string }{ { fqn: ".graph.messages.Recursive", expected: []string{}, }, { fqn: ".graph.messages.Circular.Rock", expected: []string{ ".graph.messages.Circular.Paper", ".graph.messages.Circular.Scissors"}, }, { fqn: ".graph.messages.RepeatedRecursive", expected: []string{}, }, } for _, test := range tests { tc := test t.Run(tc.fqn, func(t *testing.T) { t.Parallel() m, ok := graph.Lookup(tc.fqn) require.True(t, ok) deps := m.(Message).Dependents() require.Len(t, deps, len(tc.expected), "wanted %v, but got %v", tc.expected, deps) 
set := make(map[string]bool) for _, name := range tc.expected { set[name] = true } for _, dep := range deps { assert.Contains(t, set, dep.FullyQualifiedName()) } }) } } protoc-gen-star-2.0.3/build_context.go000066400000000000000000000125651440740147700177530ustar00rootroot00000000000000package pgs import "path/filepath" // BuildContext tracks code generation relative to an output path. By default, // BuildContext's path is relative to the output location specified when // executing protoc (an absolute path to this location is not available within // protoc plugins). Specifying a custom output path permits using an absolute // path and or a different location from protoc's designated output location. type BuildContext interface { DebuggerCommon // OutputPath is the path where files should be generated to. This path may // be relative or absolute, if it is relative, the path is based off the // (unknown) output destination specified during execution of protoc. If it // is absolute, the path may be outside of the target directory for protoc. OutputPath() string // JoinPath returns name relative to the value of OutputPath. JoinPath(name ...string) string // Push adds an arbitrary prefix to the Debugger output. The Outpath value is // unchanged. Push(prefix string) BuildContext // PushDir changes the BuildContext's OutputPath to dir. If dir is relative, // it is applied relative to the current value of OutputPath. PushDir(dir string) BuildContext // Pop returns the previous state of the BuildContext. This may or may not // change the value of OutputPath. This method will cause the plugin to fail // if the root context is popped. Pop() BuildContext // PopDir behaves like Pop but returns the last previous state of OutputPath, // skipping over any prefix changes in-between. If at the root context, this // method will always return the root context. 
PopDir() BuildContext // Parameters returns the command line parameters passed in from protoc, // mutated with any provided ParamMutators via InitOptions. Parameters() Parameters } // Context creates a new BuildContext with the provided debugger and initial // output path. For protoc-gen-go plugins, output is typically ".", while // Module's may use a custom path. func Context(d Debugger, params Parameters, output string) BuildContext { return rootContext{ dirContext: dirContext{ prefixContext: prefixContext{parent: nil, d: d}, p: filepath.Clean(output), }, params: params, } } func initPrefixContext(c BuildContext, d Debugger, prefix string) prefixContext { return prefixContext{ parent: c, d: d.Push(prefix), } } func (c prefixContext) Log(v ...interface{}) { c.d.Log(v...) } func (c prefixContext) Logf(format string, v ...interface{}) { c.d.Logf(format, v...) } func (c prefixContext) Debug(v ...interface{}) { c.d.Debug(v...) } func (c prefixContext) Debugf(format string, v ...interface{}) { c.d.Debugf(format, v...) } func (c prefixContext) Fail(v ...interface{}) { c.d.Fail(v...) } func (c prefixContext) Failf(format string, v ...interface{}) { c.d.Failf(format, v...) } func (c prefixContext) CheckErr(err error, v ...interface{}) { c.d.CheckErr(err, v...) } func (c prefixContext) Assert(expr bool, v ...interface{}) { c.d.Assert(expr, v...) } func (c prefixContext) Exit(code int) { c.d.Exit(code) } func (c prefixContext) Parameters() Parameters { return c.parent.Parameters() } func (c prefixContext) OutputPath() string { return c.parent.OutputPath() } func (c prefixContext) JoinPath(name ...string) string { return c.parent.JoinPath(name...) 
} func (c prefixContext) PushDir(dir string) BuildContext { return initDirContext(c, c.d, dir) } func (c prefixContext) Push(prefix string) BuildContext { return initPrefixContext(c, c.d, prefix) } func (c prefixContext) Pop() BuildContext { return c.parent } func (c prefixContext) PopDir() BuildContext { return c.parent.PopDir() } type dirContext struct { prefixContext p string } func initDirContext(c BuildContext, d Debugger, dir string) dirContext { dc := dirContext{ prefixContext: prefixContext{parent: c, d: d}, p: filepath.Clean(dir), } c.Debug("push:", dc.parent.OutputPath(), "→", dc.OutputPath()) return dc } func (c dirContext) OutputPath() string { return filepath.Join(c.parent.OutputPath(), c.p) } func (c dirContext) PushDir(dir string) BuildContext { return initDirContext(c, c.d, dir) } func (c dirContext) Push(prefix string) BuildContext { return initPrefixContext(c, c.d, prefix) } func (c dirContext) PopDir() BuildContext { return c.Pop() } func (c dirContext) Pop() BuildContext { c.Debug("pop:", c.OutputPath(), "→", c.parent.OutputPath()) return c.parent } func (c dirContext) JoinPath(name ...string) string { return filepath.Join(append([]string{c.OutputPath()}, name...)...) } type prefixContext struct { parent BuildContext d Debugger } type rootContext struct { dirContext params Parameters } func (c rootContext) OutputPath() string { return c.p } func (c rootContext) PushDir(dir string) BuildContext { return initDirContext(c, c.d, dir) } func (c rootContext) Push(prefix string) BuildContext { return initPrefixContext(c, c.d, prefix) } func (c rootContext) Parameters() Parameters { return c.params } func (c rootContext) PopDir() BuildContext { return c } func (c rootContext) Pop() BuildContext { c.Fail("attempted to pop the root build context") return nil } func (c rootContext) JoinPath(name ...string) string { return filepath.Join(append([]string{c.OutputPath()}, name...)...) 
} protoc-gen-star-2.0.3/build_context_test.go000066400000000000000000000123661440740147700210110ustar00rootroot00000000000000package pgs import ( "errors" "testing" "github.com/stretchr/testify/assert" ) func TestPrefixContext_Log(t *testing.T) { t.Parallel() l := newMockLogger() c := initPrefixContext(nil, &rootDebugger{l: l}, "foo") c.Log("bar") assert.Equal(t, "[foo] bar\n", l.buf.String()) } func TestPrefixContext_Logf(t *testing.T) { t.Parallel() l := newMockLogger() c := initPrefixContext(nil, &rootDebugger{l: l}, "foo") c.Logf("bar %s", "baz") assert.Equal(t, "[foo] bar baz\n", l.buf.String()) } func TestPrefixContext_Debug(t *testing.T) { t.Parallel() l := newMockLogger() c := initPrefixContext(nil, &rootDebugger{l: l, logDebugs: true}, "foo") c.Debug("bar") assert.Equal(t, "[foo] bar\n", l.buf.String()) } func TestPrefixContext_Debugf(t *testing.T) { t.Parallel() l := newMockLogger() c := initPrefixContext(nil, &rootDebugger{l: l, logDebugs: true}, "foo") c.Debugf("bar %s", "baz") assert.Equal(t, "[foo] bar baz\n", l.buf.String()) } func TestPrefixContext_Fail(t *testing.T) { t.Parallel() d := InitMockDebugger() c := initPrefixContext(nil, d, "foo") c.Fail("bar") assert.True(t, d.Failed()) } func TestPrefixContext_Failf(t *testing.T) { t.Parallel() d := InitMockDebugger() c := initPrefixContext(nil, d, "foo") c.Failf("bar %s", "baz") assert.True(t, d.Failed()) } func TestPrefixContext_CheckErr(t *testing.T) { t.Parallel() d := InitMockDebugger() c := initPrefixContext(nil, d, "foo") c.CheckErr(nil) assert.False(t, d.Failed()) err := errors.New("bar") c.CheckErr(err) assert.True(t, d.Exited()) assert.Equal(t, d.Err(), err) } func TestPrefixContext_Assert(t *testing.T) { t.Parallel() d := InitMockDebugger() c := initPrefixContext(nil, d, "foo") c.Assert(true) assert.False(t, d.Failed()) c.Assert(false) assert.True(t, d.Failed()) } func TestPrefixContext_OutputPath(t *testing.T) { t.Parallel() d := Context(InitMockDebugger(), Parameters{}, "foo/bar") c := 
initPrefixContext(d, InitMockDebugger(), "") assert.Equal(t, c.OutputPath(), d.OutputPath()) } func TestPrefixContext_PushPop(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo/bar") p := initPrefixContext(r, InitMockDebugger(), "baz") c := p.Push("fizz") assert.IsType(t, prefixContext{}, c) assert.IsType(t, rootContext{}, c.Pop().Pop()) } func TestPrefixContext_PushPopDir(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo/bar") p := initPrefixContext(r, InitMockDebugger(), "fizz") c := p.PushDir("baz") assert.Equal(t, "foo/bar/baz", c.OutputPath()) assert.Equal(t, "foo/bar", c.Push("buzz").PopDir().OutputPath()) } func TestPrefixContext_Parameters(t *testing.T) { t.Parallel() p := Parameters{"foo": "bar"} r := Context(InitMockDebugger(), p, ".") c := r.Push("foo") assert.Equal(t, p, c.Parameters()) } func TestDirContext_OutputPath(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo/bar") d := initDirContext(r, InitMockDebugger(), "baz") assert.Equal(t, "foo/bar/baz", d.OutputPath()) } func TestDirContext_Push(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo/bar") d := initDirContext(r, InitMockDebugger(), "baz") c := d.Push("fizz") assert.Equal(t, d.OutputPath(), c.OutputPath()) assert.IsType(t, prefixContext{}, c) } func TestDirContext_PushPopDir(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo") d := initDirContext(r, InitMockDebugger(), "bar") c := d.PushDir("baz") assert.Equal(t, "foo/bar/baz", c.OutputPath()) c = c.PopDir() assert.Equal(t, "foo/bar", c.OutputPath()) c = c.PopDir() assert.Equal(t, "foo", c.OutputPath()) } func TestRootContext_OutputPath(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo") assert.Equal(t, "foo", r.OutputPath()) } func TestRootContext_PushPop(t *testing.T) { t.Parallel() d := InitMockDebugger() r := Context(d, Parameters{}, "foo") c := r.Push("bar") 
assert.Equal(t, "foo", c.OutputPath()) c = c.Pop() assert.False(t, d.Failed()) c.Pop() assert.True(t, d.Failed()) } func TestRootContext_PushPopDir(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo") c := r.PushDir("bar") assert.Equal(t, "foo/bar", c.OutputPath()) c = c.PopDir() assert.Equal(t, "foo", c.OutputPath()) c = c.PopDir() assert.Equal(t, "foo", c.OutputPath()) } func TestRootContext_Parameters(t *testing.T) { t.Parallel() p := Parameters{"foo": "bar"} r := Context(InitMockDebugger(), p, "foo") assert.Equal(t, p, r.Parameters()) } func TestRootContext_JoinPath(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo") assert.Equal(t, "foo/bar", r.JoinPath("bar")) } func TestDirContext_JoinPath(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo") c := r.PushDir("bar") assert.Equal(t, "foo/bar/baz", c.JoinPath("baz")) } func TestPrefixContext_JoinPath(t *testing.T) { t.Parallel() r := Context(InitMockDebugger(), Parameters{}, "foo") c := r.Push("baz") assert.Equal(t, "foo/bar", c.JoinPath("bar")) } func TestPrefixContext_Exit(t *testing.T) { t.Parallel() d := InitMockDebugger() r := Context(d, Parameters{}, "") r.Exit(123) assert.True(t, d.Exited()) assert.Equal(t, 123, d.ExitCode()) } protoc-gen-star-2.0.3/comment.go000066400000000000000000000027531440740147700165500ustar00rootroot00000000000000package pgs import ( "bufio" "bytes" "fmt" "strings" "unicode" "unicode/utf8" ) const commentPrefix = "//" // C returns a comment block, wrapping when the line's length will exceed wrap. func C(wrap int, args ...interface{}) string { s := commentScanner(wrap, args...) buf := &bytes.Buffer{} for s.Scan() { fmt.Fprintln(buf, commentPrefix, s.Text()) } return buf.String() } // C80 is an alias for C(80, args...) func C80(args ...interface{}) string { return C(80, args...) 
} func commentScanner(wrap int, args ...interface{}) *bufio.Scanner { s := bufio.NewScanner(strings.NewReader(fmt.Sprint(args...))) s.Split(splitComment(wrap - 3)) return s } func splitComment(w int) bufio.SplitFunc { return func(data []byte, atEOF bool) (advance int, token []byte, err error) { var r rune start := 0 lastSpace := 0 for width := 0; start < len(data); start += width { r, width = utf8.DecodeRune(data[start:]) if !unicode.IsSpace(r) { break } } for width, i := 0, start; i < len(data); i += width { r, width = utf8.DecodeRune(data[i:]) if unicode.IsSpace(r) { if i >= w { // we are at our max comment width if lastSpace == 0 { // the token cannot be broken down further, allow it to break the limit return i + width, data[start:i], nil } return lastSpace, data[start:lastSpace], nil } lastSpace = i } } if atEOF && len(data) > start { return len(data), bytes.TrimSpace(data[start:]), nil } return start, nil, nil } } protoc-gen-star-2.0.3/comment_test.go000066400000000000000000000017351440740147700176060ustar00rootroot00000000000000package pgs import ( "strconv" "strings" "testing" "github.com/stretchr/testify/assert" ) func TestC(t *testing.T) { t.Parallel() tests := []struct { in []interface{} ex string }{ { []interface{}{"foo", " bar", " baz"}, "// foo bar baz\n", }, { in: []interface{}{"the quick brown fox jumps over the lazy dog"}, ex: "// the quick brown\n// fox jumps over\n// the lazy dog\n", }, { in: []interface{}{"supercalifragilisticexpialidocious"}, ex: "// supercalifragilisticexpialidocious\n", }, { in: []interface{}{"1234567890123456789012345 foo"}, ex: "// 1234567890123456789012345\n// foo\n", }, } for i, test := range tests { tc := test t.Run(strconv.Itoa(i), func(t *testing.T) { assert.Equal(t, tc.ex, C(20, tc.in...)) }) } } func TestC80(t *testing.T) { t.Parallel() ex := "// foo foo foo foo foo foo foo foo foo foo foo foo foo foo foo foo foo foo foo\n// foo\n" assert.Equal(t, ex, C80(strings.Repeat("foo ", 20))) } 
protoc-gen-star-2.0.3/debug.go000066400000000000000000000161761440740147700162000ustar00rootroot00000000000000package pgs import ( "bytes" "fmt" "io" "log" "os" "strings" ) // DebuggerCommon contains shared features of Debugger and Debugger-like types // (such as BuildContext). type DebuggerCommon interface { // Log writes v to the underlying logging location (typically, os.Stderr). It // uses the same behavior as log.Print, with all prefixes already attached. Log(v ...interface{}) // Logf formats v and writes it to the underlying logging location // (typically, os.Stderr). It uses the same behavior as log.Printf, with all // prefixes already attached. Logf(format string, v ...interface{}) // Debug behaves the same as Log, but only writes its output if debugging is // enabled for this Debugger. Debug(v ...interface{}) // Debugf behaves the same as Logf, but only writes its output if debugging // is enabled for this Debugger. Debugf(format string, v ...interface{}) // Fail behaves the same as Log, but also terminates the process. This method // should be used if an un-recoverable error is encountered. Fail(v ...interface{}) // Failf behaves the same as Logf, but also terminates the process. This // method should be used if an un-recoverable error is encountered. Failf(format string, v ...interface{}) // CheckErr ensures that err is nil. If err is not nil, Fail is called with // err and the provided v. CheckErr(err error, v ...interface{}) // Assert ensures that expr evaluates to true. If expr is false, Fail is // called with the provided v. Assert(expr bool, v ...interface{}) // Exit should terminate the current process with the provided code. Exit(code int) } // A Debugger provides utility methods to provide context-aware logging, // error-checking, and assertions. The Debugger is used extensively within the // protoc-gen-star generator, and is provided in a module's build context. 
type Debugger interface { DebuggerCommon // Push returns a new Debugger with the provided prefix. When entering a new // context, this method should be used. Push(prefix string) Debugger // Pop returns the parent for the current Debugger. When exiting a context, // this method should be used. Pop() Debugger } type logger interface { Println(...interface{}) Printf(string, ...interface{}) } type errFunc func(err error, msgs ...interface{}) type failFunc func(msgs ...interface{}) type exitFunc func(code int) type rootDebugger struct { err errFunc fail failFunc exit exitFunc l logger logDebugs bool } func initDebugger(d bool, l logger) Debugger { rd := rootDebugger{ logDebugs: d, l: l, exit: os.Exit, } rd.fail = failFunc(rd.defaultFail) rd.err = errFunc(rd.defaultErr) return rd } func (d rootDebugger) defaultErr(err error, msg ...interface{}) { if err != nil { d.l.Printf("[error] %s: %v\n", fmt.Sprint(msg...), err) d.exit(1) } } func (d rootDebugger) defaultFail(msg ...interface{}) { d.l.Println(msg...) d.exit(1) } func (d rootDebugger) Log(v ...interface{}) { d.l.Println(v...) } func (d rootDebugger) Logf(format string, v ...interface{}) { d.l.Printf(format, v...) } func (d rootDebugger) Fail(v ...interface{}) { d.fail(fmt.Sprint(v...)) } func (d rootDebugger) Failf(format string, v ...interface{}) { d.fail(fmt.Sprintf(format, v...)) } func (d rootDebugger) Exit(code int) { d.exit(code) } func (d rootDebugger) Debug(v ...interface{}) { if d.logDebugs { d.Log(v...) } } func (d rootDebugger) Debugf(format string, v ...interface{}) { if d.logDebugs { d.Logf(format, v...) 
} } func (d rootDebugger) CheckErr(err error, v ...interface{}) { if err != nil { d.err(err, fmt.Sprint(v...)) } } func (d rootDebugger) Assert(expr bool, v ...interface{}) { if !expr { d.Fail(fmt.Sprint(v...)) } } func (d rootDebugger) Push(prefix string) Debugger { return prefixedDebugger{ parent: d, prefix: fmt.Sprintf("[%s]", prefix), } } func (d rootDebugger) Pop() Debugger { d.Fail("attempted to pop the root debugger") return nil } type prefixedDebugger struct { parent Debugger prefix string } func (d prefixedDebugger) prepend(v []interface{}) []interface{} { return append([]interface{}{d.prefix}, v...) } func (d prefixedDebugger) prependFormat(format string) string { if strings.HasPrefix(format, "[") { return d.prefix + format } return d.prefix + " " + format } func (d prefixedDebugger) Log(v ...interface{}) { d.parent.Log(d.prepend(v)...) } func (d prefixedDebugger) Logf(format string, v ...interface{}) { d.parent.Logf(d.prependFormat(format), v...) } func (d prefixedDebugger) Debug(v ...interface{}) { d.parent.Debug(d.prepend(v)...) } func (d prefixedDebugger) Debugf(format string, v ...interface{}) { d.parent.Debugf(d.prependFormat(format), v...) } func (d prefixedDebugger) Fail(v ...interface{}) { d.parent.Fail(d.prepend(v)...) } func (d prefixedDebugger) Failf(format string, v ...interface{}) { d.parent.Failf(d.prependFormat(format), v...) } func (d prefixedDebugger) CheckErr(err error, v ...interface{}) { d.parent.CheckErr(err, d.prepend(v)...) } func (d prefixedDebugger) Assert(expr bool, v ...interface{}) { d.parent.Assert(expr, d.prepend(v)...) } func (d prefixedDebugger) Exit(code int) { d.parent.Exit(code) } func (d prefixedDebugger) Push(prefix string) Debugger { return prefixedDebugger{ parent: d, prefix: "[" + prefix + "]", } } func (d prefixedDebugger) Pop() Debugger { return d.parent } // MockDebugger serves as a root Debugger instance for usage in tests. 
Unlike // an actual Debugger, MockDebugger will not exit the program, but will track // failures, checked errors, and exit codes. type MockDebugger interface { Debugger // Output returns a reader of all logged data. Output() io.Reader // Failed returns true if Fail or Failf has been called on this debugger or a // descendant of it (via Push). Failed() bool // Err returns the error passed to CheckErr. Err() error // Exited returns true if this Debugger (or a descendant of it) would have // called os.Exit. Exited() bool // ExitCode returns the code this Debugger (or a descendant of it) passed to // os.Exit. If Exited() returns false, this value is meaningless. ExitCode() int } type mockDebugger struct { Debugger buf bytes.Buffer failed bool err error exited bool code int } // InitMockDebugger creates a new MockDebugger for usage in tests. func InitMockDebugger() MockDebugger { md := &mockDebugger{} d := initDebugger(true, log.New(&md.buf, "", 0)).(rootDebugger) d.fail = func(msgs ...interface{}) { md.failed = true d.defaultFail(msgs...) } d.err = func(err error, msgs ...interface{}) { if err != nil { md.err = err } d.defaultErr(err, msgs...) 
} d.exit = func(code int) { md.exited = true md.code = code } md.Debugger = d return md } func (d *mockDebugger) Output() io.Reader { return &d.buf } func (d *mockDebugger) Failed() bool { return d.failed } func (d *mockDebugger) Err() error { return d.err } func (d *mockDebugger) Exited() bool { return d.exited } func (d *mockDebugger) ExitCode() int { return d.code } var ( _ Debugger = rootDebugger{} _ Debugger = prefixedDebugger{} _ MockDebugger = &mockDebugger{} ) protoc-gen-star-2.0.3/debug_test.go000066400000000000000000000164221440740147700172310ustar00rootroot00000000000000package pgs import ( "bytes" "fmt" "io/ioutil" "testing" "errors" "github.com/stretchr/testify/assert" ) type mockLogger struct { buf *bytes.Buffer } func newMockLogger() *mockLogger { return &mockLogger{&bytes.Buffer{}} } func (l *mockLogger) Println(v ...interface{}) { fmt.Fprintln(l.buf, v...) } func (l *mockLogger) Printf(format string, v ...interface{}) { fmt.Fprintln(l.buf, fmt.Sprintf(format, v...)) } func TestRootDebugger_Log(t *testing.T) { t.Parallel() l := newMockLogger() rd := rootDebugger{l: l} rd.Log("foo", "bar") assert.Equal(t, "foo bar\n", l.buf.String()) } func TestRootDebugger_Logf(t *testing.T) { t.Parallel() l := newMockLogger() rd := rootDebugger{l: l} rd.Logf("foo%s", "bar") assert.Equal(t, "foobar\n", l.buf.String()) } func TestRootDebugger_Fail(t *testing.T) { t.Parallel() var failed bool fail := func(msgs ...interface{}) { assert.Equal(t, "foobar", msgs[0]) failed = true } rd := rootDebugger{l: newMockLogger(), fail: fail} rd.Fail("foo", "bar") assert.True(t, failed) } func TestRootDebugger_Failf(t *testing.T) { t.Parallel() var failed bool fail := func(msgs ...interface{}) { assert.Equal(t, "fizz buzz", msgs[0]) failed = true } rd := rootDebugger{l: newMockLogger(), fail: fail} rd.Failf("fizz %s", "buzz") assert.True(t, failed) } func TestRootDebugger_Debug(t *testing.T) { t.Parallel() l := newMockLogger() rd := rootDebugger{l: l} rd.Debug("foo") assert.Empty(t, 
l.buf.String()) rd.logDebugs = true rd.Debug("bar") assert.Contains(t, l.buf.String(), "bar") } func TestRootDebugger_Debugf(t *testing.T) { t.Parallel() l := newMockLogger() rd := rootDebugger{l: l} rd.Debug("foo") assert.Empty(t, l.buf.String()) rd.logDebugs = true rd.Debug("bar") assert.Contains(t, l.buf.String(), "bar") } func TestRootDebugger_CheckErr(t *testing.T) { t.Parallel() e := errors.New("bad error") errd := false errfn := func(err error, msg ...interface{}) { assert.Equal(t, e, err) assert.Equal(t, "foo", msg[0]) errd = true } rd := rootDebugger{err: errfn} rd.CheckErr(nil, "fizz") assert.False(t, errd) rd.CheckErr(e, "foo") assert.True(t, errd) } func TestRootDebugger_Assert(t *testing.T) { t.Parallel() failed := false fail := func(msgs ...interface{}) { assert.Equal(t, "foo", msgs[0]) failed = true } rd := rootDebugger{fail: fail} rd.Assert(true, "fizz") assert.False(t, failed) rd.Assert(false, "foo") assert.True(t, failed) } func TestRootDebugger_Exit(t *testing.T) { t.Parallel() var code int rd := rootDebugger{exit: func(c int) { code = c }} rd.Exit(123) assert.Equal(t, 123, code) } func TestRootDebugger_Push(t *testing.T) { t.Parallel() rd := rootDebugger{} d := rd.Push("foo") assert.NotNil(t, d) assert.NotEqual(t, rd, d) } func TestRootDebugger_Pop(t *testing.T) { t.Parallel() rd := rootDebugger{} assert.Panics(t, func() { rd.Pop() }) } func TestRootDebugger_DefaultErr(t *testing.T) { t.Parallel() exited := false code := 0 l := newMockLogger() rd := rootDebugger{ l: l, exit: func(c int) { code = c exited = true }, } rd.defaultErr(nil, "nothing") assert.False(t, exited) assert.Empty(t, l.buf.String()) rd.defaultErr(errors.New("some error"), "something") assert.True(t, exited) assert.Equal(t, 1, code) assert.Contains(t, l.buf.String(), "something") } func TestRootDebugger_DefaultFail(t *testing.T) { t.Parallel() exited := false code := 0 l := newMockLogger() rd := rootDebugger{ l: l, exit: func(c int) { code = c exited = true }, } 
rd.defaultFail("something") assert.True(t, exited) assert.Equal(t, 1, code) assert.Contains(t, l.buf.String(), "something") } func TestPrefixedDebugger_Log(t *testing.T) { t.Parallel() l := newMockLogger() d := rootDebugger{l: l}.Push("FIZZ") d.Log("foo", "bar") assert.Contains(t, l.buf.String(), "FIZZ") assert.Contains(t, l.buf.String(), "foo bar") } func TestPrefixedDebugger_Logf(t *testing.T) { t.Parallel() l := newMockLogger() d := rootDebugger{l: l}.Push("FIZZ") d.Logf("foo%s", "bar") assert.Contains(t, l.buf.String(), "FIZZ") assert.Contains(t, l.buf.String(), "foobar") } func TestPrefixedDebugger_Fail(t *testing.T) { t.Parallel() var failed bool fail := func(msgs ...interface{}) { assert.Contains(t, msgs[0], "FIZZ") assert.Contains(t, msgs[0], "foobar") failed = true } d := rootDebugger{l: newMockLogger(), fail: fail}.Push("FIZZ") d.Fail("foo", "bar") assert.True(t, failed) } func TestPrefixedDebugger_Failf(t *testing.T) { t.Parallel() var failed bool fail := func(msgs ...interface{}) { assert.Contains(t, msgs[0], "FIZZ") assert.Contains(t, msgs[0], "foo bar") failed = true } d := rootDebugger{l: newMockLogger(), fail: fail}.Push("FIZZ") d.Failf("foo %s", "bar") assert.True(t, failed) } func TestPrefixedDebugger_Debug(t *testing.T) { t.Parallel() l := newMockLogger() rd := rootDebugger{l: l} d := rd.Push("FIZZ") d.Debug("foo") assert.Empty(t, l.buf.String()) rd.logDebugs = true d = rd.Push("FIZZ") d.Debug("bar") assert.Contains(t, l.buf.String(), "bar") assert.Contains(t, l.buf.String(), "FIZZ") } func TestPrefixedDebugger_Debugf(t *testing.T) { t.Parallel() l := newMockLogger() rd := rootDebugger{l: l} d := rd.Push("FIZZ") d.Debugf("foo%s", "bar") assert.Empty(t, l.buf.String()) rd.logDebugs = true d = rd.Push("FIZZ") d.Debugf("foo%s", "bar") assert.Contains(t, l.buf.String(), "foobar") assert.Contains(t, l.buf.String(), "FIZZ") } func TestPrefixedDebugger_CheckErr(t *testing.T) { t.Parallel() e := errors.New("bad error") errd := false errfn := func(err 
error, msg ...interface{}) { assert.Equal(t, e, err) assert.Contains(t, msg[0], "foo") assert.Contains(t, msg[0], "FIZZ") errd = true } d := rootDebugger{err: errfn}.Push("FIZZ") d.CheckErr(nil, "fizz") assert.False(t, errd) d.CheckErr(e, "foo") assert.True(t, errd) } func TestPrefixedDebugger_Assert(t *testing.T) { t.Parallel() failed := false fail := func(msgs ...interface{}) { assert.Contains(t, msgs[0], "FIZZ") assert.Contains(t, msgs[0], "foo") failed = true } d := rootDebugger{fail: fail}.Push("FIZZ") d.Assert(1 == 1, "fizz") assert.False(t, failed) d.Assert(1 == 0, "foo") assert.True(t, failed) } func TestPrefixedDebugger_Pop(t *testing.T) { t.Parallel() rd := rootDebugger{} d := rd.Push("FOO") assert.Equal(t, rd, d.Pop()) } func TestPrefixedDebugger_Push(t *testing.T) { t.Parallel() l := newMockLogger() rd := rootDebugger{l: l} d := rd.Push("FOO").Push("BAR") d.Log("fizz") assert.Contains(t, l.buf.String(), "FOO") assert.Contains(t, l.buf.String(), "BAR") } func TestPrefixedDebugger_Push_Format(t *testing.T) { t.Parallel() l := newMockLogger() d := rootDebugger{l: l}.Push("foo").Push("bar") d.Logf("%s", "baz") assert.Equal(t, "[foo][bar] baz\n", l.buf.String()) } func TestPrefixedDebugger_Exit(t *testing.T) { t.Parallel() md := InitMockDebugger() d := &prefixedDebugger{parent: md} d.Exit(123) assert.True(t, md.Exited()) assert.Equal(t, 123, md.ExitCode()) } func TestInitDebugger(t *testing.T) { t.Parallel() d := initDebugger(true, nil) assert.NotNil(t, d) } func TestMockDebugger_Output(t *testing.T) { t.Parallel() md := InitMockDebugger() md.Log("foobar") b, _ := ioutil.ReadAll(md.Output()) assert.Equal(t, "foobar\n", string(b)) } protoc-gen-star-2.0.3/docs.go000066400000000000000000000001121440740147700160210ustar00rootroot00000000000000// Package pgs provides a library for building protoc plugins package pgs protoc-gen-star-2.0.3/entity.go000066400000000000000000000054401440740147700164160ustar00rootroot00000000000000package pgs import 
"google.golang.org/protobuf/runtime/protoimpl" // Entity describes any member of the proto AST that is extensible via // options. All components of a File are considered entities. type Entity interface { Node // The Name of the entity Name() Name // The fully qualified name of the entity. For example, a message // 'HelloRequest' in a 'helloworld' package takes the form of // '.helloworld.HelloRequest'. FullyQualifiedName() string // Syntax identifies whether this entity is encoded with proto2 or proto3 // syntax. Syntax() Syntax // Package returns the container package for this entity. Package() Package // Imports includes external files directly required by this entity. Call // TransitiveImports on File to get all transitive dependencies. Imports() []File // File returns the File containing this entity. File() File // Extension extracts an extension from the entity's options, described by // desc and populates the value ext. Ext must be a pointer type. An error // will only be returned if there is a type mismatch between desc and ext. // The ok value will be true if the extension was found. If the extension // is NOT found, ok will be false and err will be nil. Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (ok bool, err error) // BuildTarget identifies whether or not generation should be performed on // this entity. Use this flag to determine if the file was targeted in the // protoc run or if it was loaded as an external dependency. BuildTarget() bool // SourceCodeInfo returns the SourceCodeInfo associated with the entity. // Primarily, this struct contains the comments associated with the Entity. SourceCodeInfo() SourceCodeInfo childAtPath(path []int32) Entity addSourceCodeInfo(info SourceCodeInfo) } // A ParentEntity is any Entity type that can contain messages and/or enums. // File and Message types implement ParentEntity. type ParentEntity interface { Entity // Messages returns the top-level messages from this entity. 
Nested // messages are not included. Messages() []Message // AllMessages returns all the top-level and nested messages from this Entity. AllMessages() []Message // MapEntries returns the MapEntry message types contained within this // Entity. These messages are not returned by the Messages or AllMessages // methods. Map Entry messages are typically not exposed to the end user. MapEntries() []Message // Enums returns the top-level enums from this entity. Nested enums // are not included. Enums() []Enum // AllEnums returns all top-level and nested enums from this entity. AllEnums() []Enum // DefinedExtensions returns all Extensions defined on this entity. DefinedExtensions() []Extension addMessage(m Message) addMapEntry(m Message) addEnum(e Enum) addDefExtension(e Extension) } protoc-gen-star-2.0.3/enum.go000066400000000000000000000062521440740147700160500ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // Enum describes an enumeration type. Its parent can be either a Message or a // File. type Enum interface { Entity // Descriptor returns the proto descriptor for this Enum Descriptor() *descriptor.EnumDescriptorProto // Parent resolves to either a Message or File that directly contains this // Enum. Parent() ParentEntity // Values returns each defined enumeration value. Values() []EnumValue // Dependents returns all of the messages where Enum is directly or // transitively used. 
Dependents() []Message addValue(v EnumValue) addDependent(m Message) setParent(p ParentEntity) } type enum struct { desc *descriptor.EnumDescriptorProto parent ParentEntity vals []EnumValue info SourceCodeInfo fqn string dependents []Message dependentsCache map[string]Message } func (e *enum) Name() Name { return Name(e.desc.GetName()) } func (e *enum) FullyQualifiedName() string { return e.fqn } func (e *enum) Syntax() Syntax { return e.parent.Syntax() } func (e *enum) Package() Package { return e.parent.Package() } func (e *enum) File() File { return e.parent.File() } func (e *enum) BuildTarget() bool { return e.parent.BuildTarget() } func (e *enum) SourceCodeInfo() SourceCodeInfo { return e.info } func (e *enum) Descriptor() *descriptor.EnumDescriptorProto { return e.desc } func (e *enum) Parent() ParentEntity { return e.parent } func (e *enum) Imports() []File { return nil } func (e *enum) Values() []EnumValue { return e.vals } func (e *enum) populateDependentsCache() { if e.dependentsCache != nil { return } e.dependentsCache = map[string]Message{} for _, dep := range e.dependents { e.dependentsCache[dep.FullyQualifiedName()] = dep dep.getDependents(e.dependentsCache) } } func (e *enum) Dependents() []Message { e.populateDependentsCache() return messageSetToSlice("", e.dependentsCache) } func (e *enum) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (bool, error) { return extension(e.desc.GetOptions(), desc, &ext) } func (e *enum) accept(v Visitor) (err error) { if v == nil { return nil } if v, err = v.VisitEnum(e); err != nil || v == nil { return } for _, ev := range e.vals { if err = ev.accept(v); err != nil { return } } return } func (e *enum) addDependent(m Message) { e.dependents = append(e.dependents, m) } func (e *enum) addValue(v EnumValue) { v.setEnum(e) e.vals = append(e.vals, v) } func (e *enum) setParent(p ParentEntity) { e.parent = p } func (e *enum) childAtPath(path []int32) Entity { switch { case len(path) == 0: return e case 
len(path)%2 != 0: return nil case path[0] == enumTypeValuePath: return e.vals[path[1]].childAtPath(path[2:]) default: return nil } } func (e *enum) addSourceCodeInfo(info SourceCodeInfo) { e.info = info } var _ Enum = (*enum)(nil) protoc-gen-star-2.0.3/enum_test.go000066400000000000000000000076131440740147700171110ustar00rootroot00000000000000package pgs import ( "errors" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestEnum_Name(t *testing.T) { t.Parallel() e := &enum{desc: &descriptor.EnumDescriptorProto{Name: proto.String("foo")}} assert.Equal(t, "foo", e.Name().String()) } func TestEnum_FullyQualifiedName(t *testing.T) { t.Parallel() e := &enum{fqn: "enum"} assert.Equal(t, e.fqn, e.FullyQualifiedName()) } func TestEnum_Syntax(t *testing.T) { t.Parallel() e := &enum{} f := dummyFile() f.addEnum(e) assert.Equal(t, f.Syntax(), e.Syntax()) } func TestEnum_Package(t *testing.T) { t.Parallel() e := &enum{} f := dummyFile() f.addEnum(e) assert.NotNil(t, e.Package()) assert.Equal(t, f.Package(), e.Package()) } func TestEnum_File(t *testing.T) { t.Parallel() e := &enum{} m := dummyMsg() m.addEnum(e) assert.NotNil(t, e.File()) assert.Equal(t, m.File(), e.File()) } func TestEnum_BuildTarget(t *testing.T) { t.Parallel() e := &enum{} f := dummyFile() f.addEnum(e) assert.False(t, e.BuildTarget()) f.buildTarget = true assert.True(t, e.BuildTarget()) } func TestEnum_Descriptor(t *testing.T) { t.Parallel() e := &enum{desc: &descriptor.EnumDescriptorProto{}} assert.Equal(t, e.desc, e.Descriptor()) } func TestEnum_Parent(t *testing.T) { t.Parallel() e := &enum{} f := dummyFile() f.addEnum(e) assert.Equal(t, f, e.Parent()) } func TestEnum_Imports(t *testing.T) { t.Parallel() assert.Nil(t, (&enum{}).Imports()) } func TestEnum_Values(t *testing.T) { t.Parallel() e := &enum{} assert.Empty(t, e.Values()) e.addValue(&enumVal{}) assert.Len(t, e.Values(), 1) } func TestEnum_Dependents(t 
*testing.T) { t.Parallel() t.Run("enum in file", func(t *testing.T) { t.Parallel() e := &enum{} f := dummyFile() f.addEnum(e) assert.Empty(t, e.Dependents()) }) t.Run("enum in message", func(t *testing.T) { t.Parallel() e := &enum{} m := dummyMsg() m.addEnum(e) assert.Empty(t, e.Dependents()) }) t.Run("external dependents", func(t *testing.T) { t.Parallel() pkg := dummyPkg() f := &file{ pkg: pkg, desc: &descriptor.FileDescriptorProto{ Package: proto.String(pkg.ProtoName().String()), Syntax: proto.String(string(Proto3)), Name: proto.String("test_file.proto"), }, } e := &enum{} e.fqn = fullyQualifiedName(f, e) m := dummyMsg() m.fqn = fullyQualifiedName(f, m) e.addDependent(m) deps := e.Dependents() assert.Len(t, deps, 1) assert.Contains(t, deps, m) }) } func TestEnum_Extension(t *testing.T) { // cannot be parallel e := &enum{desc: &descriptor.EnumDescriptorProto{}} assert.NotPanics(t, func() { e.Extension(nil, nil) }) } func TestEnum_Accept(t *testing.T) { t.Parallel() e := &enum{} e.addValue(&enumVal{}) assert.NoError(t, e.accept(nil)) v := &mockVisitor{} assert.NoError(t, e.accept(v)) assert.Equal(t, 1, v.enum) assert.Zero(t, v.enumvalue) v.Reset() v.err = errors.New("") v.v = v assert.Error(t, e.accept(v)) assert.Equal(t, 1, v.enum) assert.Zero(t, v.enumvalue) v.Reset() assert.NoError(t, e.accept(v)) assert.Equal(t, 1, v.enum) assert.Equal(t, 1, v.enumvalue) v.Reset() e.addValue(&mockEnumValue{err: errors.New("")}) assert.Error(t, e.accept(v)) assert.Equal(t, 1, v.enum) assert.Equal(t, 2, v.enumvalue) } func TestEnum_ChildAtPath(t *testing.T) { t.Parallel() e := &enum{} assert.Equal(t, e, e.childAtPath(nil)) assert.Nil(t, e.childAtPath([]int32{1})) assert.Nil(t, e.childAtPath([]int32{999, 123})) } type mockEnum struct { Enum p ParentEntity err error } func (e *mockEnum) setParent(p ParentEntity) { e.p = p } func (e *mockEnum) accept(v Visitor) error { _, err := v.VisitEnum(e) if e.err != nil { return e.err } return err } func dummyEnum() *enum { f := dummyFile() e 
:= &enum{desc: &descriptor.EnumDescriptorProto{Name: proto.String("enum")}} f.addEnum(e) return e } protoc-gen-star-2.0.3/enum_value.go000066400000000000000000000042401440740147700172370ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // An EnumValue describes a name-value pair for an entry in an enum. type EnumValue interface { Entity // Descriptor returns the proto descriptor for this Enum Value Descriptor() *descriptor.EnumValueDescriptorProto // Enum returns the parent Enum for this value Enum() Enum // Value returns the numeric enum value associated with this type Value() int32 setEnum(e Enum) } type enumVal struct { desc *descriptor.EnumValueDescriptorProto enum Enum fqn string info SourceCodeInfo } func (ev *enumVal) Name() Name { return Name(ev.desc.GetName()) } func (ev *enumVal) FullyQualifiedName() string { return ev.fqn } func (ev *enumVal) Syntax() Syntax { return ev.enum.Syntax() } func (ev *enumVal) Package() Package { return ev.enum.Package() } func (ev *enumVal) File() File { return ev.enum.File() } func (ev *enumVal) BuildTarget() bool { return ev.enum.BuildTarget() } func (ev *enumVal) SourceCodeInfo() SourceCodeInfo { return ev.info } func (ev *enumVal) Descriptor() *descriptor.EnumValueDescriptorProto { return ev.desc } func (ev *enumVal) Enum() Enum { return ev.enum } func (ev *enumVal) Value() int32 { return ev.desc.GetNumber() } func (ev *enumVal) Imports() []File { return nil } func (ev *enumVal) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (bool, error) { return extension(ev.desc.GetOptions(), desc, &ext) } func (ev *enumVal) accept(v Visitor) (err error) { if v == nil { return nil } _, err = v.VisitEnumValue(ev) return } func (ev *enumVal) setEnum(e Enum) { ev.enum = e } func (ev *enumVal) childAtPath(path []int32) Entity { if len(path) == 0 { return ev } return nil } func (ev *enumVal) addSourceCodeInfo(info 
SourceCodeInfo) { ev.info = info } var _ EnumValue = (*enumVal)(nil) protoc-gen-star-2.0.3/enum_value_test.go000066400000000000000000000051371440740147700203040ustar00rootroot00000000000000package pgs import ( "errors" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestEnumVal_Name(t *testing.T) { t.Parallel() ev := &enumVal{desc: &descriptor.EnumValueDescriptorProto{Name: proto.String("eval")}} assert.Equal(t, "eval", ev.Name().String()) } func TestEnumVal_FullyQualifiedName(t *testing.T) { t.Parallel() ev := &enumVal{fqn: "ev"} assert.Equal(t, ev.fqn, ev.FullyQualifiedName()) } func TestEnumVal_Syntax(t *testing.T) { t.Parallel() ev := &enumVal{} e := dummyEnum() e.addValue(ev) assert.Equal(t, e.Syntax(), ev.Syntax()) } func TestEnumVal_Package(t *testing.T) { t.Parallel() ev := &enumVal{} e := dummyEnum() e.addValue(ev) assert.NotNil(t, ev.Package()) assert.Equal(t, e.Package(), ev.Package()) } func TestEnumVal_File(t *testing.T) { t.Parallel() ev := &enumVal{} e := dummyEnum() e.addValue(ev) assert.NotNil(t, ev.File()) assert.Equal(t, e.File(), ev.File()) } func TestEnumVal_BuildTarget(t *testing.T) { t.Parallel() ev := &enumVal{} e := dummyEnum() e.addValue(ev) assert.False(t, ev.BuildTarget()) e.parent = &file{buildTarget: true} assert.True(t, ev.BuildTarget()) } func TestEnumVal_Descriptor(t *testing.T) { t.Parallel() ev := &enumVal{desc: &descriptor.EnumValueDescriptorProto{}} assert.Equal(t, ev.desc, ev.Descriptor()) } func TestEnumVal_Enum(t *testing.T) { t.Parallel() ev := &enumVal{} e := dummyEnum() e.addValue(ev) assert.Equal(t, e, ev.Enum()) } func TestEnumVal_Value(t *testing.T) { t.Parallel() ev := &enumVal{desc: &descriptor.EnumValueDescriptorProto{Number: proto.Int32(123)}} assert.Equal(t, int32(123), ev.Value()) } func TestEnumVal_Imports(t *testing.T) { t.Parallel() assert.Nil(t, (&enumVal{}).Imports()) } func TestEnumVal_Extension(t *testing.T) 
{ // cannot be parallel ev := &enumVal{desc: &descriptor.EnumValueDescriptorProto{}} assert.NotPanics(t, func() { ev.Extension(nil, nil) }) } func TestEnumVal_Accept(t *testing.T) { t.Parallel() ev := &enumVal{} assert.NoError(t, ev.accept(nil)) v := &mockVisitor{err: errors.New("")} assert.Error(t, ev.accept(v)) assert.Equal(t, 1, v.enumvalue) } func TestEnumVal_ChildAtPath(t *testing.T) { t.Parallel() ev := &enumVal{} assert.Equal(t, ev, ev.childAtPath(nil)) assert.Nil(t, ev.childAtPath([]int32{1})) } type mockEnumValue struct { EnumValue e Enum err error } func (ev *mockEnumValue) setEnum(e Enum) { ev.e = e } func (ev *mockEnumValue) accept(v Visitor) error { _, err := v.VisitEnumValue(ev) if ev.err != nil { return ev.err } return err } protoc-gen-star-2.0.3/extension.go000066400000000000000000000062571440740147700171250ustar00rootroot00000000000000package pgs import ( "errors" "fmt" "reflect" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/runtime/protoimpl" ) // An Extension is a custom option annotation that can be applied to an Entity to provide additional // semantic details and metadata about the Entity. 
type Extension interface { Field // ParentEntity returns the ParentEntity where the Extension is defined DefinedIn() ParentEntity // Extendee returns the Message that the Extension is extending Extendee() Message setExtendee(m Message) } type ext struct { field parent ParentEntity extendee Message fqn string } func (e *ext) FullyQualifiedName() string { return e.fqn } func (e *ext) Syntax() Syntax { return e.parent.Syntax() } func (e *ext) Package() Package { return e.parent.Package() } func (e *ext) File() File { return e.parent.File() } func (e *ext) BuildTarget() bool { return e.parent.BuildTarget() } func (e *ext) DefinedIn() ParentEntity { return e.parent } func (e *ext) Extendee() Message { return e.extendee } func (e *ext) Message() Message { return nil } func (e *ext) InOneOf() bool { return false } func (e *ext) OneOf() OneOf { return nil } func (e *ext) setMessage(m Message) {} // noop func (e *ext) setOneOf(o OneOf) {} // noop func (e *ext) setExtendee(m Message) { e.extendee = m } func (e *ext) accept(v Visitor) (err error) { if v == nil { return } _, err = v.VisitExtension(e) return } var extractor extExtractor func init() { extractor = protoExtExtractor{} } type extExtractor interface { HasExtension(proto.Message, *protoimpl.ExtensionInfo) bool GetExtension(proto.Message, *protoimpl.ExtensionInfo) interface{} } type protoExtExtractor struct{} func (e protoExtExtractor) HasExtension(pb proto.Message, ext *protoimpl.ExtensionInfo) bool { return proto.HasExtension(pb, ext) } func (e protoExtExtractor) GetExtension(pb proto.Message, ext *protoimpl.ExtensionInfo) interface{} { return proto.GetExtension(pb, ext) } func extension(opts proto.Message, e *protoimpl.ExtensionInfo, out interface{}) (bool, error) { if opts == nil || reflect.ValueOf(opts).IsNil() { return false, nil } if e == nil { return false, errors.New("nil *protoimpl.ExtensionInfo parameter provided") } if out == nil { return false, errors.New("nil extension output parameter provided") } o := 
reflect.ValueOf(out) if o.Kind() != reflect.Ptr { return false, errors.New("out parameter must be a pointer type") } if !extractor.HasExtension(opts, e) { return false, nil } val := extractor.GetExtension(opts, e) if val == nil { return false, errors.New("extracted extension value is nil") } v := reflect.ValueOf(val) for v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface { v = v.Elem() } for o.Kind() == reflect.Ptr || o.Kind() == reflect.Interface { if o.Kind() == reflect.Ptr && o.IsNil() { o.Set(reflect.New(o.Type().Elem())) } o = o.Elem() } if v.Type().AssignableTo(o.Type()) { o.Set(v) return true, nil } return true, fmt.Errorf("cannot assign extension type %q to output type %q", v.Type().String(), o.Type().String()) } protoc-gen-star-2.0.3/extension_test.go000066400000000000000000000075601440740147700201620ustar00rootroot00000000000000package pgs import ( "bytes" "errors" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/runtime/protoimpl" ) func TestExt_FullyQualifiedName(t *testing.T) { t.Parallel() e := &ext{fqn: "foo"} assert.Equal(t, e.fqn, e.FullyQualifiedName()) } func TestExt_Syntax(t *testing.T) { t.Parallel() msg := dummyMsg() e := &ext{parent: msg} assert.Equal(t, msg.Syntax(), e.Syntax()) } func TestExt_Package(t *testing.T) { t.Parallel() msg := dummyMsg() e := &ext{parent: msg} assert.Equal(t, msg.Package(), e.Package()) } func TestExt_File(t *testing.T) { t.Parallel() msg := dummyMsg() e := &ext{parent: msg} assert.Equal(t, msg.File(), e.File()) } func TestExt_BuildTarget(t *testing.T) { t.Parallel() msg := dummyMsg() e := &ext{parent: msg} assert.Equal(t, msg.BuildTarget(), e.BuildTarget()) } func TestExt_ParentEntity(t *testing.T) { t.Parallel() msg := dummyMsg() e := &ext{parent: msg} assert.Equal(t, msg, e.DefinedIn()) } func TestExt_Extendee(t *testing.T) { t.Parallel() msg := dummyMsg() e := &ext{} e.setExtendee(msg) assert.Equal(t, msg, e.Extendee()) } func TestExt_Message(t 
*testing.T) { t.Parallel() e := &ext{} assert.Nil(t, e.Message()) } func TestExt_InOneOf(t *testing.T) { t.Parallel() e := &ext{} assert.False(t, e.InOneOf()) } func TestExt_OneOf(t *testing.T) { t.Parallel() e := &ext{} assert.Nil(t, e.OneOf()) } func TestExt_Accept(t *testing.T) { t.Parallel() e := &ext{} assert.NoError(t, e.accept(nil)) v := &mockVisitor{err: errors.New("")} assert.Error(t, e.accept(v)) assert.Equal(t, 1, v.extension) } type mockExtractor struct { has bool get interface{} } func (e *mockExtractor) HasExtension(proto.Message, *protoimpl.ExtensionInfo) bool { return e.has } func (e *mockExtractor) GetExtension(proto.Message, *protoimpl.ExtensionInfo) interface{} { return e.get } var testExtractor = &mockExtractor{} func init() { extractor = testExtractor } func TestExtension(t *testing.T) { // cannot be parallel defer func() { testExtractor.get = nil }() found, err := extension(nil, nil, nil) assert.False(t, found) assert.NoError(t, err) found, err = extension(proto.Message(nil), nil, nil) assert.False(t, found) assert.NoError(t, err) opts := &struct{ proto.Message }{} found, err = extension(opts, nil, nil) assert.False(t, found) assert.EqualError(t, err, "nil *protoimpl.ExtensionInfo parameter provided") desc := &protoimpl.ExtensionInfo{} found, err = extension(opts, desc, nil) assert.False(t, found) assert.EqualError(t, err, "nil extension output parameter provided") type myExt struct{ Name string } found, err = extension(opts, desc, &myExt{}) assert.False(t, found) assert.NoError(t, err) testExtractor.has = true found, err = extension(opts, desc, &myExt{}) assert.False(t, found) assert.EqualError(t, err, "extracted extension value is nil") testExtractor.get = &myExt{"bar"} out := myExt{} found, err = extension(opts, desc, out) assert.False(t, found) assert.EqualError(t, err, "out parameter must be a pointer type") found, err = extension(opts, desc, &out) assert.True(t, found) assert.NoError(t, err) assert.Equal(t, "bar", out.Name) var ref 
*myExt found, err = extension(opts, desc, &ref) assert.True(t, found) assert.NoError(t, err) assert.Equal(t, "bar", ref.Name) found, err = extension(opts, desc, &bytes.Buffer{}) assert.True(t, found) assert.Error(t, err) } func TestProtoExtExtractor(t *testing.T) { e := protoExtExtractor{} assert.NotPanics(t, func() { e.HasExtension(nil, nil) }) assert.Panics(t, func() { e.GetExtension(nil, nil) }) } // needed to wrapped since there is a Extension method type mExt interface { Extension } type mockExtension struct { mExt err error } func (e *mockExtension) accept(v Visitor) error { _, err := v.VisitExtension(e) if e.err != nil { return e.err } return err } protoc-gen-star-2.0.3/field.go000066400000000000000000000102061440740147700161610ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // A Field describes a member of a Message. A field may also be a member of a // OneOf on the Message. type Field interface { Entity // Descriptor returns the proto descriptor for this field Descriptor() *descriptor.FieldDescriptorProto // Message returns the Message containing this Field. Message() Message // InOneOf returns true if the field is in a OneOf of the parent Message. // This will return true for synthetic oneofs (proto3 field presence) as well. InOneOf() bool // InRealOneOf returns true if the field is in a OneOf of the parent Message. // This will return false for synthetic oneofs, and will only include 'real' oneofs. // See: https://github.com/protocolbuffers/protobuf/blob/v3.17.0/docs/field_presence.md InRealOneOf() bool // OneOf returns the OneOf that this field is a part of. Nil is returned if // the field is not within a OneOf. OneOf() OneOf // Type returns the FieldType of this Field. 
Type() FieldType // HasPresence returns true for all fields that have explicit presence as defined by: // See: https://github.com/protocolbuffers/protobuf/blob/v3.17.0/docs/field_presence.md HasPresence() bool // HasOptionalKeyword returns whether the field is labeled as optional. HasOptionalKeyword() bool // Required returns whether the field is labeled as required. This // will only be true if the syntax is proto2. Required() bool setMessage(m Message) setOneOf(o OneOf) addType(t FieldType) } type field struct { desc *descriptor.FieldDescriptorProto fqn string msg Message oneof OneOf typ FieldType info SourceCodeInfo } func (f *field) Name() Name { return Name(f.desc.GetName()) } func (f *field) FullyQualifiedName() string { return f.fqn } func (f *field) Syntax() Syntax { return f.msg.Syntax() } func (f *field) Package() Package { return f.msg.Package() } func (f *field) Imports() []File { return f.typ.Imports() } func (f *field) File() File { return f.msg.File() } func (f *field) BuildTarget() bool { return f.msg.BuildTarget() } func (f *field) SourceCodeInfo() SourceCodeInfo { return f.info } func (f *field) Descriptor() *descriptor.FieldDescriptorProto { return f.desc } func (f *field) Message() Message { return f.msg } func (f *field) InOneOf() bool { return f.oneof != nil } func (f *field) OneOf() OneOf { return f.oneof } func (f *field) Type() FieldType { return f.typ } func (f *field) setMessage(m Message) { f.msg = m } func (f *field) setOneOf(o OneOf) { f.oneof = o } func (f *field) InRealOneOf() bool { return f.InOneOf() && !f.desc.GetProto3Optional() } func (f *field) HasPresence() bool { if f.InOneOf() { return true } if f.Type().IsEmbed() { return true } if !f.Type().IsRepeated() && !f.Type().IsMap() { if f.Syntax() == Proto2 { return true } return f.HasOptionalKeyword() } return false } func (f *field) HasOptionalKeyword() bool { if f.Syntax() == Proto3 { return f.desc.GetProto3Optional() } return f.desc.GetLabel() == 
descriptor.FieldDescriptorProto_LABEL_OPTIONAL } func (f *field) Required() bool { return f.Syntax().SupportsRequiredPrefix() && f.desc.GetLabel() == descriptor.FieldDescriptorProto_LABEL_REQUIRED } func (f *field) addType(t FieldType) { t.setField(f) f.typ = t } func (f *field) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (ok bool, err error) { return extension(f.desc.GetOptions(), desc, &ext) } func (f *field) accept(v Visitor) (err error) { if v == nil { return } _, err = v.VisitField(f) return } func (f *field) childAtPath(path []int32) Entity { if len(path) == 0 { return f } return nil } func (f *field) addSourceCodeInfo(info SourceCodeInfo) { f.info = info } var _ Field = (*field)(nil) protoc-gen-star-2.0.3/field_test.go000066400000000000000000000122351440740147700172240ustar00rootroot00000000000000package pgs import ( "errors" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestField_Name(t *testing.T) { t.Parallel() f := &field{desc: &descriptor.FieldDescriptorProto{Name: proto.String("foo")}} assert.Equal(t, "foo", f.Name().String()) } func TestField_FullyQualifiedName(t *testing.T) { t.Parallel() f := &field{fqn: "field"} assert.Equal(t, f.fqn, f.FullyQualifiedName()) } func TestField_Syntax(t *testing.T) { t.Parallel() f := &field{} m := dummyMsg() m.addField(f) assert.Equal(t, m.Syntax(), f.Syntax()) } func TestField_Package(t *testing.T) { t.Parallel() f := &field{} m := dummyMsg() m.addField(f) assert.NotNil(t, f.Package()) assert.Equal(t, m.Package(), f.Package()) } func TestField_File(t *testing.T) { t.Parallel() f := &field{} m := dummyMsg() m.addField(f) assert.NotNil(t, f.File()) assert.Equal(t, m.File(), f.File()) } func TestField_BuildTarget(t *testing.T) { t.Parallel() f := &field{} m := dummyMsg() m.addField(f) assert.False(t, f.BuildTarget()) m.setParent(&file{buildTarget: true}) assert.True(t, f.BuildTarget()) } func 
TestField_Descriptor(t *testing.T) { t.Parallel() f := &field{desc: &descriptor.FieldDescriptorProto{}} assert.Equal(t, f.desc, f.Descriptor()) } func TestField_Message(t *testing.T) { t.Parallel() f := &field{} m := dummyMsg() m.addField(f) assert.Equal(t, m, f.Message()) } func TestField_OneOf(t *testing.T) { t.Parallel() f := &field{} assert.Nil(t, f.OneOf()) assert.False(t, f.InOneOf()) o := dummyOneof() o.addField(f) assert.Equal(t, o, f.OneOf()) assert.True(t, f.InOneOf()) } func TestField_InRealOneOf(t *testing.T) { t.Parallel() f := dummyField() assert.False(t, f.InRealOneOf()) f = dummyOneOfField(false) assert.True(t, f.InRealOneOf()) f = dummyOneOfField(true) assert.False(t, f.InRealOneOf()) } func TestField_HasPresence(t *testing.T) { t.Parallel() f := dummyField() f.addType(&repT{scalarT: &scalarT{}}) assert.False(t, f.HasPresence()) f.addType(&mapT{repT: &repT{scalarT: &scalarT{}}}) assert.False(t, f.HasPresence()) f.addType(&scalarT{}) assert.False(t, f.HasPresence()) opt := true f.desc = &descriptor.FieldDescriptorProto{Proto3Optional: &opt} assert.True(t, f.HasPresence()) } func TestField_HasOptionalKeyword(t *testing.T) { t.Parallel() optLabel := descriptor.FieldDescriptorProto_LABEL_OPTIONAL f := &field{msg: &msg{parent: dummyFile()}} assert.False(t, f.HasOptionalKeyword()) f.desc = &descriptor.FieldDescriptorProto{Label: &optLabel} assert.False(t, f.HasOptionalKeyword()) f = dummyField() assert.False(t, f.HasOptionalKeyword()) f = dummyOneOfField(false) assert.False(t, f.HasOptionalKeyword()) f = dummyOneOfField(true) assert.True(t, f.HasOptionalKeyword()) } func TestField_Type(t *testing.T) { t.Parallel() f := &field{} f.addType(&scalarT{}) assert.Equal(t, f.typ, f.Type()) } func TestField_Extension(t *testing.T) { // cannot be parallel f := &field{desc: &descriptor.FieldDescriptorProto{}} assert.NotPanics(t, func() { f.Extension(nil, nil) }) } func TestField_Accept(t *testing.T) { t.Parallel() f := &field{} assert.NoError(t, f.accept(nil)) v := 
&mockVisitor{err: errors.New("")} assert.Error(t, f.accept(v)) assert.Equal(t, 1, v.field) } func TestField_Imports(t *testing.T) { t.Parallel() f := &field{} f.addType(&scalarT{}) assert.Empty(t, f.Imports()) f.addType(&mockT{i: []File{&file{}, &file{}}}) assert.Len(t, f.Imports(), 2) } func TestField_Required(t *testing.T) { t.Parallel() msg := dummyMsg() lbl := descriptor.FieldDescriptorProto_LABEL_REQUIRED f := &field{desc: &descriptor.FieldDescriptorProto{Label: &lbl}} f.setMessage(msg) assert.False(t, f.Required(), "proto3 messages can never be marked required") f.File().(*file).desc.Syntax = proto.String(string(Proto2)) assert.True(t, f.Required(), "proto2 + required") lbl = descriptor.FieldDescriptorProto_LABEL_OPTIONAL f.desc.Label = &lbl assert.False(t, f.Required(), "proto2 + optional") } func TestField_ChildAtPath(t *testing.T) { t.Parallel() f := &field{} assert.Equal(t, f, f.childAtPath(nil)) assert.Nil(t, f.childAtPath([]int32{1})) } type mockField struct { Field i []File m Message err error } func (f *mockField) Imports() []File { return f.i } func (f *mockField) setMessage(m Message) { f.m = m } func (f *mockField) accept(v Visitor) error { _, err := v.VisitField(f) if f.err != nil { return f.err } return err } func dummyField() *field { m := dummyMsg() str := descriptor.FieldDescriptorProto_TYPE_STRING f := &field{desc: &descriptor.FieldDescriptorProto{Name: proto.String("field"), Type: &str}} m.addField(f) t := &scalarT{} f.addType(t) return f } func dummyOneOfField(synthetic bool) *field { m := dummyMsg() o := dummyOneof() str := descriptor.FieldDescriptorProto_TYPE_STRING var oIndex int32 = 1 f := &field{desc: &descriptor.FieldDescriptorProto{ Name: proto.String("field"), Type: &str, OneofIndex: &oIndex, Proto3Optional: &synthetic, }} o.addField(f) m.addField(f) m.addOneOf(o) t := &scalarT{} f.addType(t) return f } protoc-gen-star-2.0.3/field_type.go000066400000000000000000000113621440740147700172260ustar00rootroot00000000000000package pgs // 
FieldType describes the type of a Field. type FieldType interface { // Field returns the parent Field of this type. While two FieldTypes might be // equivalent, each instance of a FieldType is tied to its Field. Field() Field // IsRepeated returns true if and only if the field is marked as "repeated". // While map fields may be labeled as repeated, this method will not return // true for them. IsRepeated() bool // IsMap returns true if the field is a map type. IsMap() bool // IsEnum returns true if the field is a singular enum value. Maps or // repeated fields containing enums will still return false. IsEnum() bool // IsEmbed returns true if the field is a singular message value. Maps or // repeated fields containing embeds will still return false. IsEmbed() bool // IsOptional returns true if the field is prefixed as optional. IsOptional() bool // IsRequired returns true if and only if the field is prefixed as required. IsRequired() bool // ProtoType returns the ProtoType value for this field. ProtoType() ProtoType // ProtoLabel returns the ProtoLabel value for this field. ProtoLabel() ProtoLabel // Imports includes all external proto files required by this field. Imports() []File // Enum returns the Enum associated with this FieldType. If IsEnum returns // false, this value will be nil. Enum() Enum // Embed returns the embedded Message associated with this FieldType. If // IsEmbed returns false, this value will be nil. Embed() Message // Element returns the FieldTypeElem representing the element component of // the type. // // For repeated fields, the returned type describes the type being repeated (i.e., // the element type in the list implied by the repeated field). // // For maps, the returned type describes the type of values in the map. // // Nil will be returned if IsRepeated and IsMap both return false. Element() FieldTypeElem // Key returns the FieldTypeElem representing the key component of the type (i.e, // the type of keys in a map). 
// // Nil will be returned if IsMap returns false. Key() FieldTypeElem setField(f Field) toElem() FieldTypeElem } type scalarT struct{ fld Field } func (s *scalarT) Field() Field { return s.fld } func (s *scalarT) IsRepeated() bool { return false } func (s *scalarT) IsMap() bool { return false } func (s *scalarT) IsEnum() bool { return false } func (s *scalarT) IsEmbed() bool { return false } func (s *scalarT) ProtoType() ProtoType { return ProtoType(s.fld.Descriptor().GetType()) } func (s *scalarT) ProtoLabel() ProtoLabel { return ProtoLabel(s.fld.Descriptor().GetLabel()) } func (s *scalarT) Imports() []File { return nil } func (s *scalarT) setField(f Field) { s.fld = f } func (s *scalarT) Enum() Enum { return nil } func (s *scalarT) Embed() Message { return nil } func (s *scalarT) Element() FieldTypeElem { return nil } func (s *scalarT) Key() FieldTypeElem { return nil } func (s *scalarT) IsOptional() bool { return !s.fld.Syntax().SupportsRequiredPrefix() || s.ProtoLabel() == Optional } func (s *scalarT) IsRequired() bool { return s.fld.Syntax().SupportsRequiredPrefix() && s.ProtoLabel() == Required } func (s *scalarT) toElem() FieldTypeElem { return &scalarE{ typ: s, ptype: s.ProtoType(), } } type enumT struct { *scalarT enum Enum } func (e *enumT) Enum() Enum { return e.enum } func (e *enumT) IsEnum() bool { return true } func (e *enumT) Imports() []File { if f := e.enum.File(); f.Name() != e.fld.File().Name() { return []File{f} } return nil } func (e *enumT) toElem() FieldTypeElem { return &enumE{ scalarE: e.scalarT.toElem().(*scalarE), enum: e.enum, } } type embedT struct { *scalarT msg Message } func (e *embedT) Embed() Message { return e.msg } func (e *embedT) IsEmbed() bool { return true } func (e *embedT) Imports() []File { if f := e.msg.File(); f.Name() != e.fld.File().Name() { return []File{f} } return nil } func (e *embedT) toElem() FieldTypeElem { return &embedE{ scalarE: e.scalarT.toElem().(*scalarE), msg: e.msg, } } type repT struct { *scalarT el 
FieldTypeElem } func (r *repT) IsRepeated() bool { return true } func (r *repT) Element() FieldTypeElem { return r.el } func (r *repT) Imports() []File { return r.el.Imports() } func (r *repT) toElem() FieldTypeElem { panic("cannot convert repeated FieldType to FieldTypeElem") } type mapT struct { *repT key FieldTypeElem } func (m *mapT) IsRepeated() bool { return false } func (m *mapT) IsMap() bool { return true } func (m *mapT) Key() FieldTypeElem { return m.key } var ( _ FieldType = (*scalarT)(nil) _ FieldType = (*enumT)(nil) _ FieldType = (*embedT)(nil) _ FieldType = (*repT)(nil) _ FieldType = (*mapT)(nil) ) protoc-gen-star-2.0.3/field_type_elem.go000066400000000000000000000040501440740147700202240ustar00rootroot00000000000000package pgs // FieldTypeElem describes a component of a FieldType. This type only shows up // in repeated and map FieldTypes. type FieldTypeElem interface { // ParentType returns the parent FieldType that holds this element. ParentType() FieldType // ProtoType returns the ProtoType describing this component. ProtoType() ProtoType // IsEmbed returns true if the component is an embedded message. IsEmbed() bool // IsEnum returns true if the component is an enum value. IsEnum() bool // Imports includes all external Files required by this field. Imports() []File // Enum returns the Enum associated with this FieldTypeElem. If IsEnum // returns false, this value will be nil. Enum() Enum // Embed returns the embedded Message associated with this FieldTypeElem. If // IsEmbed returns false, this value will be nil. 
Embed() Message setType(t FieldType) } type scalarE struct { typ FieldType ptype ProtoType } func (s *scalarE) ParentType() FieldType { return s.typ } func (s *scalarE) ProtoType() ProtoType { return s.ptype } func (s *scalarE) IsEmbed() bool { return false } func (s *scalarE) IsEnum() bool { return false } func (s *scalarE) setType(t FieldType) { s.typ = t } func (s *scalarE) Imports() []File { return nil } func (s *scalarE) Enum() Enum { return nil } func (s *scalarE) Embed() Message { return nil } type enumE struct { *scalarE enum Enum } func (e *enumE) IsEnum() bool { return true } func (e *enumE) Enum() Enum { return e.enum } func (e *enumE) Imports() []File { if f := e.enum.File(); f.Name() != e.ParentType().Field().File().Name() { return []File{f} } return nil } type embedE struct { *scalarE msg Message } func (e *embedE) IsEmbed() bool { return true } func (e *embedE) Embed() Message { return e.msg } func (e *embedE) Imports() []File { if f := e.msg.File(); f.Name() != e.ParentType().Field().File().Name() { return []File{f} } return nil } var ( _ FieldTypeElem = (*scalarE)(nil) _ FieldTypeElem = (*enumE)(nil) _ FieldTypeElem = (*embedE)(nil) ) protoc-gen-star-2.0.3/field_type_elem_test.go000066400000000000000000000041021440740147700212610ustar00rootroot00000000000000package pgs import ( "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestScalarE_ParentType(t *testing.T) { t.Parallel() s := &scalarE{} s.setType(&scalarT{}) assert.Equal(t, s.typ, s.ParentType()) } func TestScalarE_ProtoType(t *testing.T) { t.Parallel() s := &scalarE{ptype: ProtoType(descriptor.FieldDescriptorProto_TYPE_BYTES)} assert.Equal(t, s.ptype, s.ProtoType()) } func TestScalarE_IsEmbed(t *testing.T) { t.Parallel() assert.False(t, (&scalarE{}).IsEmbed()) } func TestScalarE_IsEnum(t *testing.T) { t.Parallel() assert.False(t, (&scalarE{}).IsEnum()) } func TestScalarE_Imports(t *testing.T) { 
t.Parallel() assert.Nil(t, (&scalarE{}).Imports()) } func TestScalarE_Embed(t *testing.T) { t.Parallel() assert.Nil(t, (&scalarE{}).Embed()) } func TestScalarE_Enum(t *testing.T) { t.Parallel() assert.Nil(t, (&scalarE{}).Enum()) } func TestEnumE_IsEnum(t *testing.T) { t.Parallel() assert.True(t, (&enumE{}).IsEnum()) } func TestEnumE_Enum(t *testing.T) { t.Parallel() e := &enumE{enum: dummyEnum()} assert.Equal(t, e.enum, e.Enum()) } func TestEnumE_Imports(t *testing.T) { t.Parallel() en := dummyEnum() f := dummyFile() en.parent = f e := &enumE{scalarE: &scalarE{}, enum: en} fld := dummyField() e.typ = fld.typ assert.Empty(t, e.Imports()) f.desc.Name = proto.String("some/other/file.proto") assert.Len(t, e.Imports(), 1) assert.Equal(t, e.Enum().File(), e.Imports()[0]) } func TestEmbedE_IsEmbed(t *testing.T) { t.Parallel() assert.True(t, (&embedE{}).IsEmbed()) } func TestEmbedE_Embed(t *testing.T) { t.Parallel() e := &embedE{msg: dummyMsg()} assert.Equal(t, e.msg, e.Embed()) } func TestEmbedE_Imports(t *testing.T) { t.Parallel() f := dummyFile() msg := dummyMsg() msg.parent = f e := &embedE{scalarE: &scalarE{}, msg: msg} fld := dummyField() e.typ = fld.typ assert.Empty(t, e.Imports()) f.desc.Name = proto.String("some/other/file.proto") assert.Len(t, e.Imports(), 1) assert.Equal(t, e.Embed().File(), e.Imports()[0]) } protoc-gen-star-2.0.3/field_type_test.go000066400000000000000000000131721440740147700202660ustar00rootroot00000000000000package pgs import ( "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestScalarT_Field(t *testing.T) { t.Parallel() f := dummyField() s := &scalarT{} f.addType(s) assert.Equal(t, f, s.Field()) } func TestScalarT_IsRepeated(t *testing.T) { t.Parallel() s := &scalarT{} assert.False(t, s.IsRepeated()) } func TestScalarT_IsMap(t *testing.T) { t.Parallel() s := &scalarT{} assert.False(t, s.IsMap()) } func TestScalarT_IsEnum(t *testing.T) { 
t.Parallel() s := &scalarT{} assert.False(t, s.IsEnum()) } func TestScalarT_IsEmbed(t *testing.T) { t.Parallel() s := &scalarT{} assert.False(t, s.IsEmbed()) } func TestScalarT_ProtoType(t *testing.T) { t.Parallel() f := dummyField() s := &scalarT{} f.addType(s) assert.Equal(t, f.desc.GetType(), s.ProtoType().Proto()) } func TestScalarT_ProtoLabel(t *testing.T) { t.Parallel() f := dummyField() s := &scalarT{} f.addType(s) assert.Equal(t, f.desc.GetLabel(), s.ProtoLabel().Proto()) } func TestScalarT_Imports(t *testing.T) { t.Parallel() assert.Nil(t, (&scalarT{}).Imports()) } func TestScalarT_Enum(t *testing.T) { t.Parallel() assert.Nil(t, (&scalarT{}).Enum()) } func TestScalarT_Embed(t *testing.T) { t.Parallel() assert.Nil(t, (&scalarT{}).Embed()) } func TestScalarT_Element(t *testing.T) { t.Parallel() assert.Nil(t, (&scalarT{}).Element()) } func TestScalarT_Key(t *testing.T) { t.Parallel() assert.Nil(t, (&scalarT{}).Key()) } func TestScalarT_IsOptional(t *testing.T) { t.Parallel() s := &scalarT{} f := dummyOneOfField(true) f.addType(s) assert.True(t, s.IsOptional()) fl := dummyFile() fl.desc.Syntax = nil f.Message().setParent(fl) assert.True(t, s.IsOptional()) req := descriptor.FieldDescriptorProto_LABEL_REQUIRED f.desc.Label = &req assert.False(t, s.IsOptional()) } func TestScalarT_IsNotOptional(t *testing.T) { t.Parallel() s := &scalarT{} f := dummyField() f.addType(s) assert.True(t, s.IsOptional()) fl := dummyFile() fl.desc.Syntax = nil f.Message().setParent(fl) assert.True(t, s.IsOptional()) req := descriptor.FieldDescriptorProto_LABEL_REQUIRED f.desc.Label = &req assert.False(t, s.IsOptional()) } func TestScalarT_IsRequired(t *testing.T) { t.Parallel() s := &scalarT{} f := dummyField() f.addType(s) assert.False(t, s.IsRequired()) fl := dummyFile() fl.desc.Syntax = nil f.Message().setParent(fl) assert.False(t, s.IsRequired()) req := descriptor.FieldDescriptorProto_LABEL_REQUIRED f.desc.Label = &req assert.True(t, s.IsRequired()) } func TestScalarT_ToElem(t 
*testing.T) { t.Parallel() s := &scalarT{} f := dummyField() f.addType(s) el := s.toElem() assert.Equal(t, s, el.ParentType()) assert.Equal(t, s.ProtoType(), el.ProtoType()) } func TestEnumT_Enum(t *testing.T) { t.Parallel() e := &enumT{enum: &enum{}} assert.Equal(t, e.enum, e.Enum()) } func TestEnumT_IsEnum(t *testing.T) { t.Parallel() e := &enumT{} assert.True(t, e.IsEnum()) } func TestEnumT_Imports(t *testing.T) { t.Parallel() f := dummyFile() en := dummyEnum() en.parent = f e := &enumT{scalarT: &scalarT{}, enum: en} fld := dummyField() fld.addType(e) assert.Empty(t, e.Imports()) f.desc.Name = proto.String("some/other/file.proto") assert.Len(t, e.Imports(), 1) assert.Equal(t, e.enum.File(), e.Imports()[0]) } func TestEnumT_ToElem(t *testing.T) { t.Parallel() e := &enumT{ scalarT: &scalarT{}, enum: dummyEnum(), } f := dummyField() f.addType(e) el := e.toElem() assert.True(t, el.IsEnum()) assert.Equal(t, e.enum, el.Enum()) assert.Equal(t, e.ProtoType(), el.ProtoType()) } func TestEmbedT_IsEmbed(t *testing.T) { t.Parallel() e := &embedT{} assert.True(t, e.IsEmbed()) } func TestEmbedT_Embed(t *testing.T) { t.Parallel() e := &embedT{msg: dummyMsg()} assert.Equal(t, e.msg, e.Embed()) } func TestEmbedT_Imports(t *testing.T) { t.Parallel() msg := dummyMsg() f := dummyFile() msg.parent = f e := &embedT{scalarT: &scalarT{}, msg: msg} dummyField().addType(e) assert.Empty(t, e.Imports()) f.desc.Name = proto.String("some/other/file.proto") assert.Len(t, e.Imports(), 1) assert.Equal(t, e.msg.File(), e.Imports()[0]) } func TestEmbedT_ToElem(t *testing.T) { t.Parallel() e := &embedT{ scalarT: &scalarT{}, msg: dummyMsg(), } f := dummyField() f.addType(e) el := e.toElem() assert.True(t, el.IsEmbed()) assert.Equal(t, e.msg, el.Embed()) assert.Equal(t, e.ProtoType(), el.ProtoType()) } func TestRepT_IsRepeated(t *testing.T) { t.Parallel() r := &repT{} assert.True(t, r.IsRepeated()) } func TestRepT_Element(t *testing.T) { t.Parallel() r := &repT{el: &scalarE{}} assert.Equal(t, r.el, 
r.Element()) } func TestRepT_Imports(t *testing.T) { t.Parallel() msg := dummyMsg() f := dummyFile() msg.parent = f e := &embedT{scalarT: &scalarT{}, msg: msg} dummyField().addType(e) fld := dummyField() r := &repT{scalarT: &scalarT{}, el: e.toElem()} fld.addType(r) assert.Empty(t, r.Imports()) f.desc.Name = proto.String("some/other/file.proto") assert.Len(t, r.Imports(), 1) assert.Equal(t, r.el.Embed().File(), r.Imports()[0]) } func TestRepT_ToElem(t *testing.T) { t.Parallel() assert.Panics(t, func() { (&repT{}).toElem() }) } func TestMapT_IsRepeated(t *testing.T) { t.Parallel() assert.False(t, (&mapT{}).IsRepeated()) } func TestMapT_IsMap(t *testing.T) { t.Parallel() assert.True(t, (&mapT{}).IsMap()) } func TestMapT_Key(t *testing.T) { t.Parallel() m := &mapT{key: &scalarE{}} assert.Equal(t, m.key, m.Key()) } type mockT struct { FieldType i []File f Field } func (t *mockT) Imports() []File { return t.i } func (t *mockT) setField(f Field) { t.f = f } protoc-gen-star-2.0.3/file.go000066400000000000000000000155431440740147700160260ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // File describes the contents of a single proto file. type File interface { ParentEntity // InputPath returns the input FilePath. This is equivalent to the value // returned by Name. InputPath() FilePath // Descriptor returns the underlying descriptor for the proto file Descriptor() *descriptor.FileDescriptorProto // TransitiveImports returns all direct and transitive dependencies of this // File. Use Imports to obtain only direct dependencies. TransitiveImports() []File // UnusedImports returns all imported files that aren't used by the current // File. Public imports are not included in this list. UnusedImports() []File // Dependents returns all files where the given file was directly or // transitively imported. 
Dependents() []File // Services returns the services from this proto file. Services() []Service // SyntaxSourceCodeInfo returns the comment info attached to the `syntax` // stanza of the file. This method is an alias of the SourceCodeInfo method. SyntaxSourceCodeInfo() SourceCodeInfo // PackageSourceCodeInfo returns the comment info attached to the `package` // stanza of the file. PackageSourceCodeInfo() SourceCodeInfo setPackage(p Package) addFileDependency(fl File) addDependent(fl File) addService(s Service) addPackageSourceCodeInfo(info SourceCodeInfo) } type file struct { desc *descriptor.FileDescriptorProto fqn string pkg Package enums []Enum defExts []Extension dependents []File dependentsCache []File fileDependencies []File msgs []Message srvs []Service buildTarget bool syntaxInfo, packageInfo SourceCodeInfo } func (f *file) Name() Name { return Name(f.desc.GetName()) } func (f *file) FullyQualifiedName() string { return f.fqn } func (f *file) Syntax() Syntax { return Syntax(f.desc.GetSyntax()) } func (f *file) Package() Package { return f.pkg } func (f *file) File() File { return f } func (f *file) BuildTarget() bool { return f.buildTarget } func (f *file) Descriptor() *descriptor.FileDescriptorProto { return f.desc } func (f *file) InputPath() FilePath { return FilePath(f.Name().String()) } func (f *file) MapEntries() (me []Message) { return nil } func (f *file) SourceCodeInfo() SourceCodeInfo { return f.SyntaxSourceCodeInfo() } func (f *file) SyntaxSourceCodeInfo() SourceCodeInfo { return f.syntaxInfo } func (f *file) PackageSourceCodeInfo() SourceCodeInfo { return f.packageInfo } func (f *file) Enums() []Enum { return f.enums } func (f *file) AllEnums() []Enum { es := f.Enums() for _, m := range f.msgs { es = append(es, m.AllEnums()...) } return es } func (f *file) Messages() []Message { return f.msgs } func (f *file) AllMessages() []Message { msgs := f.Messages() for _, m := range f.msgs { msgs = append(msgs, m.AllMessages()...) 
} return msgs } func (f *file) Services() []Service { return f.srvs } func (f *file) Imports() []File { out := make([]File, len(f.fileDependencies)) copy(out, f.fileDependencies) return out } func (f *file) TransitiveImports() []File { importMap := make(map[string]File, len(f.fileDependencies)) for _, fl := range f.fileDependencies { importMap[fl.Name().String()] = fl for _, imp := range fl.TransitiveImports() { importMap[imp.File().Name().String()] = imp } } out := make([]File, 0, len(importMap)) for _, imp := range importMap { out = append(out, imp) } return out } func (f *file) UnusedImports() []File { public := make(map[int]struct{}, len(f.desc.PublicDependency)) for _, i := range f.desc.PublicDependency { public[int(i)] = struct{}{} } mp := make(map[string]File, len(f.fileDependencies)) for i, fl := range f.fileDependencies { if _, ok := public[i]; ok { continue } mp[fl.Name().String()] = fl } for _, msg := range f.AllMessages() { for _, imp := range msg.Imports() { delete(mp, imp.Name().String()) } } for _, svc := range f.Services() { for _, imp := range svc.Imports() { delete(mp, imp.Name().String()) } } out := make([]File, 0, len(mp)) for _, fl := range mp { out = append(out, fl) } return out } func (f *file) Dependents() []File { if f.dependentsCache == nil { set := make(map[string]File) for _, fl := range f.dependents { set[fl.Name().String()] = fl for _, d := range fl.Dependents() { set[d.Name().String()] = d } } f.dependentsCache = make([]File, 0, len(set)) for _, d := range set { f.dependentsCache = append(f.dependentsCache, d) } } return f.dependentsCache } func (f *file) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (bool, error) { return extension(f.desc.GetOptions(), desc, &ext) } func (f *file) DefinedExtensions() []Extension { return f.defExts } func (f *file) accept(v Visitor) (err error) { if v == nil { return nil } if v, err = v.VisitFile(f); err != nil || v == nil { return } for _, e := range f.enums { if err = e.accept(v); err != 
nil { return } } for _, m := range f.msgs { if err = m.accept(v); err != nil { return } } for _, s := range f.srvs { if err = s.accept(v); err != nil { return } } for _, ext := range f.defExts { if err = ext.accept(v); err != nil { return } } return } func (f *file) addDefExtension(ext Extension) { f.defExts = append(f.defExts, ext) } func (f *file) setPackage(pkg Package) { f.pkg = pkg } func (f *file) addEnum(e Enum) { e.setParent(f) f.enums = append(f.enums, e) } func (f *file) addFileDependency(fl File) { f.fileDependencies = append(f.fileDependencies, fl) } func (f *file) addDependent(fl File) { f.dependents = append(f.dependents, fl) } func (f *file) addMessage(m Message) { m.setParent(f) f.msgs = append(f.msgs, m) } func (f *file) addService(s Service) { s.setFile(f) f.srvs = append(f.srvs, s) } func (f *file) addMapEntry(m Message) { panic("cannot add map entry directly to file") } func (f *file) childAtPath(path []int32) Entity { switch { case len(path) == 0: return f case len(path)%2 == 1: // all declaration paths are multiples of two return nil } var child Entity switch path[0] { case messageTypePath: child = f.msgs[path[1]] case enumTypePath: child = f.enums[path[1]] case servicePath: child = f.srvs[path[1]] default: return nil } return child.childAtPath(path[2:]) } func (f *file) addSourceCodeInfo(info SourceCodeInfo) { f.syntaxInfo = info } func (f *file) addPackageSourceCodeInfo(info SourceCodeInfo) { f.packageInfo = info } var _ File = (*file)(nil) protoc-gen-star-2.0.3/file_test.go000066400000000000000000000157011440740147700170610ustar00rootroot00000000000000package pgs import ( "errors" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestFile_Name(t *testing.T) { t.Parallel() f := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} assert.Equal(t, Name("foobar"), f.Name()) } func TestFile_FullyQualifiedName(t *testing.T) { 
t.Parallel() f := &file{fqn: "foo"} assert.Equal(t, f.fqn, f.FullyQualifiedName()) } func TestFile_Syntax(t *testing.T) { t.Parallel() f := &file{desc: &descriptor.FileDescriptorProto{}} assert.Equal(t, Proto2, f.Syntax()) } func TestFile_Package(t *testing.T) { t.Parallel() f := &file{pkg: &pkg{comments: "fizz/buzz"}} assert.Equal(t, f.pkg, f.Package()) } func TestFile_File(t *testing.T) { t.Parallel() f := &file{buildTarget: true} assert.Equal(t, f, f.File()) } func TestFile_BuildTarget(t *testing.T) { t.Parallel() f := &file{buildTarget: true} assert.True(t, f.BuildTarget()) f.buildTarget = false assert.False(t, f.BuildTarget()) } func TestFile_Descriptor(t *testing.T) { t.Parallel() f := &file{desc: &descriptor.FileDescriptorProto{}} assert.Equal(t, f.desc, f.Descriptor()) } func TestFile_InputPath(t *testing.T) { t.Parallel() f := &file{desc: &descriptor.FileDescriptorProto{Name: proto.String("foo.bar")}} assert.Equal(t, "foo.bar", f.InputPath().String()) } func TestFile_Enums(t *testing.T) { t.Parallel() f := &file{} assert.Empty(t, f.Enums()) e := &enum{} f.addEnum(e) assert.Len(t, f.Enums(), 1) assert.Equal(t, e, f.Enums()[0]) } func TestFile_AllEnums(t *testing.T) { t.Parallel() f := &file{} assert.Empty(t, f.AllEnums()) f.addEnum(&enum{}) m := &msg{} m.addEnum(&enum{}) f.addMessage(m) assert.Len(t, f.Enums(), 1) assert.Len(t, f.AllEnums(), 2) } func TestFile_Messages(t *testing.T) { t.Parallel() f := &file{} assert.Empty(t, f.Messages()) m := &msg{} f.addMessage(m) assert.Len(t, f.Messages(), 1) assert.Equal(t, m, f.Messages()[0]) } func TestFile_MapEntries(t *testing.T) { t.Parallel() f := &file{} assert.Panics(t, func() { f.addMapEntry(&msg{}) }) assert.Empty(t, f.MapEntries()) } func TestFile_AllMessages(t *testing.T) { t.Parallel() f := &file{} assert.Empty(t, f.AllMessages()) m := &msg{} m.addMessage(&msg{}) f.addMessage(m) assert.Len(t, f.Messages(), 1) assert.Len(t, f.AllMessages(), 2) } func TestFile_Services(t *testing.T) { t.Parallel() f := 
&file{} assert.Empty(t, f.Services()) s := &service{} f.addService(s) assert.Len(t, f.Services(), 1) assert.Equal(t, s, f.Services()[0]) } func TestFile_Imports(t *testing.T) { t.Parallel() flDep := dummyFile() nf := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} flDep.addFileDependency(nf) f := &file{} assert.Empty(t, f.Imports()) f.addFileDependency(flDep) assert.Len(t, f.Imports(), 1) } func TestFile_TransitiveImports(t *testing.T) { t.Parallel() flDep := dummyFile() nf := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} flDep.addFileDependency(nf) f := &file{} assert.Empty(t, f.TransitiveImports()) f.addFileDependency(flDep) assert.Len(t, f.TransitiveImports(), 2) } func TestFile_UnusedImports(t *testing.T) { t.Parallel() target := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} unusedFile := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("i/am/unused.proto"), }} target.addFileDependency(unusedFile) publicFile := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("i/am/public.proto"), }} target.addFileDependency(publicFile) target.desc.PublicDependency = append(target.desc.PublicDependency, 1) msgDep := dummyMsg() usedFile := msgDep.File().(*file) ft := &embedT{scalarT: &scalarT{}, msg: msgDep} fld := &field{} fld.addType(ft) m := &msg{} m.addField(fld) target.addMessage(m) mtd := &method{in: msgDep, out: m} svc := &service{} svc.addMethod(mtd) target.addService(svc) target.addFileDependency(usedFile) unused := target.UnusedImports() assert.Len(t, unused, 1) assert.Equal(t, unusedFile, unused[0]) } func TestFile_Dependents(t *testing.T) { t.Parallel() f := &file{} fl := dummyFile() f.addDependent(fl) deps := f.Dependents() assert.Len(t, deps, 1) assert.Contains(t, deps, fl) } func TestFile_Accept(t *testing.T) { t.Parallel() f := &file{} assert.Nil(t, f.accept(nil)) v := &mockVisitor{} assert.NoError(t, f.accept(v)) assert.Equal(t, 1, v.file) 
v.Reset() v.v = v v.err = errors.New("foo") assert.Equal(t, v.err, f.accept(v)) assert.Equal(t, 1, v.file) assert.Zero(t, v.enum) assert.Zero(t, v.message) assert.Zero(t, v.service) assert.Zero(t, v.extension) v.Reset() f.addEnum(&enum{}) f.addMessage(&msg{}) f.addService(&service{}) f.addDefExtension(&ext{}) assert.NoError(t, f.accept(v)) assert.Equal(t, 1, v.file) assert.Equal(t, 1, v.enum) assert.Equal(t, 1, v.message) assert.Equal(t, 1, v.service) assert.Equal(t, 1, v.extension) v.Reset() f.addDefExtension(&mockExtension{err: errors.New("fizz")}) assert.EqualError(t, f.accept(v), "fizz") assert.Equal(t, 1, v.file) assert.Equal(t, 1, v.enum) assert.Equal(t, 1, v.message) assert.Equal(t, 1, v.service) assert.Equal(t, 2, v.extension) v.Reset() f.addService(&mockService{err: errors.New("fizz")}) assert.EqualError(t, f.accept(v), "fizz") assert.Equal(t, 1, v.file) assert.Equal(t, 1, v.enum) assert.Equal(t, 1, v.message) assert.Equal(t, 2, v.service) assert.Zero(t, v.extension) v.Reset() f.addMessage(&mockMessage{err: errors.New("bar")}) assert.EqualError(t, f.accept(v), "bar") assert.Equal(t, 1, v.file) assert.Equal(t, 1, v.enum) assert.Equal(t, 2, v.message) assert.Zero(t, v.service) assert.Zero(t, v.extension) v.Reset() f.addEnum(&mockEnum{err: errors.New("baz")}) assert.EqualError(t, f.accept(v), "baz") assert.Equal(t, 1, v.file) assert.Equal(t, 2, v.enum) assert.Zero(t, v.message) assert.Zero(t, v.service) assert.Zero(t, v.extension) } func TestFile_Extension(t *testing.T) { // cannot be parallel assert.NotPanics(t, func() { (&file{ desc: &descriptor.FileDescriptorProto{}, }).Extension(nil, nil) }) } func TestFile_DefinedExtensions(t *testing.T) { t.Parallel() f := &file{} assert.Empty(t, f.DefinedExtensions()) ext := &ext{} f.addDefExtension(ext) assert.Len(t, f.DefinedExtensions(), 1) } // needed to wrap since there is a File method type mFile interface { File } type mockFile struct { mFile pkg Package err error } func (f *mockFile) setPackage(p Package) { 
f.pkg = p } func (f *mockFile) accept(v Visitor) error { _, err := v.VisitFile(f) if f.err != nil { return f.err } return err } func dummyFile() *file { pkg := dummyPkg() f := &file{ pkg: pkg, desc: &descriptor.FileDescriptorProto{ Package: proto.String(pkg.ProtoName().String()), Syntax: proto.String(string(Proto3)), Name: proto.String("file.proto"), }, } pkg.addFile(f) return f } protoc-gen-star-2.0.3/generator.go000066400000000000000000000052531440740147700170720ustar00rootroot00000000000000package pgs import ( "io" "log" "os" ) // Generator configures and executes a protoc plugin's lifecycle. type Generator struct { Debugger persister persister // handles writing artifacts to their output workflow workflow mods []Module // registered pg* modules in io.Reader // protoc input reader out io.Writer // protoc output writer debug bool // whether or not to print debug messages params Parameters // CLI parameters passed in from protoc paramMutators []ParamMutator // registered param mutators } // Init configures a new Generator. InitOptions may be provided as well to // modify the behavior of the generator. func Init(opts ...InitOption) *Generator { g := &Generator{ in: os.Stdin, out: os.Stdout, persister: newPersister(), workflow: &onceWorkflow{workflow: &standardWorkflow{}}, } for _, opt := range opts { opt(g) } g.Debugger = initDebugger(g.debug, log.New(os.Stderr, "", 0)) g.persister.SetDebugger(g.Debugger) return g } // RegisterModule should be called before Render to attach a custom Module to // the Generator. This method can be called multiple times. func (g *Generator) RegisterModule(m ...Module) *Generator { for _, mod := range m { g.Assert(mod != nil, "nil module provided") g.Debug("registering module: ", mod.Name()) } g.mods = append(g.mods, m...) return g } // RegisterPostProcessor should be called before Render to attach // PostProcessors to the Generator. This method can be called multiple times. 
// PostProcessors are executed against their matches in the order in which they // are registered. func (g *Generator) RegisterPostProcessor(p ...PostProcessor) *Generator { for _, pp := range p { g.Assert(pp != nil, "nil post-processor provided") } g.persister.AddPostProcessor(p...) return g } // AST returns the constructed AST graph from the gatherer. This method is // idempotent, can be called multiple times (before and after calls to Render, // even), and is particularly useful in testing. func (g *Generator) AST() AST { return g.workflow.Init(g) } // Render executes the protoc plugin flow, gathering the AST from the input // io.Reader (typically stdin via protoc), running all the registered modules, // and persisting the generated artifacts to the output io.Writer (typically // stdout to protoc + direct file system writes for custom artifacts). This // method is idempotent, in that subsequent calls to Render will have no // effect. func (g *Generator) Render() { ast := g.workflow.Init(g) arts := g.workflow.Run(ast) g.workflow.Persist(arts) } func (g *Generator) push(prefix string) { g.Debugger = g.Push(prefix) } func (g *Generator) pop() { g.Debugger = g.Pop() } protoc-gen-star-2.0.3/generator_test.go000066400000000000000000000044501440740147700201270ustar00rootroot00000000000000package pgs import ( "bytes" "os" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" plugin_go "google.golang.org/protobuf/types/pluginpb" ) func TestInit(t *testing.T) { t.Parallel() b := &bytes.Buffer{} bb := &bytes.Buffer{} g := Init(ProtocInput(b), ProtocOutput(bb), func(g *Generator) { /* noop */ }) assert.NotNil(t, g) assert.Equal(t, g.in, b) assert.Equal(t, g.out, bb) g = Init() assert.Equal(t, os.Stdin, g.in) assert.Equal(t, os.Stdout, g.out) _, ok := g.workflow.(*onceWorkflow) assert.True(t, ok) } func TestGenerator_RegisterModule(t *testing.T) { t.Parallel() d := InitMockDebugger() g := 
&Generator{Debugger: d} assert.Empty(t, g.mods) g.RegisterModule(&mockModule{name: "foo"}) assert.False(t, d.Failed()) assert.Len(t, g.mods, 1) assert.Panics(t, func() { g.RegisterModule(nil) }) assert.True(t, d.Failed()) } func TestGenerator_RegisterPostProcessor(t *testing.T) { t.Parallel() d := InitMockDebugger() p := newPersister() g := &Generator{Debugger: d, persister: p} pp := &mockPP{} assert.Empty(t, p.procs) g.RegisterPostProcessor(pp) assert.False(t, d.Failed()) assert.Len(t, p.procs, 1) g.RegisterPostProcessor(nil) assert.True(t, d.Failed()) } func TestGenerator_AST(t *testing.T) { t.Parallel() g := Init() wf := &dummyWorkflow{AST: new(graph)} g.workflow = wf assert.Equal(t, wf.AST, g.AST()) assert.True(t, wf.initted) } func TestGenerator_Render(t *testing.T) { // cannot be parallel req := &plugin_go.CodeGeneratorRequest{ FileToGenerate: []string{"foo"}, ProtoFile: []*descriptor.FileDescriptorProto{ { Name: proto.String("foo"), Syntax: proto.String("proto2"), Package: proto.String("bar"), }, }, } b, err := proto.Marshal(req) assert.NoError(t, err) buf := &bytes.Buffer{} g := Init(ProtocInput(bytes.NewReader(b)), ProtocOutput(buf)) assert.NotPanics(t, g.Render) var res plugin_go.CodeGeneratorResponse assert.NoError(t, proto.Unmarshal(buf.Bytes(), &res)) } func TestGenerator_PushPop(t *testing.T) { t.Parallel() g := Init() g.push("foo") pd, ok := g.Debugger.(prefixedDebugger) assert.True(t, ok) assert.Equal(t, "[foo]", pd.prefix) g.pop() _, ok = g.Debugger.(rootDebugger) assert.True(t, ok) } protoc-gen-star-2.0.3/go.mod000066400000000000000000000010401440740147700156510ustar00rootroot00000000000000module github.com/lyft/protoc-gen-star/v2 go 1.17 require ( github.com/spf13/afero v1.3.3 github.com/stretchr/testify v1.6.1 golang.org/x/tools v0.1.12 google.golang.org/protobuf v1.23.0 ) require ( github.com/davecgh/go-spew v1.1.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect 
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect golang.org/x/text v0.3.7 // indirect gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect ) protoc-gen-star-2.0.3/go.sum000066400000000000000000000144671440740147700157170ustar00rootroot00000000000000github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/spf13/afero v1.3.3 h1:p5gZEKLYoL7wh8VrJesMaYeNxdEd1v3cb4irOk9zB54= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= 
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.23.0 h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= protoc-gen-star-2.0.3/init_option.go000066400000000000000000000045721440740147700174420ustar00rootroot00000000000000package pgs import ( "io" "os" "github.com/spf13/afero" ) // An InitOption modifies the behavior of a Generator at initialization. type InitOption func(g *Generator) // ParamMutator is a method that modifies Parameters p in-place. These are // typically applied before code generation begins, and configurable via the // MutateParams InitOption. type ParamMutator func(p Parameters) // ProtocInput changes the input io.Reader source. This value is where the // serialized CodeGeneratorRequest is received from protoc. By default, // os.Stdin is used. func ProtocInput(r io.Reader) InitOption { return func(g *Generator) { g.in = r } } // ProtocOutput changes the output io.Writer destination. This value is where // the serialized CodeGeneratorResponse is sent to protoc. By default, // os.Stdout is used. func ProtocOutput(w io.Writer) InitOption { return func(g *Generator) { g.out = w } } // DebugMode enables verbose logging for module development and debugging. 
func DebugMode() InitOption { return func(g *Generator) { g.debug = true } } // DebugEnv enables verbose logging only if the passed in environment variable // is non-empty. func DebugEnv(f string) InitOption { return func(g *Generator) { g.debug = os.Getenv(f) != "" } } // MutateParams applies pm to the parameters passed in from protoc. func MutateParams(pm ...ParamMutator) InitOption { return func(g *Generator) { g.paramMutators = append(g.paramMutators, pm...) } } // FileSystem overrides the default file system used to write Artifacts to // disk. By default, the OS's file system is used. This option currently only // impacts CustomFile and CustomTemplateFile artifacts generated by modules. func FileSystem(fs afero.Fs) InitOption { return func(g *Generator) { g.persister.SetFS(fs) } } // BiDirectional instructs the Generator to build the AST graph in both // directions (ie, accessing dependents of an entity, not just dependencies). func BiDirectional() InitOption { return func(g *Generator) { g.workflow = &onceWorkflow{workflow: &standardWorkflow{BiDi: true}} } } // SupportedFeatures allows defining protoc features to enable / disable. 
// See: https://github.com/protocolbuffers/protobuf/blob/v3.17.0/docs/implementing_proto3_presence.md#signaling-that-your-code-generator-supports-proto3-optional func SupportedFeatures(feat *uint64) InitOption { return func(g *Generator) { g.persister.SetSupportedFeatures(feat) } } protoc-gen-star-2.0.3/init_option_test.go000066400000000000000000000026351440740147700204770ustar00rootroot00000000000000package pgs import ( "bytes" "math/rand" "os" "strconv" "testing" "github.com/spf13/afero" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestDebugMode(t *testing.T) { t.Parallel() g := &Generator{} assert.False(t, g.debug) DebugMode()(g) assert.True(t, g.debug) } func TestDebugEnv(t *testing.T) { t.Parallel() g := &Generator{} assert.False(t, g.debug) e := strconv.Itoa(rand.Int()) DebugEnv(e)(g) assert.False(t, g.debug) assert.NoError(t, os.Setenv(e, "1")) DebugEnv(e)(g) assert.True(t, g.debug) } func TestFileSystem(t *testing.T) { t.Parallel() p := dummyPersister(InitMockDebugger()) g := &Generator{persister: p} fs := afero.NewMemMapFs() FileSystem(fs)(g) assert.Equal(t, fs, p.fs) } func TestProtocInput(t *testing.T) { t.Parallel() g := &Generator{} assert.Nil(t, g.in) b := &bytes.Buffer{} ProtocInput(b)(g) assert.Equal(t, b, g.in) } func TestProtocOutput(t *testing.T) { t.Parallel() g := &Generator{} assert.Nil(t, g.out) b := &bytes.Buffer{} ProtocOutput(b)(g) assert.Equal(t, b, g.out) } func TestBiDirectional(t *testing.T) { t.Parallel() g := &Generator{} assert.Nil(t, g.workflow) BiDirectional()(g) wf := g.workflow require.IsType(t, &onceWorkflow{}, wf) once := wf.(*onceWorkflow) require.IsType(t, &standardWorkflow{}, once.workflow) std := once.workflow.(*standardWorkflow) assert.True(t, std.BiDi) } 
protoc-gen-star-2.0.3/lang/000077500000000000000000000000001440740147700154715ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/000077500000000000000000000000001440740147700160765ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/Makefile000066400000000000000000000033241440740147700175400ustar00rootroot00000000000000.PHONY: testdata-go-names testdata-names: ../../bin/protoc-gen-debug # parse the proto file sets in testdata/names and renders binary CodeGeneratorRequest + official go codegen cd testdata/names && \ set -e; for subdir in `find . -mindepth 1 -maxdepth 1 -type d`; do \ cd $$subdir; \ params=`cat params`; \ protoc -I . \ --plugin=protoc-gen-debug=../../../../../bin/protoc-gen-debug \ --debug_out=".:." \ --go_out="plugins,paths=source_relative,$$params:." \ `find . -name "*.proto"`; \ cd -; \ done testdata-packages: ../../bin/protoc-gen-debug cd testdata/packages && \ set -e; for subdir in `find . -mindepth 1 -maxdepth 1 -type d | grep -v targets`; do \ cd $$subdir; \ params=`cat params`; \ protoc -I . -I .. \ --plugin=protoc-gen-debug=../../../../../bin/protoc-gen-debug \ --debug_out=".:." \ --go_out="paths=source_relative,$$params:." \ `find . -name "*.proto"`; \ cd -; \ done testdata-outputs: ../../bin/protoc-gen-debug cd testdata/outputs && \ set -e; for subdir in `find . -mindepth 1 -maxdepth 1 -type d`; do \ cd $$subdir; \ params=`cat params`; \ protoc -I . -I .. \ --plugin=protoc-gen-debug=../../../../../bin/protoc-gen-debug \ --debug_out=".:." \ --go_out="$$params:." \ `find . -name "*.proto"`; \ cd -; \ done testdata-presence: ../../bin/protoc-gen-debug cd testdata/presence && \ set -e; for subdir in `find . -mindepth 1 -maxdepth 1 -type d`; do \ cd $$subdir; \ params=`cat params`; \ protoc -I . -I .. \ --plugin=protoc-gen-debug=../../../../../bin/protoc-gen-debug \ --debug_out=".:." \ --go_out="$$params:." \ `find . -name "*.proto"`; \ cd -; \ done ../../bin/protoc-gen-debug: cd ../.. 
&& $(MAKE) bin/protoc-gen-debug protoc-gen-star-2.0.3/lang/go/camel.go000066400000000000000000000052361440740147700175140ustar00rootroot00000000000000package pgsgo import ( pgs "github.com/lyft/protoc-gen-star/v2" ) // PGGUpperCamelCase converts Name n to the protoc-gen-go defined upper // camelcase. The rules are slightly different from pgs.UpperCamelCase in that // leading underscores are converted to 'X', mid-string underscores followed by // lowercase letters are removed and the letter is capitalized, all other // punctuation is preserved. This method should be used when deriving names of // protoc-gen-go generated code (ie, message/service struct names and field // names). // // See: https://godoc.org/github.com/golang/protobuf/protoc-gen-go/generator#CamelCase func PGGUpperCamelCase(n pgs.Name) pgs.Name { return pgs.Name(camelCase(n.String())) } // Below copied from https://github.com/golang/protobuf/blob/d04d7b157bb510b1e0c10132224b616ac0e26b17/protoc-gen-go/generator/generator.go#L2640-L2685, // to fix deprecation warning: https://github.com/golang/protobuf/blob/b5de78c91d0d09482d65f0a96927631cd343d7bb/protoc-gen-go/generator/generator.go#L42-L47 // CamelCase returns the CamelCased name. // If there is an interior underscore followed by a lower case letter, // drop the underscore and convert the letter to upper case. // There is a remote possibility of this rewrite causing a name collision, // but it's so remote we're prepared to pretend it's nonexistent - since the // C++ generator lowercases names, it's extremely unlikely to have two fields // with different capitalizations. // In short, _my_field_name_2 becomes XMyFieldName_2. func camelCase(s string) string { if s == "" { return "" } t := make([]byte, 0, 32) i := 0 if s[0] == '_' { // Need a capital letter; drop the '_'. t = append(t, 'X') i++ } // Invariant: if the next letter is lower case, it must be converted // to upper case. 
// That is, we process a word at a time, where words are marked by _ or // upper case letter. Digits are treated as words. for ; i < len(s); i++ { c := s[i] if c == '_' && i+1 < len(s) && isASCIILower(s[i+1]) { continue // Skip the underscore in s. } if isASCIIDigit(c) { t = append(t, c) continue } // Assume we have a letter now - if not, it's a bogus identifier. // The next word is a sequence of characters that must start upper case. if isASCIILower(c) { c ^= ' ' // Make it a capital letter. } t = append(t, c) // Guaranteed not lower case. // Accept lower case sequence that follows. for i+1 < len(s) && isASCIILower(s[i+1]) { i++ t = append(t, s[i]) } } return string(t) } // Is c an ASCII lower-case letter? func isASCIILower(c byte) bool { return 'a' <= c && c <= 'z' } // Is c an ASCII digit? func isASCIIDigit(c byte) bool { return '0' <= c && c <= '9' } protoc-gen-star-2.0.3/lang/go/context.go000066400000000000000000000056541440740147700201230ustar00rootroot00000000000000package pgsgo import pgs "github.com/lyft/protoc-gen-star/v2" // Context resolves Go-specific language for Packages & Entities generated by // protoc-gen-go. The rules that drive the naming behavior are complicated, and // result from an interplay of the go_package file option, the proto package, // and the proto filename itself. Therefore, it is recommended that all proto // files that are targeting Go should include a fully qualified go_package // option. These must be consistent for all proto files that are intended to be // in the same Go package. type Context interface { // Params returns the Parameters associated with this context. Params() pgs.Parameters // Name returns the name of a Node as it would appear in the generation output // of protoc-gen-go. 
For each type, the following is returned: // // - Package: the Go package name // - File: the Go package name // - Message: the struct name // - Field: the field name on the Message struct // - OneOf: the field name on the Message struct // - Enum: the type name // - EnumValue: the constant name // - Service: the server interface name // - Method: the method name on the server and client interface // Name(node pgs.Node) pgs.Name // ServerName returns the name of the server interface for the Service. ServerName(service pgs.Service) pgs.Name // ClientName returns the name of the client interface for the Service. ClientName(service pgs.Service) pgs.Name // ServerStream returns the name of the grpc.ServerStream wrapper for this // method. This name is only used if client or server streaming is // implemented for this method. ServerStream(method pgs.Method) pgs.Name // OneofOption returns the struct name that wraps a OneOf option's value. These // messages contain one field, matching the value returned by Name for this // Field. OneofOption(field pgs.Field) pgs.Name // TypeName returns the type name of a Field as it would appear in the // generated message struct from protoc-gen-go. Fields from imported // packages will be prefixed with the package name. Type(field pgs.Field) TypeName // PackageName returns the name of the Node's package as it would appear in // Go source generated by the official protoc-gen-go plugin. PackageName(node pgs.Node) pgs.Name // ImportPath returns the Go import path for an entity as it would be // included in an import block in a Go file. This value is only appropriate // for Entities imported into a target file/package. ImportPath(entity pgs.Entity) pgs.FilePath // OutputPath returns the output path relative to the plugin's output destination OutputPath(entity pgs.Entity) pgs.FilePath } type context struct{ p pgs.Parameters } // InitContext configures a Context that should be used for deriving Go names // for all Packages and Entities. 
func InitContext(params pgs.Parameters) Context { return context{params} } func (c context) Params() pgs.Parameters { return c.p } protoc-gen-star-2.0.3/lang/go/context_test.go000066400000000000000000000004721440740147700211530ustar00rootroot00000000000000package pgsgo import ( "testing" pgs "github.com/lyft/protoc-gen-star/v2" "github.com/stretchr/testify/assert" ) func TestContext_Params(t *testing.T) { t.Parallel() p := pgs.Parameters{} p.SetStr("foo", "bar") ctx := InitContext(p) params := ctx.Params() assert.Equal(t, "bar", params.Str("foo")) } protoc-gen-star-2.0.3/lang/go/docs.go000066400000000000000000000001421440740147700173520ustar00rootroot00000000000000// Package pgsgo contains Go-specific helpers for use with PG* based protoc-plugins package pgsgo protoc-gen-star-2.0.3/lang/go/gofmt.go000066400000000000000000000012371440740147700175440ustar00rootroot00000000000000package pgsgo import ( "go/format" "strings" pgs "github.com/lyft/protoc-gen-star/v2" ) type goFmt struct{} // GoFmt returns a PostProcessor that runs gofmt on any files ending in ".go" func GoFmt() pgs.PostProcessor { return goFmt{} } func (p goFmt) Match(a pgs.Artifact) bool { var n string switch a := a.(type) { case pgs.GeneratorFile: n = a.Name case pgs.GeneratorTemplateFile: n = a.Name case pgs.CustomFile: n = a.Name case pgs.CustomTemplateFile: n = a.Name default: return false } return strings.HasSuffix(n, ".go") } func (p goFmt) Process(in []byte) ([]byte, error) { return format.Source(in) } var _ pgs.PostProcessor = goFmt{} protoc-gen-star-2.0.3/lang/go/gofmt_test.go000066400000000000000000000024031440740147700205770ustar00rootroot00000000000000package pgsgo import ( "testing" pgs "github.com/lyft/protoc-gen-star/v2" "github.com/stretchr/testify/assert" ) func TestGoFmt_Match(t *testing.T) { t.Parallel() pp := GoFmt() tests := []struct { n string a pgs.Artifact m bool }{ {"GenFile", pgs.GeneratorFile{Name: "foo.go"}, true}, {"GenFileNonGo", pgs.GeneratorFile{Name: "bar.txt"}, false}, 
{"GenTplFile", pgs.GeneratorTemplateFile{Name: "foo.go"}, true}, {"GenTplFileNonGo", pgs.GeneratorTemplateFile{Name: "bar.txt"}, false}, {"CustomFile", pgs.CustomFile{Name: "foo.go"}, true}, {"CustomFileNonGo", pgs.CustomFile{Name: "bar.txt"}, false}, {"CustomTplFile", pgs.CustomTemplateFile{Name: "foo.go"}, true}, {"CustomTplFileNonGo", pgs.CustomTemplateFile{Name: "bar.txt"}, false}, {"NonMatch", pgs.GeneratorAppend{FileName: "foo.go"}, false}, } for _, test := range tests { tc := test t.Run(tc.n, func(t *testing.T) { t.Parallel() assert.Equal(t, tc.m, pp.Match(tc.a)) }) } } func TestGoFmt_Process(t *testing.T) { t.Parallel() src := []byte("// test\n package foo\n\nvar bar int = 123\n") exp := []byte("// test\npackage foo\n\nvar bar int = 123\n") out, err := GoFmt().Process(src) assert.NoError(t, err) assert.Equal(t, exp, out) } protoc-gen-star-2.0.3/lang/go/goimports.go000066400000000000000000000014151440740147700204510ustar00rootroot00000000000000package pgsgo import ( "strings" "golang.org/x/tools/imports" pgs "github.com/lyft/protoc-gen-star/v2" ) type goImports struct{} // GoImports returns a PostProcessor that run goimports on any files ending . ".go" func GoImports() pgs.PostProcessor { return goImports{} } func (g goImports) Match(a pgs.Artifact) bool { var n string switch a := a.(type) { case pgs.GeneratorFile: n = a.Name case pgs.GeneratorTemplateFile: n = a.Name case pgs.CustomFile: n = a.Name case pgs.CustomTemplateFile: n = a.Name default: return false } return strings.HasSuffix(n, ".go") } func (g goImports) Process(in []byte) ([]byte, error) { // We do not want to give a filename here, ever. 
return imports.Process("", in, nil) } var _ pgs.PostProcessor = goImports{} protoc-gen-star-2.0.3/lang/go/goimports_test.go000066400000000000000000000025531440740147700215140ustar00rootroot00000000000000package pgsgo import ( "testing" "github.com/stretchr/testify/assert" pgs "github.com/lyft/protoc-gen-star/v2" ) func TestGoImports_Match(t *testing.T) { t.Parallel() pp := GoImports() tests := []struct { n string a pgs.Artifact m bool }{ {"GenFile", pgs.GeneratorFile{Name: "foo.go"}, true}, {"GenFileNonGo", pgs.GeneratorFile{Name: "bar.txt"}, false}, {"GenTplFile", pgs.GeneratorTemplateFile{Name: "foo.go"}, true}, {"GenTplFileNonGo", pgs.GeneratorTemplateFile{Name: "bar.txt"}, false}, {"CustomFile", pgs.CustomFile{Name: "foo.go"}, true}, {"CustomFileNonGo", pgs.CustomFile{Name: "bar.txt"}, false}, {"CustomTplFile", pgs.CustomTemplateFile{Name: "foo.go"}, true}, {"CustomTplFileNonGo", pgs.CustomTemplateFile{Name: "bar.txt"}, false}, {"NonMatch", pgs.GeneratorAppend{FileName: "foo.go"}, false}, } for _, test := range tests { tc := test t.Run(tc.n, func(t *testing.T) { t.Parallel() assert.Equal(t, tc.m, pp.Match(tc.a)) }) } } func TestGoImports_Process(t *testing.T) { t.Parallel() src := []byte("package foo\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n)\n\nfunc Hello() {\n\tfmt.Println(\"Hello\")\n}\n") exp := []byte("package foo\n\nimport (\n\t\"fmt\"\n)\n\nfunc Hello() {\n\tfmt.Println(\"Hello\")\n}\n") out, err := GoImports().Process(src) assert.NoError(t, err) assert.Equal(t, string(exp), string(out)) } protoc-gen-star-2.0.3/lang/go/helpers_test.go000066400000000000000000000024741440740147700211350ustar00rootroot00000000000000package pgsgo import ( "io/ioutil" "path/filepath" "strings" "testing" pgs "github.com/lyft/protoc-gen-star/v2" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" plugin_go "google.golang.org/protobuf/types/pluginpb" ) func readCodeGenReq(t *testing.T, dir ...string) *plugin_go.CodeGeneratorRequest { dirs := 
append(append([]string{"testdata"}, dir...), "code_generator_request.pb.bin") filename := filepath.Join(dirs...) data, err := ioutil.ReadFile(filename) require.NoError(t, err, "unable to read CDR at %q", filename) req := &plugin_go.CodeGeneratorRequest{} err = proto.Unmarshal(data, req) require.NoError(t, err, "unable to unmarshal CDR data at %q", filename) return req } func buildGraph(t *testing.T, dir ...string) pgs.AST { d := pgs.InitMockDebugger() ast := pgs.ProcessCodeGeneratorRequest(d, readCodeGenReq(t, dir...)) require.False(t, d.Failed(), "failed to build graph (see previous log statements)") return ast } func loadContext(t *testing.T, dir ...string) Context { dirs := append(append([]string{"testdata"}, dir...), "params") filename := filepath.Join(dirs...) data, err := ioutil.ReadFile(filename) require.NoError(t, err, "unable to read params at %q", filename) params := pgs.ParseParameters(strings.TrimSpace(string(data))) return InitContext(params) } protoc-gen-star-2.0.3/lang/go/name.go000066400000000000000000000056341440740147700173550ustar00rootroot00000000000000package pgsgo import ( "fmt" "unicode" "unicode/utf8" pgs "github.com/lyft/protoc-gen-star/v2" ) func (c context) Name(node pgs.Node) pgs.Name { // Message or Enum type ChildEntity interface { Name() pgs.Name Parent() pgs.ParentEntity } switch en := node.(type) { case pgs.Package: // the package name for the first file (should be consistent) return c.PackageName(en) case pgs.File: // the package name for this file return c.PackageName(en) case ChildEntity: // Message or Enum types, which may be nested if p, ok := en.Parent().(pgs.Message); ok { return joinChild(c.Name(p), en.Name()) } return PGGUpperCamelCase(en.Name()) case pgs.Field: // field names cannot conflict with other generated methods return replaceProtected(PGGUpperCamelCase(en.Name())) case pgs.OneOf: // oneof field names cannot conflict with other generated methods return replaceProtected(PGGUpperCamelCase(en.Name())) case 
pgs.EnumValue: // EnumValue are prefixed with the enum name if _, ok := en.Enum().Parent().(pgs.File); ok { return joinNames(c.Name(en.Enum()), en.Name()) } return joinNames(c.Name(en.Enum().Parent()), en.Name()) case pgs.Service: // always return the server name return c.ServerName(en) case pgs.Entity: // any other entity should be just upper-camel-cased return PGGUpperCamelCase(en.Name()) default: panic("unreachable") } } func (c context) OneofOption(field pgs.Field) pgs.Name { n := joinNames(c.Name(field.Message()), c.Name(field)) for _, msg := range field.Message().Messages() { if c.Name(msg) == n { return n + "_" } } for _, en := range field.Message().Enums() { if c.Name(en) == n { return n + "_" } } return n } func (c context) ServerName(s pgs.Service) pgs.Name { n := PGGUpperCamelCase(s.Name()) return pgs.Name(fmt.Sprintf("%sServer", n)) } func (c context) ClientName(s pgs.Service) pgs.Name { n := PGGUpperCamelCase(s.Name()) return pgs.Name(fmt.Sprintf("%sClient", n)) } func (c context) ServerStream(m pgs.Method) pgs.Name { s := PGGUpperCamelCase(m.Service().Name()) n := PGGUpperCamelCase(m.Name()) return joinNames(s, n) + "Server" } var protectedNames = map[pgs.Name]pgs.Name{ "Reset": "Reset_", "String": "String_", "ProtoMessage": "ProtoMessage_", "Marshal": "Marshal_", "Unmarshal": "Unmarshal_", "ExtensionRangeArray": "ExtensionRangeArray_", "ExtensionMap": "ExtensionMap_", "Descriptor": "Descriptor_", } func replaceProtected(n pgs.Name) pgs.Name { if use, protected := protectedNames[n]; protected { return use } return n } func joinChild(a, b pgs.Name) pgs.Name { if r, _ := utf8.DecodeRuneInString(b.String()); unicode.IsLetter(r) && unicode.IsLower(r) { return pgs.Name(fmt.Sprintf("%s%s", a, PGGUpperCamelCase(b))) } return joinNames(a, PGGUpperCamelCase(b)) } func joinNames(a, b pgs.Name) pgs.Name { return pgs.Name(fmt.Sprintf("%s_%s", a, b)) } 
protoc-gen-star-2.0.3/lang/go/name_test.go000066400000000000000000000153451440740147700204140ustar00rootroot00000000000000package pgsgo import ( "testing" pgs "github.com/lyft/protoc-gen-star/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestPGGUpperCamelCase(t *testing.T) { t.Parallel() tests := []struct { in string ex string }{ {"foo_bar", "FooBar"}, {"myJSON", "MyJSON"}, {"PDFTemplate", "PDFTemplate"}, {"_my_field_name_2", "XMyFieldName_2"}, {"my.field", "My.field"}, {"my_Field", "My_Field"}, } for _, tc := range tests { assert.Equal(t, tc.ex, PGGUpperCamelCase(pgs.Name(tc.in)).String()) } } func TestName(t *testing.T) { t.Parallel() ast := buildGraph(t, "names", "entities") ctx := loadContext(t, "names", "entities") f := ast.Targets()["entities.proto"] assert.Equal(t, pgs.Name("entities"), ctx.Name(f)) assert.Equal(t, pgs.Name("entities"), ctx.Name(f.Package())) assert.Panics(t, func() { ctx.Name(nil) }) tests := []struct { entity string expected pgs.Name }{ // Top-Level Messages {"UpperCamelCaseMessage", "UpperCamelCaseMessage"}, {"lowerCamelCaseMessage", "LowerCamelCaseMessage"}, {"SCREAMING_SNAKE_CASE", "SCREAMING_SNAKE_CASE"}, {"Upper_Snake_Case", "Upper_Snake_Case"}, {"lower_snake_case", "LowerSnakeCase"}, {"lowercase", "Lowercase"}, {"UPPERCASE", "UPPERCASE"}, {"String", "String"}, {"MsgWith3dInside", "MsgWith3DInside"}, {"MsgEndsWith3d", "MsgEndsWith3D"}, // Nested Messages {"Nested.Message", "Nested_Message"}, {"Nested.String", "Nested_String"}, {"Nested.Message.Message", "Nested_Message_Message"}, {"Nested.lowerMsg", "NestedLowerMsg"}, // Enums {"UpperCamelCaseEnum", "UpperCamelCaseEnum"}, {"lowerCamelCaseEnum", "LowerCamelCaseEnum"}, {"SCREAMING_SNAKE_ENUM", "SCREAMING_SNAKE_ENUM"}, {"lower_snake_enum", "LowerSnakeEnum"}, {"Upper_Snake_Enum", "Upper_Snake_Enum"}, // EnumValues {"UpperCamelCaseEnum.SCREAMING_SNAKE_CASE_ENUM_VALUE", "UpperCamelCaseEnum_SCREAMING_SNAKE_CASE_ENUM_VALUE"}, 
{"UpperCamelCaseEnum.lower_snake_case_enum_value", "UpperCamelCaseEnum_lower_snake_case_enum_value"}, {"UpperCamelCaseEnum.Upper_Snake_Case_Enum_Value", "UpperCamelCaseEnum_Upper_Snake_Case_Enum_Value"}, {"UpperCamelCaseEnum.UpperCamelCaseEnumValue", "UpperCamelCaseEnum_UpperCamelCaseEnumValue"}, {"UpperCamelCaseEnum.lowerCamelCaseEnumValue", "UpperCamelCaseEnum_lowerCamelCaseEnumValue"}, {"lowerCamelCaseEnum.LCC_Value", "LowerCamelCaseEnum_LCC_Value"}, {"SCREAMING_SNAKE_ENUM.SS_Value", "SCREAMING_SNAKE_ENUM_SS_Value"}, {"lower_snake_enum.LS_Value", "LowerSnakeEnum_LS_Value"}, {"Upper_Snake_Enum.US_Value", "Upper_Snake_Enum_US_Value"}, // Nested Enums {"Nested.Enum", "Nested_Enum"}, {"Nested.Enum.VALUE", "Nested_VALUE"}, {"Nested.Message.Enum", "Nested_Message_Enum"}, {"Nested.Message.Enum.NESTED", "Nested_Message_NESTED"}, {"Nested.lowercase", "NestedLowercase"}, {"Nested.lowercase.lower", "Nested_lower"}, // Field Names {"Fields.lower_snake_case", "LowerSnakeCase"}, {"Fields.Upper_Snake_Case", "Upper_Snake_Case"}, {"Fields.SCREAMING_SNAKE_CASE", "SCREAMING_SNAKE_CASE"}, {"Fields.lowerCamelCase", "LowerCamelCase"}, {"Fields.UpperCamelCase", "UpperCamelCase"}, {"Fields.string", "String_"}, // OneOfs {"Oneofs.lower_snake_case", "LowerSnakeCase"}, {"Oneofs.Upper_Snake_Case", "Upper_Snake_Case"}, {"Oneofs.SCREAMING_SNAKE_CASE", "SCREAMING_SNAKE_CASE"}, {"Oneofs.lowerCamelCase", "LowerCamelCase"}, {"Oneofs.UpperCamelCase", "UpperCamelCase"}, {"Oneofs.string", "String_"}, {"Oneofs.oneof", "Oneof"}, // Services (always the Server name) {"UpperCamelService", "UpperCamelServiceServer"}, {"lowerCamelService", "LowerCamelServiceServer"}, {"lower_snake_service", "LowerSnakeServiceServer"}, {"Upper_Snake_Service", "Upper_Snake_ServiceServer"}, {"SCREAMING_SNAKE_SERVICE", "SCREAMING_SNAKE_SERVICEServer"}, {"reset", "ResetServer"}, // Methods {"Service.UpperCamel", "UpperCamel"}, {"Service.lowerCamel", "LowerCamel"}, {"Service.lower_snake", "LowerSnake"}, {"Service.Upper_Snake", 
"Upper_Snake"}, {"Service.SCREAMING_SNAKE", "SCREAMING_SNAKE"}, {"Service.Reset", "Reset"}, } for _, test := range tests { tc := test t.Run(tc.entity, func(t *testing.T) { t.Parallel() e, ok := ast.Lookup(".names.entities." + tc.entity) require.True(t, ok, "could not locate entity") assert.Equal(t, tc.expected, ctx.Name(e)) }) } } func TestContext_OneofOption(t *testing.T) { t.Parallel() ast := buildGraph(t, "names", "entities") ctx := loadContext(t, "names", "entities") tests := []struct { field string expected pgs.Name }{ {"LS", "Oneofs_LS"}, {"US", "Oneofs_US"}, {"SS", "Oneofs_SS"}, {"LC", "Oneofs_LC"}, {"UC", "Oneofs_UC"}, {"S", "Oneofs_S"}, {"lower_snake_case_o", "Oneofs_LowerSnakeCaseO"}, {"Upper_Snake_Case_O", "Oneofs_Upper_Snake_Case_O"}, {"SCREAMING_SNAKE_CASE_O", "Oneofs_SCREAMING_SNAKE_CASE_O"}, {"lowerCamelCaseO", "Oneofs_LowerCamelCaseO"}, {"UpperCamelCaseO", "Oneofs_UpperCamelCaseO"}, {"reset", "Oneofs_Reset_"}, {"some_msg", "Oneofs_SomeMsg_"}, {"some_enum", "Oneofs_SomeEnum_"}, } for _, test := range tests { tc := test t.Run(tc.field, func(t *testing.T) { t.Parallel() e, ok := ast.Lookup(".names.entities.Oneofs." + tc.field) require.True(t, ok, "could not find field") f := e.(pgs.Field) assert.Equal(t, tc.expected, ctx.OneofOption(f)) }) } } func TestContext_ClientName(t *testing.T) { t.Parallel() ast := buildGraph(t, "names", "entities") ctx := loadContext(t, "names", "entities") tests := []struct { service string expected pgs.Name }{ {"UpperCamelService", "UpperCamelServiceClient"}, {"lowerCamelService", "LowerCamelServiceClient"}, {"lower_snake_service", "LowerSnakeServiceClient"}, {"Upper_Snake_Service", "Upper_Snake_ServiceClient"}, {"SCREAMING_SNAKE_SERVICE", "SCREAMING_SNAKE_SERVICEClient"}, {"reset", "ResetClient"}, } for _, test := range tests { tc := test t.Run(tc.service, func(t *testing.T) { t.Parallel() e, ok := ast.Lookup(".names.entities." 
+ tc.service) require.True(t, ok, "could not find service") s := e.(pgs.Service) assert.Equal(t, tc.expected, ctx.ClientName(s)) }) } } func TestContext_ServerStream(t *testing.T) { t.Parallel() ast := buildGraph(t, "names", "entities") ctx := loadContext(t, "names", "entities") tests := []struct { method string expected pgs.Name }{ {"UpperCamel", "Service_UpperCamelServer"}, {"lowerCamel", "Service_LowerCamelServer"}, {"lower_snake", "Service_LowerSnakeServer"}, } for _, test := range tests { tc := test t.Run(tc.method, func(t *testing.T) { t.Parallel() e, ok := ast.Lookup(".names.entities.Service." + tc.method) require.True(t, ok, "could not find method") m := e.(pgs.Method) assert.Equal(t, tc.expected, ctx.ServerStream(m)) }) } } protoc-gen-star-2.0.3/lang/go/package.go000066400000000000000000000062371440740147700200300ustar00rootroot00000000000000package pgsgo import ( "go/token" "regexp" "strings" "unicode" "unicode/utf8" pgs "github.com/lyft/protoc-gen-star/v2" ) var nonAlphaNumPattern = regexp.MustCompile("[^a-zA-Z0-9]") func (c context) PackageName(node pgs.Node) pgs.Name { e, ok := node.(pgs.Entity) if !ok { e = node.(pgs.Package).Files()[0] } _, pkg := c.optionPackage(e) // use import_path parameter ONLY if there is no go_package option in the file. 
if ip := c.p.Str("import_path"); ip != "" && e.File().Descriptor().GetOptions().GetGoPackage() == "" { pkg = ip } // if the package name is a Go keyword, prefix with '_' if token.Lookup(pkg).IsKeyword() { pkg = "_" + pkg } // if package starts with digit, prefix with `_` if r, _ := utf8.DecodeRuneInString(pkg); unicode.IsDigit(r) { pkg = "_" + pkg } // package name is kosher return pgs.Name(pkg) } func (c context) ImportPath(e pgs.Entity) pgs.FilePath { path, _ := c.optionPackage(e) path = c.p.Str("import_prefix") + path return pgs.FilePath(path) } func (c context) OutputPath(e pgs.Entity) pgs.FilePath { out := e.File().InputPath().SetExt(".pb.go") // source relative doesn't try to be fancy if Paths(c.p) == SourceRelative { return out } path, _ := c.optionPackage(e) // Import relative ignores the existing file structure return pgs.FilePath(path).Push(out.Base()) } func (c context) optionPackage(e pgs.Entity) (path, pkg string) { // M mapping param overrides everything IFF the entity is not a build target if override, ok := c.p["M"+e.File().InputPath().String()]; ok && !e.BuildTarget() { path = override pkg = override if idx := strings.LastIndex(pkg, "/"); idx > -1 { pkg = pkg[idx+1:] } return } // check if there's a go_package option specified pkg = c.resolveGoPackageOption(e) path = e.File().InputPath().Dir().String() if pkg == "" { // have a proto package name, so use that if n := e.Package().ProtoName(); n != "" { pkg = n.SnakeCase().String() } else { // no other info, then replace all non-alphanumerics from the input file name pkg = nonAlphaNumPattern.ReplaceAllString(e.File().InputPath().BaseName(), "_") } return } // go_package="example.com/foo/bar;baz" should have a package name of `baz` if idx := strings.LastIndex(pkg, ";"); idx > -1 { path = pkg[:idx] pkg = nonAlphaNumPattern.ReplaceAllString(pkg[idx+1:], "_") return } // go_package="example.com/foo/bar" should have a package name of `bar` if idx := strings.LastIndex(pkg, "/"); idx > -1 { path = pkg pkg = 
nonAlphaNumPattern.ReplaceAllString(pkg[idx+1:], "_") return } pkg = nonAlphaNumPattern.ReplaceAllString(pkg, "_") return } func (c context) resolveGoPackageOption(e pgs.Entity) string { // attempt to get it from the current file if pkg := e.File().Descriptor().GetOptions().GetGoPackage(); pkg != "" { return pkg } // protoc-gen-go will use the go_package option from _any_ file in the same // execution since it's assumed that all the files are in the same Go // package. PG* will only verify this against files in the same proto package for _, f := range e.Package().Files() { if pkg := f.Descriptor().GetOptions().GetGoPackage(); pkg != "" { return pkg } } return "" } protoc-gen-star-2.0.3/lang/go/package_test.go000066400000000000000000000064051440740147700210640ustar00rootroot00000000000000package pgsgo import ( "testing" pgs "github.com/lyft/protoc-gen-star/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestPackageName(t *testing.T) { t.Parallel() tests := []struct { dir string expected pgs.Name }{ {"keyword", "_package"}, // go keywords are prefixed with _ {"package", "my_package"}, // use the go_package option {"import", "bar"}, // uses the basename if go_package contains a / {"override", "baz"}, // if go_package contains ;, use everything to the right {"mapped", "unaffected"}, // M mapped params are ignored for build targets {"import_path_mapped", "go_package"}, // mixed import_path and M parameters should lose to go_package {"transitive_package", "foobar"}, // go_option gets picked up from other files if present {"path_dash", "path_dash"}, // if basename of go_package contains invalid characters, replace with _ } for _, test := range tests { tc := test t.Run(tc.dir, func(t *testing.T) { t.Parallel() ast := buildGraph(t, "names", tc.dir) ctx := loadContext(t, "names", tc.dir) for _, target := range ast.Targets() { assert.Equal(t, tc.expected, ctx.PackageName(target)) } }) } } func TestImportPath(t *testing.T) { t.Parallel() 
tests := []struct { dir string fully pgs.FilePath unqualified pgs.FilePath none pgs.FilePath }{ { // no params changing the behavior of the import paths "no_options", "example.com/packages/targets/fully_qualified", "targets/unqualified", "targets/none", }, { // M params provided for each imported package "mapped", "example.com/foo/bar", "example.com/fizz/buzz", "example.com/quux", }, } for _, test := range tests { tc := test t.Run(tc.dir, func(t *testing.T) { t.Parallel() ast := buildGraph(t, "packages", tc.dir) ctx := loadContext(t, "packages", tc.dir) pkgs := map[string]pgs.FilePath{ "packages.targets.fully_qualified": tc.fully, "packages.targets.unqualified": tc.unqualified, "packages.targets.none": tc.none, } for pkg, expected := range pkgs { t.Run(pkg, func(t *testing.T) { p, ok := ast.Packages()[pkg] require.True(t, ok, "package not found") f := p.Files()[0] assert.Equal(t, expected, ctx.ImportPath(f)) }) } }) } } func TestOutputPath(t *testing.T) { t.Parallel() tests := []struct { dir, file string expected pgs.FilePath }{ {"none", "none.proto", "none.pb.go"}, {"none_srcrel", "none.proto", "none.pb.go"}, {"unqualified", "unqualified.proto", "unqualified.pb.go"}, {"unqualified_srcrel", "unqualified.proto", "unqualified.pb.go"}, {"qualified", "qualified.proto", "example.com/qualified/qualified.pb.go"}, {"qualified_srcrel", "qualified.proto", "qualified.pb.go"}, {"mapped", "mapped.proto", "mapped.pb.go"}, {"mapped_srcrel", "mapped.proto", "mapped.pb.go"}, } for _, test := range tests { tc := test t.Run(tc.dir, func(t *testing.T) { t.Parallel() ast := buildGraph(t, "outputs", tc.dir) ctx := loadContext(t, "outputs", tc.dir) f, ok := ast.Lookup(tc.file) require.True(t, ok, "file not found") assert.Equal(t, tc.expected, ctx.OutputPath(f)) }) } } protoc-gen-star-2.0.3/lang/go/parameters.go000066400000000000000000000073341440740147700205770ustar00rootroot00000000000000package pgsgo import ( "fmt" "strings" pgs "github.com/lyft/protoc-gen-star/v2" ) const ( 
importPathKey = "import_path" importMapKeyPrefix = "M" pathTypeKey = "paths" pluginsKey = "plugins" pluginsSep = "+" ) // PathType describes how the generated output file paths should be constructed. type PathType string const ( // ImportPathRelative is the default and outputs the file based off the go // import path defined in the go_package option. ImportPathRelative PathType = "" // SourceRelative indicates files should be output relative to the path of // the source file. SourceRelative PathType = "source_relative" ) // Plugins returns the sub-plugins enabled for this protoc plugin. If the all // value is true, all registered plugins are considered enabled (ie, protoc was // called with an empty "plugins" parameter). Otherwise, plugins contains the // list of plugins enabled by name. func Plugins(p pgs.Parameters) (plugins []string, all bool) { s, ok := p[pluginsKey] if !ok { return } if all = s == ""; all { return } plugins = strings.Split(s, pluginsSep) return } // HasPlugin returns true if the plugin name is enabled in the parameters. This // method will always return true if all plugins are enabled. func HasPlugin(p pgs.Parameters, name string) bool { plugins, all := Plugins(p) if all { return true } for _, pl := range plugins { if pl == name { return true } } return false } // AddPlugin adds name to the list of plugins in the parameters. If all plugins // are enabled, this method is a noop. func AddPlugin(p pgs.Parameters, name ...string) { if len(name) == 0 { return } plugins, all := Plugins(p) if all { return } p.SetStr(pluginsKey, strings.Join(append(plugins, name...), pluginsSep)) } // EnableAllPlugins changes the parameters to enable all registered sub-plugins. func EnableAllPlugins(p pgs.Parameters) { p.SetStr(pluginsKey, "") } // ImportPath returns the protoc-gen-go parameter. This value is used as the // package if the input proto files do not declare a go_package option. If it // contains slashes, everything up to the rightmost slash is ignored. 
// // See: https://github.com/golang/protobuf#parameters func ImportPath(p pgs.Parameters) string { return p.Str(importPathKey) } // SetImportPath sets the protoc-gen-go ImportPath parameter. This is useful // for overriding the behavior of the ImportPath at runtime. func SetImportPath(p pgs.Parameters, path string) { p.SetStr(importPathKey, path) } // Paths returns the protoc-gen-go parameter. This value is used to switch the // mode used to determine the output paths of the generated code. By default, // paths are derived from the import path specified by go_package. It can be // overridden to be "source_relative", ignoring the import path using the // source path exclusively. func Paths(p pgs.Parameters) PathType { return PathType(p.Str(pathTypeKey)) } // SetPaths sets the protoc-gen-go Paths parameter. This is useful for // overriding the behavior of Paths at runtime. func SetPaths(p pgs.Parameters, pt PathType) { p.SetStr(pathTypeKey, string(pt)) } // MappedImport returns the protoc-gen-go import overrides for the specified proto // file. Each entry in the map keys off a proto file (as loaded by protoc) with // values of the Go package to use. These values will be prefixed with the // value of ImportPrefix when generating the Go code. func MappedImport(p pgs.Parameters, proto string) (string, bool) { imp, ok := p[fmt.Sprintf("%s%s", importMapKeyPrefix, proto)] return imp, ok } // AddImportMapping adds a proto file to Go package import mapping to the // parameters. 
func AddImportMapping(p pgs.Parameters, proto, pkg string) { p[fmt.Sprintf("%s%s", importMapKeyPrefix, proto)] = pkg } protoc-gen-star-2.0.3/lang/go/parameters_test.go000066400000000000000000000045651440740147700216410ustar00rootroot00000000000000package pgsgo import ( "testing" pgs "github.com/lyft/protoc-gen-star/v2" "github.com/stretchr/testify/assert" ) func TestParameters_Plugins(t *testing.T) { t.Parallel() p := pgs.Parameters{} plugins, all := Plugins(p) assert.Empty(t, plugins) assert.False(t, all) p[pluginsKey] = "foo+bar" plugins, all = Plugins(p) assert.Equal(t, []string{"foo", "bar"}, plugins) assert.False(t, all) p[pluginsKey] = "" plugins, all = Plugins(p) assert.Empty(t, plugins) assert.True(t, all) } func TestParameters_HasPlugin(t *testing.T) { t.Parallel() p := pgs.Parameters{} assert.False(t, HasPlugin(p, "foo")) p[pluginsKey] = "foo" assert.True(t, HasPlugin(p, "foo")) p[pluginsKey] = "" assert.True(t, HasPlugin(p, "foo")) p[pluginsKey] = "bar" assert.False(t, HasPlugin(p, "foo")) } func TestParameters_AddPlugin(t *testing.T) { t.Parallel() p := pgs.Parameters{} AddPlugin(p, "foo", "bar") assert.Equal(t, "foo+bar", p[pluginsKey]) AddPlugin(p, "baz") assert.Equal(t, "foo+bar+baz", p[pluginsKey]) AddPlugin(p) assert.Equal(t, "foo+bar+baz", p[pluginsKey]) p[pluginsKey] = "" AddPlugin(p, "fizz", "buzz") assert.Equal(t, "", p[pluginsKey]) } func TestParameters_EnableAllPlugins(t *testing.T) { t.Parallel() p := pgs.Parameters{pluginsKey: "foo"} _, all := Plugins(p) assert.False(t, all) EnableAllPlugins(p) _, all = Plugins(p) assert.True(t, all) } func TestParameters_ImportPath(t *testing.T) { t.Parallel() p := pgs.Parameters{} assert.Empty(t, ImportPath(p)) SetImportPath(p, "foo") assert.Equal(t, "foo", ImportPath(p)) } func TestParameters_ImportMap(t *testing.T) { t.Parallel() p := pgs.Parameters{ "Mfoo.proto": "bar", "Mfizz/buzz.proto": "baz", } AddImportMapping(p, "quux.proto", "shme") tests := []struct { proto, path string exists bool }{ 
{"quux.proto", "shme", true}, {"foo.proto", "bar", true}, {"fizz/buzz.proto", "baz", true}, {"abcde.proto", "", false}, } for _, test := range tests { t.Run(test.proto, func(t *testing.T) { path, ok := MappedImport(p, test.proto) if test.exists { assert.True(t, ok) assert.Equal(t, test.path, path) } else { assert.False(t, ok) } }) } } func TestParameters_Paths(t *testing.T) { t.Parallel() p := pgs.Parameters{} assert.Equal(t, ImportPathRelative, Paths(p)) SetPaths(p, SourceRelative) assert.Equal(t, SourceRelative, Paths(p)) } protoc-gen-star-2.0.3/lang/go/testdata/000077500000000000000000000000001440740147700177075ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/000077500000000000000000000000001440740147700210125ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/entities/000077500000000000000000000000001440740147700226365ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/entities/entities.proto000066400000000000000000000045411440740147700255530ustar00rootroot00000000000000syntax = "proto3"; package names.entities; option go_package = "example.com/names/entities"; message UpperCamelCaseMessage {} message lowerCamelCaseMessage {} message SCREAMING_SNAKE_CASE {} message Upper_Snake_Case {} message lower_snake_case {} message lowercase {} message UPPERCASE {} message String {} // protected name message Nested { message Message { message Message {} enum Enum { NESTED = 0; } } message String {} // protected name enum Enum { VALUE = 0; } enum lowercase { lower = 0; } message lowerMsg {} } enum UpperCamelCaseEnum { SCREAMING_SNAKE_CASE_ENUM_VALUE = 0; lower_snake_case_enum_value = 1; Upper_Snake_Case_Enum_Value = 2; UpperCamelCaseEnumValue = 3; lowerCamelCaseEnumValue = 4; } enum lowerCamelCaseEnum {LCC_Value = 0;} enum SCREAMING_SNAKE_ENUM {SS_Value = 0;} enum lower_snake_enum {LS_Value = 0;} enum Upper_Snake_Enum {US_Value = 0;} message Fields { bool lower_snake_case = 1; bool Upper_Snake_Case = 2; 
bool SCREAMING_SNAKE_CASE = 3; bool lowerCamelCase = 4; bool UpperCamelCase = 5; bool string = 6; // protected name } message Oneofs { oneof lower_snake_case {bool LS = 1;} oneof Upper_Snake_Case {bool US = 2;} oneof SCREAMING_SNAKE_CASE {bool SS = 3;} oneof lowerCamelCase {bool LC = 4;} oneof UpperCamelCase {bool UC = 5;} // protected oneof string {bool S = 6;} oneof oneof { bool lower_snake_case_o = 7; bool Upper_Snake_Case_O = 8; bool SCREAMING_SNAKE_CASE_O = 9; bool lowerCamelCaseO = 10; bool UpperCamelCaseO = 11; bool reset = 12; // protected } message SomeMsg {} enum SomeEnum { VALUE = 0; } oneof some_msg_oneof { SomeMsg some_msg = 13; SomeEnum some_enum = 14; } } service UpperCamelService {} service lowerCamelService {} service lower_snake_service {} service Upper_Snake_Service {} service SCREAMING_SNAKE_SERVICE {} service reset {} service Service { rpc UpperCamel(stream String) returns (String); rpc lowerCamel(String) returns (stream String); rpc lower_snake(stream String) returns (stream String); rpc Upper_Snake(String) returns (String); rpc SCREAMING_SNAKE(String) returns (String); rpc Reset(String) returns (String); } message MsgWith3dInside {} message MsgEndsWith3d {} protoc-gen-star-2.0.3/lang/go/testdata/names/entities/params000066400000000000000000000000001440740147700240320ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/import/000077500000000000000000000000001440740147700223245ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/import/import.proto000066400000000000000000000001231440740147700247170ustar00rootroot00000000000000syntax="proto3"; package names.import; option go_package = "example.com/foo/bar"; 
protoc-gen-star-2.0.3/lang/go/testdata/names/import/params000066400000000000000000000000001440740147700235200ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/import_path_mapped/000077500000000000000000000000001440740147700246665ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/import_path_mapped/import_path_mapped.proto000066400000000000000000000001731440740147700316300ustar00rootroot00000000000000syntax="proto3"; package names.import_path_mapped; option go_package="github.com/fizz/buzz;go_package"; message Mapped {} protoc-gen-star-2.0.3/lang/go/testdata/names/import_path_mapped/params000066400000000000000000000000551440740147700260740ustar00rootroot00000000000000Mimport_path_mapped.proto=github.com/foo/bar protoc-gen-star-2.0.3/lang/go/testdata/names/keyword/000077500000000000000000000000001440740147700224765ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/keyword/keyword.proto000066400000000000000000000001501440740147700252430ustar00rootroot00000000000000syntax="proto3"; package names.keyword; option go_package = "example.com/package"; message Package {} protoc-gen-star-2.0.3/lang/go/testdata/names/keyword/params000066400000000000000000000000001440740147700236720ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/mapped/000077500000000000000000000000001440740147700222605ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/mapped/mapped.proto000066400000000000000000000001351440740147700246120ustar00rootroot00000000000000syntax="proto3"; package names.mapped; option go_package="./;unaffected"; message Mapped {} protoc-gen-star-2.0.3/lang/go/testdata/names/mapped/params000066400000000000000000000000411440740147700234610ustar00rootroot00000000000000Mmapped.proto=example.com/foobar 
protoc-gen-star-2.0.3/lang/go/testdata/names/override/000077500000000000000000000000001440740147700226315ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/override/override.proto000066400000000000000000000001311440740147700255300ustar00rootroot00000000000000syntax="proto3"; package names.override; option go_package = "example.com/foo/bar;baz"; protoc-gen-star-2.0.3/lang/go/testdata/names/override/params000066400000000000000000000000001440740147700240250ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/package/000077500000000000000000000000001440740147700224055ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/package/package.proto000066400000000000000000000001711440740147700250640ustar00rootroot00000000000000syntax="proto3"; package names.package; option go_package = "example.com/names/package;my_package"; message Package {} protoc-gen-star-2.0.3/lang/go/testdata/names/package/params000066400000000000000000000000001440740147700236010ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/path_dash/000077500000000000000000000000001440740147700227455ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/path_dash/params000066400000000000000000000000001440740147700241410ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/path_dash/path_dash.proto000066400000000000000000000001511440740147700257620ustar00rootroot00000000000000syntax="proto3"; package names.path_dash; option go_package="example.com/path-dash"; message PathDash {}protoc-gen-star-2.0.3/lang/go/testdata/names/transitive_package/000077500000000000000000000000001440740147700246555ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/transitive_package/other.proto000066400000000000000000000002111440740147700270550ustar00rootroot00000000000000syntax="proto3"; package names.transitive_package; option go_package="example.com/names/foobar"; message Other { bool value 
= 1; } protoc-gen-star-2.0.3/lang/go/testdata/names/transitive_package/params000066400000000000000000000002231440740147700260600ustar00rootroot00000000000000Mmapped.proto=example.com/foo/bar,Mmapped_no_options/mapped.proto=example.com/fizz/buzz,Mnames/mapped_no_options/mapped.proto=example.com/quux/baz protoc-gen-star-2.0.3/lang/go/testdata/names/transitive_package/transitive.proto000066400000000000000000000002161440740147700301310ustar00rootroot00000000000000syntax="proto3"; package names.transitive_package; option go_package="example.com/names/foobar"; message Transitive { bool value = 1; } protoc-gen-star-2.0.3/lang/go/testdata/names/types/000077500000000000000000000000001440740147700221565ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/types/params000066400000000000000000000000001440740147700233520ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/names/types/proto2.proto000066400000000000000000000040611440740147700244710ustar00rootroot00000000000000syntax="proto2"; package names.types; option go_package = "example.com/foo/bar"; import "google/protobuf/duration.proto"; import "google/protobuf/type.proto"; message Proto2 { optional double double = 1; optional float float = 2; optional int64 int64 = 3; optional sfixed64 sfixed64 = 4; optional sint64 sint64 = 5; optional uint64 uint64 = 6; optional fixed64 fixed64 = 7; optional int32 int32 = 8; optional sfixed32 sfixed32 = 9; optional sint32 sint32 = 10; optional uint32 uint32 = 11; optional fixed32 fixed32 = 12; optional bool bool = 13; optional string string = 14; optional bytes bytes = 15; optional Enum enum = 16; optional google.protobuf.Syntax ext_enum = 17; optional Required msg = 18; optional google.protobuf.Duration ext_msg = 19; repeated double repeated_scalar = 20; repeated Enum repeated_enum = 21; repeated google.protobuf.Syntax repeated_ext_enum = 22; repeated Required repeated_msg = 23; repeated google.protobuf.Duration repeated_ext_msg = 24; map map_scalar = 
25; map map_enum = 26; map map_ext_enum = 27; map map_msg = 28; map map_ext_msg = 29; enum Enum {VALUE = 0;} message Required { required double double = 1; required float float = 2; required int64 int64 = 3; required sfixed64 sfixed64 = 4; required sint64 sint64 = 5; required uint64 uint64 = 6; required fixed64 fixed64 = 7; required int32 int32 = 8; required sfixed32 sfixed32 = 9; required sint32 sint32 = 10; required uint32 uint32 = 11; required fixed32 fixed32 = 12; required bool bool = 13; required string string = 14; required bytes bytes = 15; required Enum enum = 16; required google.protobuf.Syntax ext_enum = 17; required Required msg = 18; required google.protobuf.Duration ext_msg = 19; } } protoc-gen-star-2.0.3/lang/go/testdata/names/types/proto3.proto000066400000000000000000000022471440740147700244760ustar00rootroot00000000000000syntax="proto3"; package names.types; option go_package = "example.com/foo/bar"; import "google/protobuf/duration.proto"; import "google/protobuf/type.proto"; message Proto3 { double double = 1; float float = 2; int64 int64 = 3; sfixed64 sfixed64 = 4; sint64 sint64 = 5; uint64 uint64 = 6; fixed64 fixed64 = 7; int32 int32 = 8; sfixed32 sfixed32 = 9; sint32 sint32 = 10; uint32 uint32 = 11; fixed32 fixed32 = 12; bool bool = 13; string string = 14; bytes bytes = 15; Enum enum = 16; google.protobuf.Syntax ext_enum = 17; Message msg = 18; google.protobuf.Duration ext_msg = 19; repeated double repeated_scalar = 20; repeated Enum repeated_enum = 21; repeated google.protobuf.Syntax repeated_ext_enum = 22; repeated Message repeated_msg = 23; repeated google.protobuf.Duration repeated_ext_msg = 24; map map_scalar = 25; map map_enum = 26; map map_ext_enum = 27; map map_msg = 28; map map_ext_msg = 29; enum Enum {VALUE = 0;} message Message {} } 
protoc-gen-star-2.0.3/lang/go/testdata/outputs/000077500000000000000000000000001440740147700214325ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/mapped/000077500000000000000000000000001440740147700227005ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/mapped/mapped.proto000066400000000000000000000001331440740147700252300ustar00rootroot00000000000000syntax="proto3"; package outputs.mapped; option go_package="./;mapped"; message Mapped {} protoc-gen-star-2.0.3/lang/go/testdata/outputs/mapped/params000066400000000000000000000000411440740147700241010ustar00rootroot00000000000000Mmapped.proto=example.com/foobar protoc-gen-star-2.0.3/lang/go/testdata/outputs/mapped_srcrel/000077500000000000000000000000001440740147700242525ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/mapped_srcrel/mapped.proto000066400000000000000000000001331440740147700266020ustar00rootroot00000000000000syntax="proto3"; package outputs.mapped; option go_package="./;mapped"; message Mapped {} protoc-gen-star-2.0.3/lang/go/testdata/outputs/mapped_srcrel/params000066400000000000000000000000671440740147700254630ustar00rootroot00000000000000paths=source_relative,Mmapped.proto=example.com/foobar protoc-gen-star-2.0.3/lang/go/testdata/outputs/none/000077500000000000000000000000001440740147700223715ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/none/none.proto000066400000000000000000000001241440740147700244120ustar00rootroot00000000000000syntax="proto3"; package outputs.none; option go_package="./;none"; message None {} 
protoc-gen-star-2.0.3/lang/go/testdata/outputs/none/params000066400000000000000000000000001440740147700235650ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/none_srcrel/000077500000000000000000000000001440740147700237435ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/none_srcrel/none.proto000066400000000000000000000001211440740147700257610ustar00rootroot00000000000000syntax="proto3"; option go_package="example.com/outputs/none"; message None {} protoc-gen-star-2.0.3/lang/go/testdata/outputs/none_srcrel/params000066400000000000000000000000261440740147700251470ustar00rootroot00000000000000paths=source_relative protoc-gen-star-2.0.3/lang/go/testdata/outputs/qualified/000077500000000000000000000000001440740147700233755ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/qualified/params000066400000000000000000000000001440740147700245710ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/qualified/qualified.proto000066400000000000000000000001611440740147700264230ustar00rootroot00000000000000syntax="proto3"; package outputs.qualified; option go_package="example.com/qualified"; message FullyQualified{} protoc-gen-star-2.0.3/lang/go/testdata/outputs/qualified_srcrel/000077500000000000000000000000001440740147700247475ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/qualified_srcrel/params000066400000000000000000000000261440740147700261530ustar00rootroot00000000000000paths=source_relative protoc-gen-star-2.0.3/lang/go/testdata/outputs/qualified_srcrel/qualified.proto000066400000000000000000000001611440740147700277750ustar00rootroot00000000000000syntax="proto3"; package outputs.qualified; option go_package="example.com/qualified"; message FullyQualified{} 
protoc-gen-star-2.0.3/lang/go/testdata/outputs/unqualified/000077500000000000000000000000001440740147700237405ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/unqualified/params000066400000000000000000000000001440740147700251340ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/unqualified/unqualified.proto000066400000000000000000000001511440740147700273300ustar00rootroot00000000000000syntax="proto3"; package outputs.unqualified; option go_package="./;unqualified"; message Unqualified{} protoc-gen-star-2.0.3/lang/go/testdata/outputs/unqualified_srcrel/000077500000000000000000000000001440740147700253125ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/outputs/unqualified_srcrel/params000066400000000000000000000000261440740147700265160ustar00rootroot00000000000000paths=source_relative protoc-gen-star-2.0.3/lang/go/testdata/outputs/unqualified_srcrel/unqualified.proto000066400000000000000000000001721440740147700307050ustar00rootroot00000000000000syntax="proto3"; package outputs.unqualified; option go_package="example.com/outputs/unqualified"; message Unqualified{} protoc-gen-star-2.0.3/lang/go/testdata/packages/000077500000000000000000000000001440740147700214655ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/mapped/000077500000000000000000000000001440740147700227335ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/mapped/mapped.proto000066400000000000000000000005631440740147700252720ustar00rootroot00000000000000syntax="proto3"; package packages.mapped; option go_package="./;mapped"; import "targets/fully_qualified/fully_qualified.proto"; import "targets/unqualified/unqualified.proto"; import "targets/none/none.proto"; message Mapped { targets.fully_qualified.FullyQualified fully = 1; targets.unqualified.Unqualified unqualified = 2; targets.none.None none = 3; } 
protoc-gen-star-2.0.3/lang/go/testdata/packages/mapped/params000066400000000000000000000002521440740147700241400ustar00rootroot00000000000000Mtargets/fully_qualified/fully_qualified.proto=example.com/foo/bar,Mtargets/unqualified/unqualified.proto=example.com/fizz/buzz,Mtargets/none/none.proto=example.com/quux protoc-gen-star-2.0.3/lang/go/testdata/packages/no_options/000077500000000000000000000000001440740147700236545ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/no_options/no_options.proto000066400000000000000000000006201440740147700271260ustar00rootroot00000000000000syntax="proto3"; package packages.no_options; option go_package="example.com/packages/no_options"; import "targets/fully_qualified/fully_qualified.proto"; import "targets/unqualified/unqualified.proto"; import "targets/none/none.proto"; message NoOptions { targets.fully_qualified.FullyQualified fully = 1; targets.unqualified.Unqualified unqualified = 2; targets.none.None none = 3; } protoc-gen-star-2.0.3/lang/go/testdata/packages/no_options/params000066400000000000000000000000001440740147700250500ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/targets/000077500000000000000000000000001440740147700231365ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/targets/fully_qualified/000077500000000000000000000000001440740147700263145ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/targets/fully_qualified/fully_qualified.proto000066400000000000000000000002271440740147700325600ustar00rootroot00000000000000syntax="proto3"; package packages.targets.fully_qualified; option go_package="example.com/packages/targets/fully_qualified"; message FullyQualified{} 
protoc-gen-star-2.0.3/lang/go/testdata/packages/targets/none/000077500000000000000000000000001440740147700240755ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/targets/none/none.proto000066400000000000000000000001421440740147700261160ustar00rootroot00000000000000syntax="proto3"; package packages.targets.none; option go_package="targets/none"; message None{} protoc-gen-star-2.0.3/lang/go/testdata/packages/targets/unqualified/000077500000000000000000000000001440740147700254445ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/packages/targets/unqualified/unqualified.proto000066400000000000000000000001671440740147700310430ustar00rootroot00000000000000syntax="proto3"; package packages.targets.unqualified; option go_package="targets/unqualified"; message Unqualified{} protoc-gen-star-2.0.3/lang/go/testdata/presence/000077500000000000000000000000001440740147700215135ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/presence/types/000077500000000000000000000000001440740147700226575ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/presence/types/params000066400000000000000000000000001440740147700240530ustar00rootroot00000000000000protoc-gen-star-2.0.3/lang/go/testdata/presence/types/proto3.proto000066400000000000000000000036331440740147700251770ustar00rootroot00000000000000syntax="proto3"; package names.types; option go_package = "example.com/foo/bar"; import "google/protobuf/duration.proto"; import "google/protobuf/type.proto"; message Proto3 { double double = 1; float float = 2; int64 int64 = 3; sfixed64 sfixed64 = 4; sint64 sint64 = 5; uint64 uint64 = 6; fixed64 fixed64 = 7; int32 int32 = 8; sfixed32 sfixed32 = 9; sint32 sint32 = 10; uint32 uint32 = 11; fixed32 fixed32 = 12; bool bool = 13; string string = 14; bytes bytes = 15; Enum enum = 16; google.protobuf.Syntax ext_enum = 17; Message msg = 18; google.protobuf.Duration ext_msg = 19; repeated double repeated_scalar = 20; repeated Enum 
repeated_enum = 21; repeated google.protobuf.Syntax repeated_ext_enum = 22; repeated Message repeated_msg = 23; repeated google.protobuf.Duration repeated_ext_msg = 24; map map_scalar = 25; map map_enum = 26; map map_ext_enum = 27; map map_msg = 28; map map_ext_msg = 29; enum Enum {VALUE = 0;} message Message {} message Optional { optional double double = 1; optional float float = 2; optional int64 int64 = 3; optional sfixed64 sfixed64 = 4; optional sint64 sint64 = 5; optional uint64 uint64 = 6; optional fixed64 fixed64 = 7; optional int32 int32 = 8; optional sfixed32 sfixed32 = 9; optional sint32 sint32 = 10; optional uint32 uint32 = 11; optional fixed32 fixed32 = 12; optional bool bool = 13; optional string string = 14; optional bytes bytes = 15; optional Enum enum = 16; optional google.protobuf.Syntax ext_enum = 17; optional Optional msg = 18; optional google.protobuf.Duration ext_msg = 19; } } protoc-gen-star-2.0.3/lang/go/type_name.go000066400000000000000000000062511440740147700204120ustar00rootroot00000000000000package pgsgo import ( "fmt" "strings" pgs "github.com/lyft/protoc-gen-star/v2" ) func (c context) Type(f pgs.Field) TypeName { ft := f.Type() var t TypeName switch { case ft.IsMap(): key := scalarType(ft.Key().ProtoType()) return TypeName(fmt.Sprintf("map[%s]%s", key, c.elType(ft))) case ft.IsRepeated(): return TypeName(fmt.Sprintf("[]%s", c.elType(ft))) case ft.IsEmbed(): return c.importableTypeName(f, ft.Embed()).Pointer() case ft.IsEnum(): t = c.importableTypeName(f, ft.Enum()) default: t = scalarType(ft.ProtoType()) } if f.HasPresence() { return t.Pointer() } return t } func (c context) importableTypeName(f pgs.Field, e pgs.Entity) TypeName { t := TypeName(c.Name(e)) if c.ImportPath(e) == c.ImportPath(f) { return t } return TypeName(fmt.Sprintf("%s.%s", c.PackageName(e), t)) } func (c context) elType(ft pgs.FieldType) TypeName { el := ft.Element() switch { case el.IsEnum(): return c.importableTypeName(ft.Field(), el.Enum()) case el.IsEmbed(): 
return c.importableTypeName(ft.Field(), el.Embed()).Pointer() default: return scalarType(el.ProtoType()) } } func scalarType(t pgs.ProtoType) TypeName { switch t { case pgs.DoubleT: return "float64" case pgs.FloatT: return "float32" case pgs.Int64T, pgs.SFixed64, pgs.SInt64: return "int64" case pgs.UInt64T, pgs.Fixed64T: return "uint64" case pgs.Int32T, pgs.SFixed32, pgs.SInt32: return "int32" case pgs.UInt32T, pgs.Fixed32T: return "uint32" case pgs.BoolT: return "bool" case pgs.StringT: return "string" case pgs.BytesT: return "[]byte" default: panic("unreachable: invalid scalar type") } } // A TypeName describes the name of a type (type on a field, or method signature) type TypeName string // String satisfies the strings.Stringer interface. func (n TypeName) String() string { return string(n) } // Element returns the TypeName of the element of n. For types other than // slices and maps, this just returns n. func (n TypeName) Element() TypeName { parts := strings.SplitN(string(n), "]", 2) return TypeName(parts[len(parts)-1]) } // Key returns the TypeName of the key of n. For slices, the return TypeName is // always "int", and for non slice/map types an empty TypeName is returned. func (n TypeName) Key() TypeName { parts := strings.SplitN(string(n), "]", 2) if len(parts) == 1 { return TypeName("") } parts = strings.SplitN(parts[0], "[", 2) if len(parts) != 2 { return TypeName("") } else if parts[1] == "" { return TypeName("int") } return TypeName(parts[1]) } // IsPointer reports whether TypeName n is a pointer type, slice or a map. func (n TypeName) IsPointer() bool { ns := string(n) return strings.HasPrefix(ns, "*") || strings.HasPrefix(ns, "[") || strings.HasPrefix(ns, "map[") } // Pointer converts TypeName n to it's pointer type. If n is already a pointer, // slice, or map, it is returned unmodified. func (n TypeName) Pointer() TypeName { if n.IsPointer() { return n } return TypeName("*" + string(n)) } // Value converts TypeName n to it's value type. 
If n is already a value type, // slice, or map it is returned unmodified. func (n TypeName) Value() TypeName { return TypeName(strings.TrimPrefix(string(n), "*")) } protoc-gen-star-2.0.3/lang/go/type_name_p2_presence_test.go000066400000000000000000000074341440740147700237420ustar00rootroot00000000000000//go:build !proto3_presence // +build !proto3_presence package pgsgo import ( "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" pgs "github.com/lyft/protoc-gen-star/v2" ) func TestType(t *testing.T) { t.Parallel() ast := buildGraph(t, "names", "types") ctx := loadContext(t, "names", "types") tests := []struct { field string expected TypeName }{ // proto2 syntax, optional {"Proto2.double", "*float64"}, {"Proto2.float", "*float32"}, {"Proto2.int64", "*int64"}, {"Proto2.sfixed64", "*int64"}, {"Proto2.sint64", "*int64"}, {"Proto2.uint64", "*uint64"}, {"Proto2.fixed64", "*uint64"}, {"Proto2.int32", "*int32"}, {"Proto2.sfixed32", "*int32"}, {"Proto2.sint32", "*int32"}, {"Proto2.uint32", "*uint32"}, {"Proto2.fixed32", "*uint32"}, {"Proto2.bool", "*bool"}, {"Proto2.string", "*string"}, {"Proto2.bytes", "[]byte"}, {"Proto2.enum", "*Proto2_Enum"}, {"Proto2.ext_enum", "*typepb.Syntax"}, {"Proto2.msg", "*Proto2_Required"}, {"Proto2.ext_msg", "*durationpb.Duration"}, {"Proto2.repeated_scalar", "[]float64"}, {"Proto2.repeated_enum", "[]Proto2_Enum"}, {"Proto2.repeated_ext_enum", "[]typepb.Syntax"}, {"Proto2.repeated_msg", "[]*Proto2_Required"}, {"Proto2.repeated_ext_msg", "[]*durationpb.Duration"}, {"Proto2.map_scalar", "map[string]float32"}, {"Proto2.map_enum", "map[int32]Proto2_Enum"}, {"Proto2.map_ext_enum", "map[uint64]typepb.Syntax"}, {"Proto2.map_msg", "map[uint32]*Proto2_Required"}, {"Proto2.map_ext_msg", "map[int64]*durationpb.Duration"}, // proto2 syntax, required {"Proto2.Required.double", "*float64"}, {"Proto2.Required.float", "*float32"}, {"Proto2.Required.int64", "*int64"}, {"Proto2.Required.sfixed64", "*int64"}, 
{"Proto2.Required.sint64", "*int64"}, {"Proto2.Required.uint64", "*uint64"}, {"Proto2.Required.fixed64", "*uint64"}, {"Proto2.Required.int32", "*int32"}, {"Proto2.Required.sfixed32", "*int32"}, {"Proto2.Required.sint32", "*int32"}, {"Proto2.Required.uint32", "*uint32"}, {"Proto2.Required.fixed32", "*uint32"}, {"Proto2.Required.bool", "*bool"}, {"Proto2.Required.string", "*string"}, {"Proto2.Required.bytes", "[]byte"}, {"Proto2.Required.enum", "*Proto2_Enum"}, {"Proto2.Required.ext_enum", "*typepb.Syntax"}, {"Proto2.Required.msg", "*Proto2_Required"}, {"Proto2.Required.ext_msg", "*durationpb.Duration"}, {"Proto3.double", "float64"}, {"Proto3.float", "float32"}, {"Proto3.int64", "int64"}, {"Proto3.sfixed64", "int64"}, {"Proto3.sint64", "int64"}, {"Proto3.uint64", "uint64"}, {"Proto3.fixed64", "uint64"}, {"Proto3.int32", "int32"}, {"Proto3.sfixed32", "int32"}, {"Proto3.sint32", "int32"}, {"Proto3.uint32", "uint32"}, {"Proto3.fixed32", "uint32"}, {"Proto3.bool", "bool"}, {"Proto3.string", "string"}, {"Proto3.bytes", "[]byte"}, {"Proto3.enum", "Proto3_Enum"}, {"Proto3.ext_enum", "typepb.Syntax"}, {"Proto3.msg", "*Proto3_Message"}, {"Proto3.ext_msg", "*durationpb.Duration"}, {"Proto3.repeated_scalar", "[]float64"}, {"Proto3.repeated_enum", "[]Proto3_Enum"}, {"Proto3.repeated_ext_enum", "[]typepb.Syntax"}, {"Proto3.repeated_msg", "[]*Proto3_Message"}, {"Proto3.repeated_ext_msg", "[]*durationpb.Duration"}, {"Proto3.map_scalar", "map[string]float32"}, {"Proto3.map_enum", "map[int32]Proto3_Enum"}, {"Proto3.map_ext_enum", "map[uint64]typepb.Syntax"}, {"Proto3.map_msg", "map[uint32]*Proto3_Message"}, {"Proto3.map_ext_msg", "map[int64]*durationpb.Duration"}, } for _, test := range tests { tc := test t.Run(tc.field, func(t *testing.T) { t.Parallel() e, ok := ast.Lookup(".names.types." 
+ tc.field) require.True(t, ok, "could not find field") fld, ok := e.(pgs.Field) require.True(t, ok, "entity is not a field") assert.Equal(t, tc.expected, ctx.Type(fld)) }) } } protoc-gen-star-2.0.3/lang/go/type_name_p3_presence_test.go000066400000000000000000000051611440740147700237360ustar00rootroot00000000000000// +build proto3_presence package pgsgo import ( "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" pgs "github.com/lyft/protoc-gen-star/v2" ) func TestType(t *testing.T) { t.Parallel() ast := buildGraph(t, "presence", "types") ctx := loadContext(t, "presence", "types") tests := []struct { field string expected TypeName }{ {"Proto3.double", "float64"}, {"Proto3.float", "float32"}, {"Proto3.int64", "int64"}, {"Proto3.sfixed64", "int64"}, {"Proto3.sint64", "int64"}, {"Proto3.uint64", "uint64"}, {"Proto3.fixed64", "uint64"}, {"Proto3.int32", "int32"}, {"Proto3.sfixed32", "int32"}, {"Proto3.sint32", "int32"}, {"Proto3.uint32", "uint32"}, {"Proto3.fixed32", "uint32"}, {"Proto3.bool", "bool"}, {"Proto3.string", "string"}, {"Proto3.bytes", "[]byte"}, {"Proto3.enum", "Proto3_Enum"}, {"Proto3.ext_enum", "typepb.Syntax"}, {"Proto3.msg", "*Proto3_Message"}, {"Proto3.ext_msg", "*durationpb.Duration"}, {"Proto3.repeated_scalar", "[]float64"}, {"Proto3.repeated_enum", "[]Proto3_Enum"}, {"Proto3.repeated_ext_enum", "[]typepb.Syntax"}, {"Proto3.repeated_msg", "[]*Proto3_Message"}, {"Proto3.repeated_ext_msg", "[]*durationpb.Duration"}, {"Proto3.map_scalar", "map[string]float32"}, {"Proto3.map_enum", "map[int32]Proto3_Enum"}, {"Proto3.map_ext_enum", "map[uint64]typepb.Syntax"}, {"Proto3.map_msg", "map[uint32]*Proto3_Message"}, {"Proto3.map_ext_msg", "map[int64]*durationpb.Duration"}, // proto3 syntax optional {"Proto3.Optional.double", "*float64"}, {"Proto3.Optional.float", "*float32"}, {"Proto3.Optional.int64", "*int64"}, {"Proto3.Optional.sfixed64", "*int64"}, {"Proto3.Optional.sint64", "*int64"}, {"Proto3.Optional.uint64", "*uint64"}, 
{"Proto3.Optional.fixed64", "*uint64"}, {"Proto3.Optional.int32", "*int32"}, {"Proto3.Optional.sfixed32", "*int32"}, {"Proto3.Optional.sint32", "*int32"}, {"Proto3.Optional.uint32", "*uint32"}, {"Proto3.Optional.fixed32", "*uint32"}, {"Proto3.Optional.bool", "*bool"}, {"Proto3.Optional.string", "*string"}, {"Proto3.Optional.bytes", "[]byte"}, {"Proto3.Optional.enum", "*Proto3_Enum"}, {"Proto3.Optional.ext_enum", "*typepb.Syntax"}, {"Proto3.Optional.msg", "*Proto3_Optional"}, {"Proto3.Optional.ext_msg", "*durationpb.Duration"}, } for _, test := range tests { tc := test t.Run(tc.field, func(t *testing.T) { t.Parallel() e, ok := ast.Lookup(".names.types." + tc.field) require.True(t, ok, "could not find field") fld, ok := e.(pgs.Field) require.True(t, ok, "entity is not a field") assert.Equal(t, tc.expected, ctx.Type(fld)) }) } } protoc-gen-star-2.0.3/lang/go/type_name_test.go000066400000000000000000000076261440740147700214600ustar00rootroot00000000000000package pgsgo import ( "fmt" "testing" "github.com/stretchr/testify/assert" pgs "github.com/lyft/protoc-gen-star/v2" ) func TestTypeName(t *testing.T) { t.Parallel() tests := []struct { in string el string key string ptr string val string }{ { in: "int", el: "int", ptr: "*int", val: "int", }, { in: "*int", el: "*int", ptr: "*int", val: "int", }, { in: "foo.bar", el: "foo.bar", ptr: "*foo.bar", val: "foo.bar", }, { in: "*foo.bar", el: "*foo.bar", ptr: "*foo.bar", val: "foo.bar", }, { in: "[]string", el: "string", key: "int", ptr: "[]string", val: "[]string", }, { in: "[]*string", el: "*string", key: "int", ptr: "[]*string", val: "[]*string", }, { in: "[]foo.bar", el: "foo.bar", key: "int", ptr: "[]foo.bar", val: "[]foo.bar", }, { in: "[]*foo.bar", el: "*foo.bar", key: "int", ptr: "[]*foo.bar", val: "[]*foo.bar", }, { in: "map[string]float64", el: "float64", key: "string", ptr: "map[string]float64", val: "map[string]float64", }, { in: "map[string]*float64", el: "*float64", key: "string", ptr: "map[string]*float64", val: 
"map[string]*float64", }, { in: "map[string]foo.bar", el: "foo.bar", key: "string", ptr: "map[string]foo.bar", val: "map[string]foo.bar", }, { in: "map[string]*foo.bar", el: "*foo.bar", key: "string", ptr: "map[string]*foo.bar", val: "map[string]*foo.bar", }, { in: "[][]byte", el: "[]byte", key: "int", ptr: "[][]byte", val: "[][]byte", }, { in: "map[int64][]byte", el: "[]byte", key: "int64", ptr: "map[int64][]byte", val: "map[int64][]byte", }, } for _, test := range tests { tc := test t.Run(tc.in, func(t *testing.T) { tn := TypeName(tc.in) t.Parallel() t.Run("Element", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.el, tn.Element().String()) }) t.Run("Key", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.key, tn.Key().String()) }) t.Run("IsPointer", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.ptr == tc.in, tn.IsPointer()) }) t.Run("Pointer", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.ptr, tn.Pointer().String()) }) t.Run("Value", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.val, tn.Value().String()) }) }) } } func TestTypeName_Key_Malformed(t *testing.T) { t.Parallel() tn := TypeName("]malformed") assert.Empty(t, tn.Key().String()) } func TestScalarType_Invalid(t *testing.T) { t.Parallel() assert.Panics(t, func() { scalarType(pgs.ProtoType(0)) }) } func ExampleTypeName_Element() { types := []string{ "int", "*my.Type", "[]string", "map[string]*io.Reader", } for _, t := range types { fmt.Println(TypeName(t).Element()) } // Output: // int // *my.Type // string // *io.Reader } func ExampleTypeName_Key() { types := []string{ "int", "*my.Type", "[]string", "map[string]*io.Reader", } for _, t := range types { fmt.Println(TypeName(t).Key()) } // Output: // // // int // string } func ExampleTypeName_IsPointer() { types := []string{ "int", "*my.Type", "[]string", "map[string]*io.Reader", } for _, t := range types { fmt.Println(TypeName(t).IsPointer()) } // Output: // false // true // true // true } func ExampleTypeName_Pointer() { types 
:= []string{ "int", "*my.Type", "[]string", "map[string]*io.Reader", } for _, t := range types { fmt.Println(TypeName(t).Pointer()) } // Output: // *int // *my.Type // []string // map[string]*io.Reader } func ExampleTypeName_Value() { types := []string{ "int", "*my.Type", "[]string", "map[string]*io.Reader", } for _, t := range types { fmt.Println(TypeName(t).Value()) } // Output: // int // my.Type // []string // map[string]*io.Reader } protoc-gen-star-2.0.3/message.go000066400000000000000000000202271440740147700165260ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // Message describes a proto message. Messages can be contained in either // another Message or File, and may house further Messages and/or Enums. While // all Fields technically live on the Message, some may be contained within // OneOf blocks. type Message interface { ParentEntity // Descriptor returns the underlying proto descriptor for this message Descriptor() *descriptor.DescriptorProto // Parent returns either the File or Message that directly contains this // Message. Parent() ParentEntity // Fields returns all fields on the message, including those contained within // OneOf blocks. Fields() []Field // NonOneOfFields returns all fields not contained within OneOf blocks. NonOneOfFields() []Field // OneOfFields returns only the fields contained within OneOf blocks. OneOfFields() []Field // SyntheticOneOfFields returns only the fields contained within synthetic OneOf blocks. // See: https://github.com/protocolbuffers/protobuf/blob/v3.17.0/docs/field_presence.md SyntheticOneOfFields() []Field // OneOfs returns the OneOfs contained within this Message. OneOfs() []OneOf // RealOneOfs returns the OneOfs contained within this Message. // This excludes synthetic OneOfs. 
// See: https://github.com/protocolbuffers/protobuf/blob/v3.17.0/docs/field_presence.md RealOneOfs() []OneOf // Extensions returns all of the Extensions applied to this Message. Extensions() []Extension // Dependents returns all of the messages where message is directly or // transitively used. Dependents() []Message // IsMapEntry identifies this message as a MapEntry. If true, this message is // not generated as code, and is used exclusively when marshaling a map field // to the wire format. IsMapEntry() bool // IsWellKnown identifies whether or not this Message is a WKT from the // `google.protobuf` package. Most official plugins special case these types // and they usually need to be handled differently. IsWellKnown() bool // WellKnownType returns the WellKnownType associated with this field. If // IsWellKnown returns false, UnknownWKT is returned. WellKnownType() WellKnownType setParent(p ParentEntity) addField(f Field) addExtension(e Extension) addOneOf(o OneOf) addDependent(message Message) getDependents(set map[string]Message) } type msg struct { desc *descriptor.DescriptorProto parent ParentEntity fqn string msgs, preservedMsgs []Message enums []Enum exts []Extension defExts []Extension fields []Field oneofs []OneOf maps []Message dependents []Message dependentsCache map[string]Message info SourceCodeInfo } func (m *msg) Name() Name { return Name(m.desc.GetName()) } func (m *msg) FullyQualifiedName() string { return m.fqn } func (m *msg) Syntax() Syntax { return m.parent.Syntax() } func (m *msg) Package() Package { return m.parent.Package() } func (m *msg) File() File { return m.parent.File() } func (m *msg) BuildTarget() bool { return m.parent.BuildTarget() } func (m *msg) SourceCodeInfo() SourceCodeInfo { return m.info } func (m *msg) Descriptor() *descriptor.DescriptorProto { return m.desc } func (m *msg) Parent() ParentEntity { return m.parent } func (m *msg) IsMapEntry() bool { return m.desc.GetOptions().GetMapEntry() } func (m *msg) Enums() []Enum { 
return m.enums } func (m *msg) Messages() []Message { return m.msgs } func (m *msg) Fields() []Field { return m.fields } func (m *msg) OneOfs() []OneOf { return m.oneofs } func (m *msg) MapEntries() []Message { return m.maps } func (m *msg) WellKnownType() WellKnownType { if m.Package().ProtoName() == WellKnownTypePackage { return LookupWKT(m.Name()) } return UnknownWKT } func (m *msg) IsWellKnown() bool { return m.WellKnownType().Valid() } func (m *msg) AllEnums() []Enum { es := m.Enums() for _, m := range m.msgs { es = append(es, m.AllEnums()...) } return es } func (m *msg) AllMessages() []Message { msgs := m.Messages() for _, sm := range m.msgs { msgs = append(msgs, sm.AllMessages()...) } return msgs } func (m *msg) NonOneOfFields() (f []Field) { for _, fld := range m.fields { if !fld.InOneOf() { f = append(f, fld) } } return f } func (m *msg) OneOfFields() (f []Field) { for _, o := range m.oneofs { f = append(f, o.Fields()...) } return f } func (m *msg) SyntheticOneOfFields() (f []Field) { for _, o := range m.oneofs { if o.IsSynthetic() { f = append(f, o.Fields()...) 
} } return f } func (m *msg) RealOneOfs() (r []OneOf) { for _, o := range m.oneofs { if !o.IsSynthetic() { r = append(r, o) } } return r } func (m *msg) Imports() (i []File) { // Mapping for avoiding duplicate entries mp := make(map[string]File, len(m.fields)) for _, f := range m.fields { for _, imp := range f.Imports() { mp[imp.File().Name().String()] = imp } } for _, f := range mp { i = append(i, f) } return } func (m *msg) getDependents(set map[string]Message) { m.populateDependentsCache() for fqn, d := range m.dependentsCache { set[fqn] = d } } func (m *msg) populateDependentsCache() { if m.dependentsCache != nil { return } m.dependentsCache = map[string]Message{} for _, dep := range m.dependents { m.dependentsCache[dep.FullyQualifiedName()] = dep dep.getDependents(m.dependentsCache) } } func (m *msg) Dependents() []Message { m.populateDependentsCache() return messageSetToSlice(m.FullyQualifiedName(), m.dependentsCache) } func (m *msg) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (bool, error) { return extension(m.desc.GetOptions(), desc, &ext) } func (m *msg) Extensions() []Extension { return m.exts } func (m *msg) DefinedExtensions() []Extension { return m.defExts } func (m *msg) accept(v Visitor) (err error) { if v == nil { return nil } if v, err = v.VisitMessage(m); err != nil || v == nil { return } for _, e := range m.enums { if err = e.accept(v); err != nil { return } } for _, sm := range m.msgs { if err = sm.accept(v); err != nil { return } } for _, f := range m.fields { if err = f.accept(v); err != nil { return } } for _, o := range m.oneofs { if err = o.accept(v); err != nil { return } } for _, ext := range m.defExts { if err = ext.accept(v); err != nil { return } } return } func (m *msg) addExtension(ext Extension) { m.exts = append(m.exts, ext) } func (m *msg) addDefExtension(ext Extension) { m.defExts = append(m.defExts, ext) } func (m *msg) setParent(p ParentEntity) { m.parent = p } func (m *msg) addEnum(e Enum) { e.setParent(m) 
m.enums = append(m.enums, e) } func (m *msg) addMessage(sm Message) { sm.setParent(m) m.msgs = append(m.msgs, sm) } func (m *msg) addField(f Field) { f.setMessage(m) m.fields = append(m.fields, f) } func (m *msg) addOneOf(o OneOf) { o.setMessage(m) m.oneofs = append(m.oneofs, o) } func (m *msg) addMapEntry(me Message) { me.setParent(m) m.maps = append(m.maps, me) } func (m *msg) addDependent(message Message) { m.dependents = append(m.dependents, message) } func (m *msg) childAtPath(path []int32) Entity { switch { case len(path) == 0: return m case len(path)%2 != 0: return nil } var child Entity switch path[0] { case messageTypeFieldPath: child = m.fields[path[1]] case messageTypeNestedTypePath: child = m.preservedMsgs[path[1]] case messageTypeEnumTypePath: child = m.enums[path[1]] case messageTypeOneofDeclPath: child = m.oneofs[path[1]] default: return nil } return child.childAtPath(path[2:]) } func (m *msg) addSourceCodeInfo(info SourceCodeInfo) { m.info = info } func messageSetToSlice(name string, set map[string]Message) []Message { dependents := make([]Message, 0, len(set)) for fqn, d := range set { if fqn != name { dependents = append(dependents, d) } } return dependents } var _ Message = (*msg)(nil) protoc-gen-star-2.0.3/message_test.go000066400000000000000000000226601440740147700175700ustar00rootroot00000000000000package pgs import ( "errors" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/reflect/protodesc" descriptor "google.golang.org/protobuf/types/descriptorpb" any "google.golang.org/protobuf/types/known/anypb" ) func TestMsg_Name(t *testing.T) { t.Parallel() m := &msg{desc: &descriptor.DescriptorProto{Name: proto.String("msg")}} assert.Equal(t, "msg", m.Name().String()) } func TestMsg_FullyQualifiedName(t *testing.T) { t.Parallel() m := &msg{fqn: "msg"} assert.Equal(t, m.fqn, m.FullyQualifiedName()) } func TestMsg_Syntax(t *testing.T) { t.Parallel() m := &msg{} f := dummyFile() 
f.addMessage(m) assert.Equal(t, f.Syntax(), m.Syntax()) } func TestMsg_Package(t *testing.T) { t.Parallel() m := &msg{} f := dummyFile() f.addMessage(m) assert.NotNil(t, m.Package()) assert.Equal(t, f.Package(), m.Package()) } func TestMsg_File(t *testing.T) { t.Parallel() m := &msg{} pm := dummyMsg() pm.addMessage(m) assert.NotNil(t, m.File()) assert.Equal(t, pm.File(), m.File()) } func TestMsg_BuildTarget(t *testing.T) { t.Parallel() m := &msg{} f := dummyFile() f.addMessage(m) assert.False(t, m.BuildTarget()) f.buildTarget = true assert.True(t, m.BuildTarget()) } func TestMsg_Descriptor(t *testing.T) { t.Parallel() m := &msg{desc: &descriptor.DescriptorProto{}} assert.Equal(t, m.desc, m.Descriptor()) } func TestMsg_Parent(t *testing.T) { t.Parallel() m := &msg{} pm := dummyMsg() pm.addMessage(m) assert.Equal(t, pm, m.Parent()) } func TestMsg_IsMapEntry(t *testing.T) { t.Parallel() m := &msg{desc: &descriptor.DescriptorProto{}} assert.False(t, m.IsMapEntry()) m.desc.Options = &descriptor.MessageOptions{ MapEntry: proto.Bool(true), } assert.True(t, m.IsMapEntry()) } func TestMsg_Enums(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.Enums()) sm := &msg{} sm.addEnum(&enum{}) m.addMessage(sm) m.addEnum(&enum{}) assert.Len(t, m.Enums(), 1) } func TestMsg_AllEnums(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.AllEnums()) sm := &msg{} sm.addEnum(&enum{}) m.addMessage(sm) m.addEnum(&enum{}) assert.Len(t, m.AllEnums(), 2) } func TestMsg_Messages(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.Messages()) sm := &msg{} sm.addMessage(&msg{}) m.addMessage(sm) assert.Len(t, m.Messages(), 1) } func TestMsg_AllMessages(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.AllMessages()) sm := &msg{} sm.addMessage(&msg{}) m.addMessage(sm) assert.Len(t, m.AllMessages(), 2) } func TestMsg_MapEntries(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.MapEntries()) m.addMapEntry(&msg{}) assert.Len(t, m.MapEntries(), 1) } func 
TestMsg_Fields(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.Fields()) m.addField(&field{}) m.addField(&field{oneof: &oneof{}}) assert.Len(t, m.Fields(), 2) } func TestMsg_NonOneOfFields(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.NonOneOfFields()) m.addField(&field{}) m.addField(&field{oneof: &oneof{}}) m.addField(&field{}) assert.Len(t, m.NonOneOfFields(), 2) } func TestMsg_OneOfFields(t *testing.T) { t.Parallel() o := &oneof{} o.addField(&field{}) m := &msg{} m.addField(&field{}) m.addField(&field{}) assert.Empty(t, m.OneOfFields()) m.addOneOf(o) assert.Len(t, m.OneOfFields(), 1) } func TestMsg_OneOfs(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.OneOfs()) m.addOneOf(&oneof{}) assert.Len(t, m.OneOfs(), 1) } func TestMsg_SyntheticOneOfFields_And_RealOneOfs(t *testing.T) { t.Parallel() oSyn := &oneof{} oSyn.flds = []Field{dummyOneOfField(true)} oSyn.flds[0].setOneOf(oSyn) oReal := &oneof{} oReal.flds = []Field{dummyField(), dummyField()} oReal.flds[0].setOneOf(oReal) oReal.flds[1].setOneOf(oReal) // no one offs m := dummyMsg() assert.Len(t, m.OneOfFields(), 0, "oneof fields") assert.Len(t, m.SyntheticOneOfFields(), 0, "synthetic oneof fields") assert.Len(t, m.OneOfs(), 0, "oneofs") assert.Len(t, m.RealOneOfs(), 0, "real oneofs") // one real oneof m.addField(oReal.flds[0]) m.addField(oReal.flds[1]) m.addOneOf(oReal) assert.Len(t, m.OneOfFields(), 2, "oneof fields") assert.Len(t, m.SyntheticOneOfFields(), 0, "synthetic oneof fields") assert.Len(t, m.OneOfs(), 1, "oneofs") assert.Len(t, m.RealOneOfs(), 1, "real oneofs") // one real, one synthetic oneof m.addField(oSyn.flds[0]) m.addOneOf(oSyn) assert.Len(t, m.OneOfFields(), 3, "oneof fields") assert.Len(t, m.SyntheticOneOfFields(), 1, "synthetic oneof fields") assert.Len(t, m.OneOfs(), 2, "oneofs") assert.Len(t, m.RealOneOfs(), 1, "real oneofs") // one synthetic oneof m = dummyMsg() m.addField(oSyn.flds[0]) m.addOneOf(oSyn) assert.Len(t, m.OneOfFields(), 1, "oneof fields") 
assert.Len(t, m.SyntheticOneOfFields(), 1, "synthetic oneof fields") assert.Len(t, m.OneOfs(), 1, "oneofs") assert.Len(t, m.RealOneOfs(), 0, "real oneofs") } func TestMsg_Extension(t *testing.T) { // cannot be parallel m := &msg{desc: &descriptor.DescriptorProto{}} assert.NotPanics(t, func() { m.Extension(nil, nil) }) } func TestMsg_Extensions(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.Extensions()) ext := &ext{} m.addExtension(ext) assert.Len(t, m.Extensions(), 1) } func TestMsg_DefinedExtensions(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.DefinedExtensions()) ext := &ext{} m.addDefExtension(ext) assert.Len(t, m.DefinedExtensions(), 1) } func TestMsg_Accept(t *testing.T) { t.Parallel() m := &msg{} m.addMessage(&msg{}) m.addEnum(&enum{}) m.addField(&field{}) m.addOneOf(&oneof{}) m.addDefExtension(&ext{}) assert.NoError(t, m.accept(nil)) v := &mockVisitor{} assert.NoError(t, m.accept(v)) assert.Equal(t, 1, v.message) assert.Zero(t, v.enum) assert.Zero(t, v.field) assert.Zero(t, v.oneof) assert.Zero(t, v.extension) v.Reset() v.v = v v.err = errors.New("") assert.Error(t, m.accept(v)) assert.Equal(t, 1, v.message) assert.Zero(t, v.enum) assert.Zero(t, v.field) assert.Zero(t, v.oneof) assert.Zero(t, v.extension) v.Reset() assert.NoError(t, m.accept(v)) assert.Equal(t, 2, v.message) assert.Equal(t, 1, v.enum) assert.Equal(t, 1, v.field) assert.Equal(t, 1, v.oneof) assert.Equal(t, 1, v.extension) v.Reset() m.addDefExtension(&mockExtension{err: errors.New("")}) assert.Error(t, m.accept(v)) assert.Equal(t, 2, v.message) assert.Equal(t, 1, v.enum) assert.Equal(t, 1, v.field) assert.Equal(t, 1, v.oneof) assert.Equal(t, 2, v.extension) v.Reset() m.addOneOf(&mockOneOf{err: errors.New("")}) assert.Error(t, m.accept(v)) assert.Equal(t, 2, v.message) assert.Equal(t, 1, v.enum) assert.Equal(t, 1, v.field) assert.Equal(t, 2, v.oneof) assert.Zero(t, v.extension) v.Reset() m.addField(&mockField{err: errors.New("")}) assert.Error(t, m.accept(v)) 
assert.Equal(t, 2, v.message) assert.Equal(t, 1, v.enum) assert.Equal(t, 2, v.field) assert.Zero(t, v.oneof) assert.Zero(t, v.extension) v.Reset() m.addMessage(&mockMessage{err: errors.New("")}) assert.Error(t, m.accept(v)) assert.Equal(t, 3, v.message) assert.Equal(t, 1, v.enum) assert.Zero(t, v.field) assert.Zero(t, v.oneof) assert.Zero(t, v.extension) v.Reset() m.addEnum(&mockEnum{err: errors.New("")}) assert.Error(t, m.accept(v)) assert.Equal(t, 2, v.enum) assert.Equal(t, 1, v.message) assert.Zero(t, v.field) assert.Zero(t, v.oneof) assert.Zero(t, v.extension) } func TestMsg_Imports(t *testing.T) { t.Parallel() m := &msg{} assert.Empty(t, m.Imports()) m.addField(&mockField{i: []File{&file{}, &file{}}}) assert.Len(t, m.Imports(), 1) nf := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} m.addField(&mockField{i: []File{nf, nf}}) assert.Len(t, m.Imports(), 2) } func TestMsg_Dependents(t *testing.T) { t.Parallel() pkg := dummyPkg() f := &file{ pkg: pkg, desc: &descriptor.FileDescriptorProto{ Package: proto.String(pkg.ProtoName().String()), Syntax: proto.String(string(Proto3)), Name: proto.String("test_file.proto"), }, } m := &msg{parent: f} m.fqn = fullyQualifiedName(f, m) m2 := dummyMsg() m.addDependent(m2) deps := m.Dependents() assert.Len(t, deps, 1) assert.Contains(t, deps, m2) } func TestMsg_ChildAtPath(t *testing.T) { t.Parallel() m := &msg{} assert.Equal(t, m, m.childAtPath(nil)) assert.Nil(t, m.childAtPath([]int32{1})) assert.Nil(t, m.childAtPath([]int32{999, 456})) } func TestMsg_WellKnownType(t *testing.T) { d := (&any.Any{}).ProtoReflect().Descriptor() fd := protodesc.ToFileDescriptorProto(d.ParentFile()) md := protodesc.ToDescriptorProto(d) p := &pkg{fd: fd} f := &file{desc: fd} m := &msg{desc: md} f.addMessage(m) p.addFile(f) assert.True(t, m.IsWellKnown()) assert.Equal(t, AnyWKT, m.WellKnownType()) m.desc.Name = proto.String("Foobar") assert.False(t, m.IsWellKnown()) assert.Equal(t, UnknownWKT, m.WellKnownType()) 
m.desc.Name = proto.String("Any") f.desc.Package = proto.String("fizz.buzz") assert.False(t, m.IsWellKnown()) assert.Equal(t, UnknownWKT, m.WellKnownType()) } type mockMessage struct { Message i []File p ParentEntity err error } func (m *mockMessage) Imports() []File { return m.i } func (m *mockMessage) setParent(p ParentEntity) { m.p = p } func (m *mockMessage) accept(v Visitor) error { _, err := v.VisitMessage(m) if m.err != nil { return m.err } return err } func dummyMsg() *msg { f := dummyFile() m := &msg{ desc: &descriptor.DescriptorProto{Name: proto.String("message")}, } f.addMessage(m) return m } protoc-gen-star-2.0.3/method.go000066400000000000000000000057431440740147700163700ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // Method describes a method on a proto service type Method interface { Entity // Descriptor returns the underlying proto descriptor for this. Descriptor() *descriptor.MethodDescriptorProto // Service returns the parent service for this. Service() Service // Input returns the Message representing the input type for this. Input() Message // Output returns the Message representing the output type for this. Output() Message // ClientStreaming indicates if this method allows clients to stream inputs. ClientStreaming() bool // ServerStreaming indicates if this method allows servers to stream outputs. 
ServerStreaming() bool setService(Service) } type method struct { desc *descriptor.MethodDescriptorProto fqn string service Service in, out Message info SourceCodeInfo } func (m *method) Name() Name { return Name(m.desc.GetName()) } func (m *method) FullyQualifiedName() string { return m.fqn } func (m *method) Syntax() Syntax { return m.service.Syntax() } func (m *method) Package() Package { return m.service.Package() } func (m *method) File() File { return m.service.File() } func (m *method) BuildTarget() bool { return m.service.BuildTarget() } func (m *method) SourceCodeInfo() SourceCodeInfo { return m.info } func (m *method) Descriptor() *descriptor.MethodDescriptorProto { return m.desc } func (m *method) Service() Service { return m.service } func (m *method) Input() Message { return m.in } func (m *method) Output() Message { return m.out } func (m *method) ClientStreaming() bool { return m.desc.GetClientStreaming() } func (m *method) ServerStreaming() bool { return m.desc.GetServerStreaming() } func (m *method) BiDirStreaming() bool { return m.ClientStreaming() && m.ServerStreaming() } func (m *method) Imports() (i []File) { mine := m.File().Name() input := m.Input().File() output := m.Output().File() if mine != input.Name() { i = append(i, input) } if mine != output.Name() && input.Name() != output.Name() { i = append(i, output) } return } func (m *method) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (ok bool, err error) { return extension(m.desc.GetOptions(), desc, &ext) } func (m *method) accept(v Visitor) (err error) { if v == nil { return } _, err = v.VisitMethod(m) return } func (m *method) setService(s Service) { m.service = s } func (m *method) childAtPath(path []int32) Entity { if len(path) == 0 { return m } return nil } func (m *method) addSourceCodeInfo(info SourceCodeInfo) { m.info = info } var _ Method = (*method)(nil) 
protoc-gen-star-2.0.3/method_test.go000066400000000000000000000076321440740147700174260ustar00rootroot00000000000000package pgs import ( "testing" "errors" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestMethod_Name(t *testing.T) { t.Parallel() m := &method{desc: &descriptor.MethodDescriptorProto{Name: proto.String("foo")}} assert.Equal(t, "foo", m.Name().String()) } func TestMethod_FullyQualifiedName(t *testing.T) { t.Parallel() m := &method{fqn: "fizz"} assert.Equal(t, m.fqn, m.FullyQualifiedName()) } func TestMethod_Syntax(t *testing.T) { t.Parallel() m := &method{} s := dummyService() s.addMethod(m) assert.Equal(t, s.Syntax(), m.Syntax()) } func TestMethod_Package(t *testing.T) { t.Parallel() m := &method{} s := dummyService() s.addMethod(m) assert.NotNil(t, m.Package()) assert.Equal(t, s.Package(), m.Package()) } func TestMethod_File(t *testing.T) { t.Parallel() m := &method{} s := dummyService() s.addMethod(m) assert.NotNil(t, m.File()) assert.Equal(t, s.File(), m.File()) } func TestMethod_BuildTarget(t *testing.T) { t.Parallel() m := &method{} s := dummyService() s.addMethod(m) assert.False(t, m.BuildTarget()) s.setFile(&file{buildTarget: true}) assert.True(t, m.BuildTarget()) } func TestMethod_Descriptor(t *testing.T) { t.Parallel() m := &method{desc: &descriptor.MethodDescriptorProto{}} assert.Equal(t, m.desc, m.Descriptor()) } func TestMethod_Service(t *testing.T) { t.Parallel() m := &method{} s := dummyService() s.addMethod(m) assert.Equal(t, s, m.Service()) } func TestMethod_Input(t *testing.T) { t.Parallel() m := &method{in: &msg{}} assert.Equal(t, m.in, m.Input()) } func TestMethod_Output(t *testing.T) { t.Parallel() m := &method{out: &msg{}} assert.Equal(t, m.out, m.Output()) } func TestMethod_ClientStreaming(t *testing.T) { t.Parallel() m := &method{desc: &descriptor.MethodDescriptorProto{}} assert.False(t, m.ClientStreaming()) m.desc.ClientStreaming = 
proto.Bool(true) assert.True(t, m.ClientStreaming()) } func TestMethod_ServerStreaming(t *testing.T) { t.Parallel() m := &method{desc: &descriptor.MethodDescriptorProto{}} assert.False(t, m.ServerStreaming()) m.desc.ServerStreaming = proto.Bool(true) assert.True(t, m.ServerStreaming()) } func TestMethod_BiDirStreaming(t *testing.T) { t.Parallel() m := &method{desc: &descriptor.MethodDescriptorProto{}} assert.False(t, m.BiDirStreaming()) m.desc.ServerStreaming = proto.Bool(true) assert.False(t, m.BiDirStreaming()) m.desc.ServerStreaming = proto.Bool(false) m.desc.ClientStreaming = proto.Bool(true) assert.False(t, m.BiDirStreaming()) m.desc.ServerStreaming = proto.Bool(true) assert.True(t, m.BiDirStreaming()) } func TestMethod_Imports(t *testing.T) { t.Parallel() s := dummyService() m := &method{ in: dummyMsg(), out: dummyMsg(), } s.addMethod(m) f := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} assert.Empty(t, m.Imports()) m.in = &msg{parent: f} assert.Len(t, m.Imports(), 1) m.out = &msg{parent: &file{}} assert.Len(t, m.Imports(), 2) m.out = &msg{parent: f} assert.Len(t, m.Imports(), 1) } func TestMethod_Extension(t *testing.T) { // cannot be parallel m := &method{desc: &descriptor.MethodDescriptorProto{}} assert.NotPanics(t, func() { m.Extension(nil, nil) }) } func TestMethod_Accept(t *testing.T) { t.Parallel() m := &method{} assert.Nil(t, m.accept(nil)) v := &mockVisitor{err: errors.New("foo")} assert.Error(t, m.accept(v)) assert.Equal(t, 1, v.method) } func TestMethod_ChildAtPath(t *testing.T) { t.Parallel() m := &method{} assert.Equal(t, m, m.childAtPath(nil)) assert.Nil(t, m.childAtPath([]int32{1})) } type mockMethod struct { Method i []File s Service err error } func (m *mockMethod) Imports() []File { return m.i } func (m *mockMethod) setService(s Service) { m.s = s } func (m *mockMethod) accept(v Visitor) error { _, err := v.VisitMethod(m) if m.err != nil { return m.err } return err } 
protoc-gen-star-2.0.3/module.go000066400000000000000000000223031440740147700163640ustar00rootroot00000000000000package pgs import "os" // Module describes the interface for a domain-specific code generation module // that can be registered with the PG* generator. type Module interface { // The Name of the Module, used when establishing the build context and used // as the base prefix for all debugger output. Name() string // InitContext is called on a Module with a pre-configured BuildContext that // should be stored and used by the Module. InitContext(c BuildContext) // Execute is called on the module with the target Files as well as all // loaded Packages from the gatherer. The module should return a slice of // Artifacts that it would like to be generated. Execute(targets map[string]File, packages map[string]Package) []Artifact } // ModuleBase provides utility methods and a base implementation for a // protoc-gen-star Module. ModuleBase should be used as an anonymously embedded // field of an actual Module implementation. The only methods that need to be // overridden are Name and Execute. // // type MyModule { // *pgs.ModuleBase // } // // func InitMyModule() *MyModule { return &MyModule{ &pgs.ModuleBase{} } } // // func (m *MyModule) Name() string { return "MyModule" } // // func (m *MyModule) Execute(...) []pgs.Artifact { ... } type ModuleBase struct { BuildContext artifacts []Artifact } // InitContext populates this Module with the BuildContext from the parent // Generator, allowing for easy debug logging, error checking, and output path // management. This method is called prior to Execute for modules registered // with the generator. func (m *ModuleBase) InitContext(c BuildContext) { m.BuildContext = c m.Debug("initializing") } // Name satisfies the Module interface, however this method will panic and must // be overridden by a parent struct. 
func (m *ModuleBase) Name() string { panic("Name method is not implemented for this module") } // Execute satisfies the Module interface, however this method will fail and // must be overridden by a parent struct. func (m *ModuleBase) Execute(targets map[string]File, packages map[string]Package) []Artifact { m.Fail("Execute method is not implemented for this module") return m.Artifacts() } // Push adds a prefix to the Module's BuildContext. Pop should be called when // the context is complete. func (m *ModuleBase) Push(prefix string) BuildContext { m.BuildContext = m.BuildContext.Push(prefix) return m } // PushDir changes the OutputPath of the Module's BuildContext. Pop (or PopDir) // should be called when that context is complete. func (m *ModuleBase) PushDir(dir string) BuildContext { m.BuildContext = m.BuildContext.PushDir(dir) return m } // Pop removes the last push from the Module's BuildContext. This method should // only be called after a paired Push or PushDir. func (m *ModuleBase) Pop() BuildContext { m.BuildContext = m.BuildContext.Pop() return m } // PopDir removes the last PushDir from the Module's BuildContext. This method // should only be called after a paired PushDir. func (m *ModuleBase) PopDir() BuildContext { m.BuildContext = m.BuildContext.PopDir() return m } // Artifacts returns the slice of generation artifacts that have been captured // by the Module. This method should/can be the return value of its Execute // method. Subsequent calls will return a nil slice until more artifacts are // added. func (m *ModuleBase) Artifacts() []Artifact { out := m.artifacts m.artifacts = nil return out } // AddArtifact adds an Artifact to this Module's collection of generation // artifacts. This method is available as a convenience but the other Add & // Overwrite methods should be used preferentially. func (m *ModuleBase) AddArtifact(a ...Artifact) { m.artifacts = append(m.artifacts, a...) 
} // AddGeneratorFile adds a file with the provided name and contents to the code // generation response payload to protoc. Name must be a path relative to and // within the protoc-plugin's output destination, which may differ from the // BuildContext's OutputPath value. If another Module or Plugin has added a // file with the same name, protoc will produce an error. func (m *ModuleBase) AddGeneratorFile(name, content string) { m.AddArtifact(GeneratorFile{ Name: name, Contents: content, }) } // OverwriteGeneratorFile behaves the same as AddGeneratorFile, however if a // previously executed Module has created a file with the same name, it will be // overwritten with this one. func (m *ModuleBase) OverwriteGeneratorFile(name, content string) { m.AddArtifact(GeneratorFile{ Name: name, Contents: content, Overwrite: true, }) } // AddGeneratorTemplateFile behaves the same as AddGeneratorFile, however the // contents are rendered from the provided tpl and data. func (m *ModuleBase) AddGeneratorTemplateFile(name string, tpl Template, data interface{}) { m.AddArtifact(GeneratorTemplateFile{ Name: name, TemplateArtifact: TemplateArtifact{ Template: tpl, Data: data, }, }) } // OverwriteGeneratorTemplateFile behaves the same as OverwriteGeneratorFile, // however the contents are rendered from the provided tpl and data. func (m *ModuleBase) OverwriteGeneratorTemplateFile(name string, tpl Template, data interface{}) { m.AddArtifact(GeneratorTemplateFile{ Name: name, Overwrite: true, TemplateArtifact: TemplateArtifact{ Template: tpl, Data: data, }, }) } // AddGeneratorAppend attempts to append content to the specified file name. // Name must be a path relative to and within the protoc-plugin's output // destination, which may differ from the BuildContext's OutputPath value. If // the file is not generated by this protoc-plugin, execution will fail. 
func (m *ModuleBase) AddGeneratorAppend(name, content string) { m.AddArtifact(GeneratorAppend{ FileName: name, Contents: content, }) } // AddGeneratorTemplateAppend behaves the same as AddGeneratorAppend, however // the contents are rendered from the provided tpl and data. func (m *ModuleBase) AddGeneratorTemplateAppend(name string, tpl Template, data interface{}) { m.AddArtifact(GeneratorTemplateAppend{ FileName: name, TemplateArtifact: TemplateArtifact{ Template: tpl, Data: data, }, }) } // AddGeneratorInjection attempts to inject content into the file with name at // the specified insertion point. Name must be a path relative to and within // the protoc-plugin's output destination, which may differ from the // BuildContext's OutputPath value. The file does not need to be generated by // this protoc-plugin but the generating plugin must be called first in the // protoc execution. // // See: https://godoc.org/github.com/golang/protobuf/protoc-gen-go/plugin#CodeGeneratorResponse_File func (m *ModuleBase) AddGeneratorInjection(name, point, content string) { m.AddArtifact(GeneratorInjection{ FileName: name, InsertionPoint: point, Contents: content, }) } // AddGeneratorTemplateInjection behaves the same as AddGeneratorInjection, // however the contents are rendered from the provided tpl and data. func (m *ModuleBase) AddGeneratorTemplateInjection(name, point string, tpl Template, data interface{}) { m.AddArtifact(GeneratorTemplateInjection{ FileName: name, InsertionPoint: point, TemplateArtifact: TemplateArtifact{ Template: tpl, Data: data, }, }) } // AddCustomFile creates a file directly on the file system with the provided // content and perms. Unlike AddGeneratorFile, this method does not use protoc // to generate the file. If name is a relative path, it is related to the // directory in which protoc was executed; name can also be an absolute path. 
// If a file already exists with the specified name, the file will not be // created and there will be no generation error. func (m *ModuleBase) AddCustomFile(name, content string, perms os.FileMode) { m.AddArtifact(CustomFile{ Name: name, Contents: content, Perms: perms, }) } // OverwriteCustomFile behaves the same as AddCustomFile, however if the file // already exists, it will be overwritten with this one. func (m *ModuleBase) OverwriteCustomFile(name, content string, perms os.FileMode) { m.AddArtifact(CustomFile{ Name: name, Contents: content, Perms: perms, Overwrite: true, }) } // AddCustomTemplateFile behaves the same as AddCustomFile, however the // contents are rendered from the provided tpl and data. func (m *ModuleBase) AddCustomTemplateFile(name string, tpl Template, data interface{}, perms os.FileMode) { m.AddArtifact(CustomTemplateFile{ Name: name, Perms: perms, TemplateArtifact: TemplateArtifact{ Template: tpl, Data: data, }, }) } // OverwriteCustomTemplateFile behaves the same as OverwriteCustomFile, however // the contents are rendered from the provided tpl and data. func (m *ModuleBase) OverwriteCustomTemplateFile(name string, tpl Template, data interface{}, perms os.FileMode) { m.AddArtifact(CustomTemplateFile{ Name: name, Perms: perms, Overwrite: true, TemplateArtifact: TemplateArtifact{ Template: tpl, Data: data, }, }) } // AddError adds a string to the `errors` field of the created // CodeGeneratorResponse. Multiple calls to AddError will cause the errors to // be concatenated (separated by "; "). 
func (m *ModuleBase) AddError(message string) { m.AddArtifact(GeneratorError{Message: message}) } var _ Module = (*ModuleBase)(nil) protoc-gen-star-2.0.3/module_test.go000066400000000000000000000140441440740147700174260ustar00rootroot00000000000000package pgs import ( "testing" "text/template" "github.com/stretchr/testify/assert" ) type mockModule struct { *ModuleBase name string executed bool } func newMockModule() *mockModule { return &mockModule{ModuleBase: &ModuleBase{}} } func (m *mockModule) Name() string { return m.name } func (m *mockModule) Execute(targets map[string]File, packages map[string]Package) []Artifact { m.executed = true return nil } func TestModuleBase_InitContext(t *testing.T) { t.Parallel() m := new(ModuleBase) assert.Nil(t, m.BuildContext) bc := Context(InitMockDebugger(), Parameters{}, ".") m.InitContext(bc) assert.NotNil(t, m.BuildContext) } func TestModuleBase_Name(t *testing.T) { t.Parallel() m := new(ModuleBase) assert.Panics(t, func() { m.Name() }) } func TestModuleBase_Execute(t *testing.T) { t.Parallel() m := new(ModuleBase) d := InitMockDebugger() m.InitContext(Context(d, Parameters{}, ".")) assert.NotPanics(t, func() { m.Execute(nil, nil) }) assert.True(t, d.Failed()) } func TestModuleBase_PushPop(t *testing.T) { t.Parallel() m := new(ModuleBase) m.InitContext(Context(InitMockDebugger(), Parameters{}, ".")) m.Push("foo") m.Pop() } func TestModuleBase_PushPopDir(t *testing.T) { t.Parallel() m := new(ModuleBase) m.InitContext(Context(InitMockDebugger(), Parameters{}, "foo")) m.PushDir("bar") assert.Equal(t, "foo/bar", m.OutputPath()) m.PopDir() assert.Equal(t, "foo", m.OutputPath()) } func TestModuleBase_Artifacts(t *testing.T) { t.Parallel() arts := []Artifact{nil, nil, nil} m := &ModuleBase{artifacts: arts} assert.Equal(t, arts, m.Artifacts()) assert.Empty(t, m.Artifacts()) } func TestModuleBase_AddArtifact(t *testing.T) { t.Parallel() m := new(ModuleBase) assert.Empty(t, m.Artifacts()) m.AddArtifact(nil, nil) assert.Len(t, 
m.Artifacts(), 2) } func TestModuleBase_AddGeneratorFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddGeneratorFile("foo", "bar") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorFile{ Name: "foo", Contents: "bar", }, arts[0]) } func TestModuleBase_OverwriteGeneratorFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.OverwriteGeneratorFile("foo", "bar") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorFile{ Name: "foo", Contents: "bar", Overwrite: true, }, arts[0]) } func TestModuleBase_AddGeneratorTemplateFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddGeneratorTemplateFile("foo", template.New("bar"), "baz") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorTemplateFile{ Name: "foo", TemplateArtifact: TemplateArtifact{ Template: template.New("bar"), Data: "baz", }, }, arts[0]) } func TestModuleBase_OverwriteGeneratorTemplateFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.OverwriteGeneratorTemplateFile("foo", template.New("bar"), "baz") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorTemplateFile{ Name: "foo", Overwrite: true, TemplateArtifact: TemplateArtifact{ Template: template.New("bar"), Data: "baz", }, }, arts[0]) } func TestModuleBase_AddGeneratorAppend(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddGeneratorAppend("foo", "bar") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorAppend{ FileName: "foo", Contents: "bar", }, arts[0]) } func TestModuleBase_AddGeneratorTemplateAppend(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddGeneratorTemplateAppend("foo", template.New("bar"), "baz") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorTemplateAppend{ FileName: "foo", TemplateArtifact: TemplateArtifact{ Template: template.New("bar"), Data: "baz", }, }, arts[0]) } func TestModuleBase_AddGeneratorInjection(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddGeneratorInjection("foo", 
"bar", "baz") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorInjection{ FileName: "foo", InsertionPoint: "bar", Contents: "baz", }, arts[0]) } func TestModuleBase_AddGeneratorTemplateInjection(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddGeneratorTemplateInjection("foo", "bar", template.New("fizz"), "buzz") arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, GeneratorTemplateInjection{ FileName: "foo", InsertionPoint: "bar", TemplateArtifact: TemplateArtifact{ Template: template.New("fizz"), Data: "buzz", }, }, arts[0]) } func TestModuleBase_AddCustomFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddCustomFile("foo", "bar", 0765) arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, CustomFile{ Name: "foo", Contents: "bar", Perms: 0765, }, arts[0]) } func TestModuleBase_OverwriteCustomFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.OverwriteCustomFile("foo", "bar", 0765) arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, CustomFile{ Name: "foo", Contents: "bar", Overwrite: true, Perms: 0765, }, arts[0]) } func TestModuleBase_AddCustomTemplateFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddCustomTemplateFile("foo", template.New("bar"), "baz", 0765) arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, CustomTemplateFile{ Name: "foo", Perms: 0765, TemplateArtifact: TemplateArtifact{ Template: template.New("bar"), Data: "baz", }, }, arts[0]) } func TestModuleBase_OverwriteCustomTemplateFile(t *testing.T) { t.Parallel() m := new(ModuleBase) m.OverwriteCustomTemplateFile("foo", template.New("bar"), "baz", 0765) arts := m.Artifacts() assert.Len(t, arts, 1) assert.Equal(t, CustomTemplateFile{ Name: "foo", Overwrite: true, Perms: 0765, TemplateArtifact: TemplateArtifact{ Template: template.New("bar"), Data: "baz", }, }, arts[0]) } func TestModuleBase_AddError(t *testing.T) { t.Parallel() m := new(ModuleBase) m.AddError("bohoo") arts := m.Artifacts() assert.Len(t, arts, 1) 
assert.Equal(t, GeneratorError{Message: "bohoo"}, arts[0]) } protoc-gen-star-2.0.3/name.go000066400000000000000000000154141440740147700160240ustar00rootroot00000000000000package pgs import ( "bytes" "fmt" "strings" "unicode" "unicode/utf8" "path/filepath" ) // A Name describes an identifier of an Entity (Message, Field, Enum, Service, // Field). It can be converted to multiple forms using the provided helper // methods, or a custom transform can be used to modify its behavior. type Name string // String satisfies the strings.Stringer interface. func (n Name) String() string { return string(n) } // UpperCamelCase converts Name n to upper camelcase, where each part is // title-cased and concatenated with no separator. func (n Name) UpperCamelCase() Name { return n.Transform(strings.Title, strings.Title, "") } // LowerCamelCase converts Name n to lower camelcase, where each part is // title-cased and concatenated with no separator except the first which is // lower-cased. func (n Name) LowerCamelCase() Name { return n.Transform(strings.Title, strings.ToLower, "") } // ScreamingSnakeCase converts Name n to screaming-snake-case, where each part // is all-caps and concatenated with underscores. func (n Name) ScreamingSnakeCase() Name { return n.Transform(strings.ToUpper, strings.ToUpper, "_") } // LowerSnakeCase converts Name n to lower-snake-case, where each part is // lower-cased and concatenated with underscores. func (n Name) LowerSnakeCase() Name { return n.Transform(strings.ToLower, strings.ToLower, "_") } // UpperSnakeCase converts Name n to upper-snake-case, where each part is // title-cased and concatenated with underscores. func (n Name) UpperSnakeCase() Name { return n.Transform(strings.Title, strings.Title, "_") } // SnakeCase converts Name n to snake-case, where each part preserves its // capitalization and concatenated with underscores. 
func (n Name) SnakeCase() Name { return n.Transform(ID, ID, "_") } // LowerDotNotation converts Name n to lower dot notation, where each part is // lower-cased and concatenated with periods. func (n Name) LowerDotNotation() Name { return n.Transform(strings.ToLower, strings.ToLower, ".") } // UpperDotNotation converts Name n to upper dot notation, where each part is // title-cased and concatenated with periods. func (n Name) UpperDotNotation() Name { return n.Transform(strings.Title, strings.Title, ".") } // Split breaks apart Name n into its constituent components. Precedence // follows dot notation, then underscores (excluding underscore prefixes), then // camelcase. Numbers are treated as standalone components. func (n Name) Split() (parts []string) { ns := string(n) switch { case ns == "": return []string{""} case strings.LastIndex(ns, ".") >= 0: return strings.Split(ns, ".") case strings.LastIndex(ns, "_") > 0: // leading underscore does not count parts = strings.Split(ns, "_") if parts[0] == "" { parts[1] = "_" + parts[1] return parts[1:] } return default: // camelCase buf := &bytes.Buffer{} var capt, lodash, num bool for _, r := range ns { uc := unicode.IsUpper(r) || unicode.IsTitle(r) dg := unicode.IsDigit(r) if r == '_' && buf.Len() == 0 && len(parts) == 0 { lodash = true } if uc && !capt && buf.Len() > 0 && !lodash { // new upper letter parts = append(parts, buf.String()) buf.Reset() } else if dg && !num && buf.Len() > 0 && !lodash { // new digit parts = append(parts, buf.String()) buf.Reset() } else if !uc && capt && buf.Len() > 1 { // upper to lower if ss := buf.String(); len(ss) > 1 && (len(ss) != 2 || ss[0] != '_') { pr, _ := utf8.DecodeLastRuneInString(ss) parts = append(parts, strings.TrimSuffix(ss, string(pr))) buf.Reset() buf.WriteRune(pr) } } else if !dg && num && buf.Len() >= 1 { parts = append(parts, buf.String()) buf.Reset() } num = dg capt = uc buf.WriteRune(r) } parts = append(parts, buf.String()) return } } // NameTransformer is a function 
that mutates a string. Many of the methods in // the standard strings package satisfy this signature. type NameTransformer func(string) string // ID is a NameTransformer that does not mutate the string. func ID(s string) string { return s } // Chain combines the behavior of two Transformers into one. If multiple // transformations need to be performed on a Name, this method should be used // to reduce it to a single transformation before applying. func (n NameTransformer) Chain(t NameTransformer) NameTransformer { return func(s string) string { return t(n(s)) } } // Transform applies a transformation to the parts of Name n, returning a new // Name. Transformer first is applied to the first part, with mod applied to // all subsequent ones. The parts are then concatenated with the separator sep. // For optimal efficiency, multiple NameTransformers should be Chained together // before calling Transform. func (n Name) Transform(mod, first NameTransformer, sep string) Name { parts := n.Split() for i, p := range parts { if i == 0 { parts[i] = first(p) } else { parts[i] = mod(p) } } return Name(strings.Join(parts, sep)) } // A FilePath describes the name of a file or directory. This type simplifies // path related operations. type FilePath string // JoinPaths is an convenient alias around filepath.Join, to easily create // FilePath types. func JoinPaths(elem ...string) FilePath { return FilePath(filepath.Join(elem...)) } // String satisfies the strings.Stringer interface. func (n FilePath) String() string { return string(n) } // Dir returns the parent directory of the current FilePath. This method is an // alias around filepath.Dir. func (n FilePath) Dir() FilePath { return FilePath(filepath.Dir(n.String())) } // Base returns the base of the current FilePath (the last element in the // path). This method is an alias around filepath.Base. 
func (n FilePath) Base() string { return filepath.Base(n.String()) } // Ext returns the extension of the current FilePath (starting at and including // the last '.' in the FilePath). This method is an alias around filepath.Ext. func (n FilePath) Ext() string { return filepath.Ext(n.String()) } // BaseName returns the Base of the current FilePath without Ext. func (n FilePath) BaseName() string { return strings.TrimSuffix(n.Base(), n.Ext()) } // SetExt returns a new FilePath with the extension replaced with ext. func (n FilePath) SetExt(ext string) FilePath { return n.SetBase(n.BaseName() + ext) } // SetBase returns a new FilePath with the base element replaced with base. func (n FilePath) SetBase(base string) FilePath { return n.Dir().Push(base) } // Pop returns a new FilePath with the last element removed. Pop is an alias // for the Dir method. func (n FilePath) Pop() FilePath { return n.Dir() } // Push returns a new FilePath with elem added to the end func (n FilePath) Push(elem string) FilePath { return JoinPaths(n.String(), elem) } func fullyQualifiedName(p, e Entity) string { return fmt.Sprintf("%s.%s", p.FullyQualifiedName(), e.Name()) } protoc-gen-star-2.0.3/name_test.go000066400000000000000000000137511440740147700170650ustar00rootroot00000000000000package pgs import ( "fmt" "testing" "strings" "github.com/stretchr/testify/assert" ) func TestName_Split(t *testing.T) { t.Parallel() tests := []struct { in string parts []string }{ // camel case {"fooBar", []string{"foo", "Bar"}}, {"FooBar", []string{"Foo", "Bar"}}, {"myJSON", []string{"my", "JSON"}}, {"JSONStringFooBar", []string{"JSON", "String", "Foo", "Bar"}}, {"myJSONString", []string{"my", "JSON", "String"}}, // snake case {"FOO_BAR", []string{"FOO", "BAR"}}, {"foo_bar_baz", []string{"foo", "bar", "baz"}}, {"Foo_Bar", []string{"Foo", "Bar"}}, {"JSONString_Foo_Bar", []string{"JSONString", "Foo", "Bar"}}, // dot notation {"foo.bar", []string{"foo", "bar"}}, {".foo.bar", []string{"", "foo", "bar"}}, 
{".JSONString.Foo.Bar", []string{"", "JSONString", "Foo", "Bar"}}, // leading underscore {"_Privatish", []string{"_Privatish"}}, {"_privatish", []string{"_privatish"}}, {"_foo_bar", []string{"_foo", "bar"}}, {"_Foo_Bar", []string{"_Foo", "Bar"}}, {"_JSON_String", []string{"_JSON", "String"}}, {"_JString", []string{"_J", "String"}}, {"__Double", []string{"_", "Double"}}, // numbers {"abc123", []string{"abc", "123"}}, {"123def", []string{"123", "def"}}, {"abc1def", []string{"abc", "1", "def"}}, {"ABC1DEF", []string{"ABC", "1", "DEF"}}, // empty {"", []string{""}}, } for _, test := range tests { tc := test t.Run(tc.in, func(t *testing.T) { t.Parallel() assert.Equal(t, tc.parts, Name(tc.in).Split()) }) } } func TestName(t *testing.T) { t.Parallel() tests := []struct { in []string ucc string lcc string ssc string lsc string usc string ldn string udn string }{ { in: []string{"fooBar", "FooBar", "foo_bar", "Foo_Bar", "foo_Bar", "foo.Bar", "Foo.Bar"}, ucc: "FooBar", lcc: "fooBar", ssc: "FOO_BAR", lsc: "foo_bar", usc: "Foo_Bar", ldn: "foo.bar", udn: "Foo.Bar", }, { in: []string{"JSONString", "JSON_String", "JSON_string", "JSON.string"}, ucc: "JSONString", lcc: "jsonString", ssc: "JSON_STRING", lsc: "json_string", usc: "JSON_String", ldn: "json.string", udn: "JSON.String", }, { in: []string{"myJSON", "my_JSON", "My_JSON", "my.JSON"}, ucc: "MyJSON", lcc: "myJSON", ssc: "MY_JSON", lsc: "my_json", usc: "My_JSON", ldn: "my.json", udn: "My.JSON", }, } for _, test := range tests { tc := test for _, in := range tc.in { n := Name(in) t.Run(string(n), func(t *testing.T) { t.Parallel() t.Run("UpperCamelCase", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.ucc, n.UpperCamelCase().String()) }) t.Run("lowerCamelCase", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.lcc, n.LowerCamelCase().String()) }) t.Run("SCREAMING_SNAKE_CASE", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.ssc, n.ScreamingSnakeCase().String()) }) t.Run("lower_snake_case", func(t *testing.T) { 
t.Parallel() assert.Equal(t, tc.lsc, n.LowerSnakeCase().String()) }) t.Run("Upper_Snake_Case", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.usc, n.UpperSnakeCase().String()) }) t.Run("lower.dot.notation", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.ldn, n.LowerDotNotation().String()) }) t.Run("Upper.Dot.Notation", func(t *testing.T) { t.Parallel() assert.Equal(t, tc.udn, n.UpperDotNotation().String()) }) }) } } } func TestNameTransformer_Chain(t *testing.T) { t.Parallel() nt := NameTransformer(strings.ToUpper) nt = nt.Chain(func(s string) string { return "_" + s }) assert.Equal(t, "_FOO", nt("foo")) } func TestFilePath(t *testing.T) { t.Parallel() fp := FilePath("alpha/beta/gamma.proto") assert.Equal(t, "alpha/beta/gamma.proto", fp.String()) assert.Equal(t, "alpha/beta", fp.Dir().String()) assert.Equal(t, "gamma.proto", fp.Base()) assert.Equal(t, ".proto", fp.Ext()) assert.Equal(t, "gamma", fp.BaseName()) assert.Equal(t, "alpha/beta/gamma.foo", fp.SetExt(".foo").String()) assert.Equal(t, "alpha/beta/delta.bar", fp.SetBase("delta.bar").String()) assert.Equal(t, "alpha/beta", fp.Pop().String()) assert.Equal(t, "alpha/beta/delta", fp.Dir().Push("delta").String()) } func ExampleName_UpperCamelCase() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).UpperCamelCase()) } // Output: // FooBar // MyJSON // PDFTemplate } func ExampleName_LowerCamelCase() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).LowerCamelCase()) } // Output: // fooBar // myJSON // pdfTemplate } func ExampleName_ScreamingSnakeCase() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).ScreamingSnakeCase()) } // Output: // FOO_BAR // MY_JSON // PDF_TEMPLATE } func ExampleName_LowerSnakeCase() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).LowerSnakeCase()) } // Output: 
// foo_bar // my_json // pdf_template } func ExampleName_UpperSnakeCase() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).UpperSnakeCase()) } // Output: // Foo_Bar // My_JSON // PDF_Template } func ExampleName_SnakeCase() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).SnakeCase()) } // Output: // foo_bar // my_JSON // PDF_Template } func ExampleName_LowerDotNotation() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).LowerDotNotation()) } // Output: // foo.bar // my.json // pdf.template } func ExampleName_UpperDotNotation() { names := []string{ "foo_bar", "myJSON", "PDFTemplate", } for _, n := range names { fmt.Println(Name(n).UpperDotNotation()) } // Output: // Foo.Bar // My.JSON // PDF.Template } protoc-gen-star-2.0.3/node.go000066400000000000000000000074271440740147700160360ustar00rootroot00000000000000package pgs // Node represents any member of the proto descriptor AST. Typically, the // highest level Node is the Package. type Node interface { accept(Visitor) error } // A Visitor exposes methods to walk an AST Node and its children in a depth- // first manner. If the returned Visitor v is non-nil, it will be used to // descend into the children of the current node. If nil, those children will // be skipped. Any error returned will immediately halt execution. type Visitor interface { VisitPackage(Package) (v Visitor, err error) VisitFile(File) (v Visitor, err error) VisitMessage(Message) (v Visitor, err error) VisitEnum(Enum) (v Visitor, err error) VisitEnumValue(EnumValue) (v Visitor, err error) VisitField(Field) (v Visitor, err error) VisitExtension(Extension) (v Visitor, err error) VisitOneOf(OneOf) (v Visitor, err error) VisitService(Service) (v Visitor, err error) VisitMethod(Method) (v Visitor, err error) } // Walk applies a depth-first visitor pattern with v against Node n. 
func Walk(v Visitor, n Node) error { return n.accept(v) } type nilVisitor struct{} // NilVisitor returns a Visitor that always responds with (nil, nil) for all // methods. This is useful as an anonymous embedded struct to satisfy the // Visitor interface for implementations that don't require visiting every Node // type. NilVisitor should be used over PassThroughVisitor if short-circuiting // behavior is desired. func NilVisitor() Visitor { return nilVisitor{} } func (nv nilVisitor) VisitPackage(p Package) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitFile(f File) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitMessage(m Message) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitEnum(e Enum) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitEnumValue(e EnumValue) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitField(f Field) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitExtension(e Extension) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitOneOf(o OneOf) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitService(s Service) (v Visitor, err error) { return nil, nil } func (nv nilVisitor) VisitMethod(m Method) (v Visitor, err error) { return nil, nil } var _ Visitor = nilVisitor{} type passVisitor struct { v Visitor } // PassThroughVisitor returns a Visitor that always responds with (v, nil) for // all methods. This is useful as an anonymous embedded struct to satisfy the // Visitor interface for implementations that need access to deep child nodes // (eg, EnumValue, Field, Method) without implementing each method of the // interface explicitly. 
func PassThroughVisitor(v Visitor) Visitor { return passVisitor{v: v} } func (pv passVisitor) VisitPackage(Package) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitFile(File) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitMessage(Message) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitEnum(Enum) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitEnumValue(EnumValue) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitField(Field) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitExtension(Extension) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitOneOf(OneOf) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitService(Service) (v Visitor, err error) { return pv.v, nil } func (pv passVisitor) VisitMethod(Method) (v Visitor, err error) { return pv.v, nil } var ( _ Visitor = nilVisitor{} _ Visitor = passVisitor{} ) protoc-gen-star-2.0.3/node_nilvisitor_test.go000066400000000000000000000020421440740147700213430ustar00rootroot00000000000000package pgs import ( "fmt" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) type enumPrinter struct { Visitor } func EnumPrinter() Visitor { return enumPrinter{NilVisitor()} } func (p enumPrinter) VisitMessage(m Message) (Visitor, error) { return p, nil } func (p enumPrinter) VisitEnum(e Enum) (Visitor, error) { fmt.Println(e.Name()) return nil, nil } func ExampleNilVisitor() { n := enumNode() p := EnumPrinter() if err := Walk(p, n); err != nil { panic(err) } // Output: // Bar // Foo } func enumNode() Node { // simulating the following proto file: // // syntax="proto3"; // // package fizz; // // message Gadget { // // enum Bar { // // ... // } // // message Gizmo { // enum Foo { // // ... 
// } // } // } sm := &msg{} sm.addEnum(&enum{desc: &descriptor.EnumDescriptorProto{Name: proto.String("Foo")}}) m := &msg{} m.addMessage(sm) m.addEnum(&enum{desc: &descriptor.EnumDescriptorProto{Name: proto.String("Bar")}}) return m } protoc-gen-star-2.0.3/node_passvisitor_test.go000066400000000000000000000020261440740147700215310ustar00rootroot00000000000000package pgs import ( "fmt" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) type fieldPrinter struct { Visitor } func FieldPrinter() Visitor { p := &fieldPrinter{} p.Visitor = PassThroughVisitor(p) return p } func (p fieldPrinter) VisitField(f Field) (Visitor, error) { fmt.Println(f.Name()) return nil, nil } func ExamplePassThroughVisitor() { n := fieldNode() p := FieldPrinter() if err := Walk(p, n); err != nil { panic(err) } // Output: // Foo // Bar } func fieldNode() Node { // simulating the following proto file: // // syntax="proto3"; // // package fizz; // // message Gadget { // string Bar = 1; // // message Gizmo { // int Foo = 1; // } // } sm := &msg{} sm.addField(&field{desc: &descriptor.FieldDescriptorProto{Name: proto.String("Foo")}}) m := &msg{} m.addMessage(sm) m.addField(&field{desc: &descriptor.FieldDescriptorProto{Name: proto.String("Bar")}}) f := &file{} f.addMessage(m) p := &pkg{} p.addFile(f) return p } protoc-gen-star-2.0.3/node_test.go000066400000000000000000000066531440740147700170750ustar00rootroot00000000000000package pgs import ( "errors" "testing" "github.com/stretchr/testify/assert" ) type mockNode struct { Node a func(Visitor) error } func (n mockNode) accept(v Visitor) error { return n.a(v) } func TestWalk(t *testing.T) { t.Parallel() e := errors.New("TestWalk") type mockVisitor struct{ Visitor } mv := mockVisitor{} n := mockNode{} n.a = func(v Visitor) error { assert.Equal(t, mv, v) return e } assert.Equal(t, e, Walk(mv, n)) } func TestNilVisitor(t *testing.T) { t.Parallel() nv := NilVisitor() v, err := nv.VisitPackage(&pkg{}) 
assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitFile(&file{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitService(&service{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitMethod(&method{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitEnum(&enum{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitEnumValue(&enumVal{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitMessage(&msg{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitField(&field{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitExtension(&ext{}) assert.Nil(t, v) assert.NoError(t, err) v, err = nv.VisitOneOf(&oneof{}) assert.Nil(t, v) assert.NoError(t, err) } func TestPassThroughVisitor(t *testing.T) { t.Parallel() nv := NilVisitor() pv := PassThroughVisitor(nv) v, err := pv.VisitPackage(&pkg{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitFile(&file{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitService(&service{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitMethod(&method{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitEnum(&enum{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitEnumValue(&enumVal{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitMessage(&msg{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitField(&field{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitExtension(&ext{}) assert.Equal(t, nv, v) assert.NoError(t, err) v, err = pv.VisitOneOf(&oneof{}) assert.Equal(t, nv, v) assert.NoError(t, err) } type mockVisitor struct { v Visitor err error pkg, file, message, enum, enumvalue, extension, field, oneof, service, method int } func (v *mockVisitor) VisitPackage(p Package) (w Visitor, err error) { v.pkg++ return v.v, v.err } func (v *mockVisitor) VisitFile(f File) (w Visitor, err error) { v.file++ return v.v, v.err } func (v *mockVisitor) VisitMessage(m Message) (w Visitor, 
err error) { v.message++ return v.v, v.err } func (v *mockVisitor) VisitEnum(e Enum) (w Visitor, err error) { v.enum++ return v.v, v.err } func (v *mockVisitor) VisitEnumValue(ev EnumValue) (w Visitor, err error) { v.enumvalue++ return v.v, v.err } func (v *mockVisitor) VisitField(f Field) (w Visitor, err error) { v.field++ return v.v, v.err } func (v *mockVisitor) VisitExtension(e Extension) (w Visitor, err error) { v.extension++ return v.v, v.err } func (v *mockVisitor) VisitOneOf(o OneOf) (w Visitor, err error) { v.oneof++ return v.v, v.err } func (v *mockVisitor) VisitService(s Service) (w Visitor, err error) { v.service++ return v.v, v.err } func (v *mockVisitor) VisitMethod(m Method) (w Visitor, err error) { v.method++ return v.v, v.err } func (v *mockVisitor) Reset() { *v = mockVisitor{v: v.v} } protoc-gen-star-2.0.3/oneof.go000066400000000000000000000054301440740147700162070ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // OneOf describes a OneOf block within a Message. OneOfs behave like C++ // unions, where only one of the contained fields will exist on the Message. type OneOf interface { Entity // Descriptor returns the underlying proto descriptor for this OneOf Descriptor() *descriptor.OneofDescriptorProto // Message returns the parent message for this OneOf. Message() Message // Fields returns all fields contained within this OneOf. Fields() []Field // IsSynthetic returns true if this is a proto3 synthetic oneof. 
// See: https://github.com/protocolbuffers/protobuf/blob/v3.17.0/docs/field_presence.md IsSynthetic() bool setMessage(m Message) addField(f Field) } type oneof struct { desc *descriptor.OneofDescriptorProto msg Message flds []Field fqn string info SourceCodeInfo } func (o *oneof) accept(v Visitor) (err error) { if v == nil { return } _, err = v.VisitOneOf(o) return } func (o *oneof) Name() Name { return Name(o.desc.GetName()) } func (o *oneof) FullyQualifiedName() string { return o.fqn } func (o *oneof) Syntax() Syntax { return o.msg.Syntax() } func (o *oneof) Package() Package { return o.msg.Package() } func (o *oneof) File() File { return o.msg.File() } func (o *oneof) BuildTarget() bool { return o.msg.BuildTarget() } func (o *oneof) SourceCodeInfo() SourceCodeInfo { return o.info } func (o *oneof) Descriptor() *descriptor.OneofDescriptorProto { return o.desc } func (o *oneof) Message() Message { return o.msg } func (o *oneof) setMessage(m Message) { o.msg = m } func (o *oneof) IsSynthetic() bool { return o.Syntax() == Proto3 && len(o.flds) == 1 && !o.flds[0].InRealOneOf() } func (o *oneof) Imports() (i []File) { // Mapping for avoiding duplicate entries mp := make(map[string]File, len(o.flds)) for _, f := range o.flds { for _, imp := range f.Imports() { mp[imp.File().Name().String()] = imp } } for _, f := range mp { i = append(i, f) } return } func (o *oneof) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (ok bool, err error) { return extension(o.desc.GetOptions(), desc, &ext) } func (o *oneof) Fields() []Field { f := make([]Field, len(o.flds)) copy(f, o.flds) return f } func (o *oneof) addField(f Field) { f.setOneOf(o) o.flds = append(o.flds, f) } func (o *oneof) childAtPath(path []int32) Entity { if len(path) == 0 { return o } return nil } func (o *oneof) addSourceCodeInfo(info SourceCodeInfo) { o.info = info } var _ OneOf = (*oneof)(nil) 
protoc-gen-star-2.0.3/oneof_test.go000066400000000000000000000064111440740147700172460ustar00rootroot00000000000000package pgs import ( "testing" "errors" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestOneof_Name(t *testing.T) { t.Parallel() o := &oneof{desc: &descriptor.OneofDescriptorProto{Name: proto.String("foo")}} assert.Equal(t, "foo", o.Name().String()) } func TestOneOf_FullyQualifiedName(t *testing.T) { t.Parallel() o := &oneof{fqn: "one_of"} assert.Equal(t, o.fqn, o.FullyQualifiedName()) } func TestOneof_Syntax(t *testing.T) { t.Parallel() m := dummyMsg() o := &oneof{} m.addOneOf(o) assert.Equal(t, m.Syntax(), o.Syntax()) } func TestOneof_Package(t *testing.T) { t.Parallel() m := dummyMsg() o := &oneof{} m.addOneOf(o) assert.NotNil(t, o.Package()) assert.Equal(t, m.Package(), o.Package()) } func TestOneof_File(t *testing.T) { t.Parallel() m := dummyMsg() o := &oneof{} m.addOneOf(o) assert.NotNil(t, o.File()) assert.Equal(t, m.File(), o.File()) } func TestOneof_BuildTarget(t *testing.T) { t.Parallel() m := dummyMsg() o := &oneof{} m.addOneOf(o) assert.False(t, o.BuildTarget()) m.setParent(&file{buildTarget: true}) assert.True(t, o.BuildTarget()) } func TestOneof_Descriptor(t *testing.T) { t.Parallel() o := &oneof{desc: &descriptor.OneofDescriptorProto{}} assert.Equal(t, o.desc, o.Descriptor()) } func TestOneof_Message(t *testing.T) { t.Parallel() m := dummyMsg() o := &oneof{} m.addOneOf(o) assert.Equal(t, m, o.Message()) } func TestOneof_Imports(t *testing.T) { t.Parallel() o := &oneof{} assert.Empty(t, o.Imports()) o.addField(&mockField{i: []File{&file{}, &file{}}, Field: &field{}}) assert.Len(t, o.Imports(), 1) f := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} o.addField(&mockField{i: []File{f}, Field: &field{}}) assert.Len(t, o.Imports(), 2) } func TestOneof_Extension(t *testing.T) { // cannot be parallel o := &oneof{desc: 
&descriptor.OneofDescriptorProto{}} assert.NotPanics(t, func() { o.Extension(nil, nil) }) } func TestOneof_Fields(t *testing.T) { t.Parallel() o := &oneof{} assert.Empty(t, o.Fields()) o.addField(&field{}) assert.Len(t, o.Fields(), 1) } func TestOneof_IsSynthetic(t *testing.T) { t.Parallel() o := &oneof{msg: &msg{parent: dummyFile()}} assert.False(t, o.IsSynthetic()) o.flds = []Field{dummyField()} o.flds[0].setOneOf(o) assert.False(t, o.IsSynthetic()) o.flds = []Field{dummyOneOfField(true)} assert.True(t, o.IsSynthetic()) } func TestOneof_Accept(t *testing.T) { t.Parallel() o := &oneof{} assert.NoError(t, o.accept(nil)) v := &mockVisitor{err: errors.New("")} assert.Error(t, o.accept(v)) assert.Equal(t, 1, v.oneof) } func TestOneof_ChildAtPath(t *testing.T) { t.Parallel() o := &oneof{} assert.Equal(t, o, o.childAtPath(nil)) assert.Nil(t, o.childAtPath([]int32{1})) } type mockOneOf struct { OneOf i []File m Message err error } func (o *mockOneOf) Imports() []File { return o.i } func (o *mockOneOf) setMessage(m Message) { o.m = m } func (o *mockOneOf) accept(v Visitor) error { _, err := v.VisitOneOf(o) if o.err != nil { return o.err } return err } func dummyOneof() *oneof { m := dummyMsg() o := &oneof{desc: &descriptor.OneofDescriptorProto{Name: proto.String("oneof")}} m.addOneOf(o) return o } protoc-gen-star-2.0.3/package.go000066400000000000000000000020001440740147700164620ustar00rootroot00000000000000package pgs import descriptor "google.golang.org/protobuf/types/descriptorpb" // Package is a container that encapsulates all the files under a single // package namespace. type Package interface { Node // The name of the proto package. 
ProtoName() Name // All the files loaded for this Package Files() []File addFile(f File) setComments(c string) } type pkg struct { fd *descriptor.FileDescriptorProto files []File comments string } func (p *pkg) ProtoName() Name { return Name(p.fd.GetPackage()) } func (p *pkg) Comments() string { return p.comments } func (p *pkg) Files() []File { return p.files } func (p *pkg) accept(v Visitor) (err error) { if v == nil { return nil } if v, err = v.VisitPackage(p); err != nil || v == nil { return } for _, f := range p.Files() { if err = f.accept(v); err != nil { return } } return } func (p *pkg) addFile(f File) { f.setPackage(p) p.files = append(p.files, f) } func (p *pkg) setComments(comments string) { p.comments = comments } protoc-gen-star-2.0.3/package_test.go000066400000000000000000000030451440740147700175330ustar00rootroot00000000000000package pgs import ( "testing" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" "errors" "github.com/stretchr/testify/assert" ) func TestPkg_ProtoName(t *testing.T) { t.Parallel() p := dummyPkg() assert.Equal(t, p.fd.GetPackage(), p.ProtoName().String()) } func TestPkg_Files(t *testing.T) { t.Parallel() p := &pkg{} assert.Empty(t, p.Files()) p.addFile(&file{}) p.addFile(&file{}) p.addFile(&file{}) assert.Len(t, p.Files(), 3) } func TestPkg_AddFile(t *testing.T) { t.Parallel() p := &pkg{} f := &file{} p.addFile(f) assert.Len(t, p.files, 1) assert.EqualValues(t, f, p.files[0]) } func TestPkg_Accept(t *testing.T) { t.Parallel() p := &pkg{ files: []File{&mockFile{}}, } assert.Nil(t, p.accept(nil)) v := &mockVisitor{} assert.NoError(t, p.accept(v)) assert.Equal(t, 1, v.pkg) assert.Zero(t, v.file) v.Reset() v.err = errors.New("foobar") assert.EqualError(t, p.accept(v), "foobar") assert.Equal(t, 1, v.pkg) assert.Zero(t, v.file) v.Reset() v.v = v assert.NoError(t, p.accept(v)) assert.Equal(t, 1, v.pkg) assert.Equal(t, 1, v.file) v.Reset() p.addFile(&mockFile{err: errors.New("fizzbuzz")}) 
assert.EqualError(t, p.accept(v), "fizzbuzz") assert.Equal(t, 1, v.pkg) assert.Equal(t, 2, v.file) } func TestPackage_Comments(t *testing.T) { t.Parallel() pkg := dummyPkg() pkg.setComments("foobar") assert.Equal(t, "foobar", pkg.Comments()) } func dummyPkg() *pkg { return &pkg{ fd: &descriptor.FileDescriptorProto{Package: proto.String("pkg_name")}, } } protoc-gen-star-2.0.3/parameters.go000066400000000000000000000140321440740147700172420ustar00rootroot00000000000000package pgs import ( "fmt" "sort" "strconv" "strings" "time" ) const outputPathKey = "output_path" // Parameters provides a convenience for accessing and modifying the parameters // passed into the protoc-gen-star plugin. type Parameters map[string]string // ParseParameters converts the raw params string provided by protoc into a // representative mapping. func ParseParameters(p string) (params Parameters) { parts := strings.Split(p, ",") params = make(map[string]string, len(parts)) for _, p = range parts { if i := strings.Index(p, "="); i < 0 { params[p] = "" } else { params[p[:i]] = p[i+1:] } } return } // Clone creates an independent copy of Parameters p. func (p Parameters) Clone() Parameters { out := make(Parameters, len(p)) for k, v := range p { out[k] = v } return out } // OutputPath returns the protoc-gen-star special parameter. If not set in the // execution of protoc, "." is returned, indicating that output is relative to // the (unknown) output location for sub-plugins or the directory where protoc // is executed for a Module. Setting "output_path" during the protoc execution // ensures that Modules can know absolutely where to generate code. func (p Parameters) OutputPath() string { return p.StrDefault(outputPathKey, ".") } // SetOutputPath sets the protoc-gen-star OutputPath parameter. This is useful // for overriding the behavior of the ImportPath at runtime. func (p Parameters) SetOutputPath(path string) { p.SetStr(outputPathKey, path) } // String satisfies the string.Stringer interface. 
This method returns p in the // format it is provided to the protoc execution. Output of this function is // always stable; parameters are sorted before the string is emitted. func (p Parameters) String() string { parts := make([]string, 0, len(p)) for k, v := range p { if v == "" { parts = append(parts, k) } else { parts = append(parts, fmt.Sprintf("%s=%s", k, v)) } } sort.Strings(parts) return strings.Join(parts, ",") } // Str returns the parameter with name, returning an empty string if it is not // set. func (p Parameters) Str(name string) string { return p.StrDefault(name, "") } // StrDefault returns the parameter with name, or if it is unset, returns the // def default value. func (p Parameters) StrDefault(name string, def string) string { if s, ok := p[name]; ok { return s } return def } // SetStr sets the parameter name to s. func (p Parameters) SetStr(name string, s string) { p[name] = s } // Int returns the parameter with name, returning zero if it is not set. An // error is returned if the value cannot be parsed as an int. func (p Parameters) Int(name string) (int, error) { return p.IntDefault(name, 0) } // IntDefault returns the parameter with name, or if it is unset, returns the // def default value. An error is returned if the value cannot be parsed as an // int. func (p Parameters) IntDefault(name string, def int) (int, error) { if s, ok := p[name]; ok { return strconv.Atoi(s) } return def, nil } // SetInt sets the parameter name to i. func (p Parameters) SetInt(name string, i int) { p[name] = strconv.Itoa(i) } // Uint returns the parameter with name, returning zero if it is not set. An // error is returned if the value cannot be parsed as a base-10 uint. func (p Parameters) Uint(name string) (uint, error) { return p.UintDefault(name, 0) } // UintDefault returns the parameter with name, or if it is unset, returns the // def default value. An error is returned if the value cannot be parsed as a // base-10 uint. 
func (p Parameters) UintDefault(name string, def uint) (uint, error) { if s, ok := p[name]; ok { ui, err := strconv.ParseUint(s, 10, strconv.IntSize) return uint(ui), err } return def, nil } // SetUint sets the parameter name to ui. func (p Parameters) SetUint(name string, ui uint) { p[name] = strconv.FormatUint(uint64(ui), 10) } // Float returns the parameter with name, returning zero if it is // not set. An error is returned if the value cannot be parsed as a float64 func (p Parameters) Float(name string) (float64, error) { return p.FloatDefault(name, 0) } // FloatDefault returns the parameter with name, or if it is unset, returns the // def default value. An error is returned if the value cannot be parsed as a // float64. func (p Parameters) FloatDefault(name string, def float64) (float64, error) { if s, ok := p[name]; ok { return strconv.ParseFloat(s, 64) } return def, nil } // SetFloat sets the parameter name to f. func (p Parameters) SetFloat(name string, f float64) { p[name] = strconv.FormatFloat(f, 'g', -1, 64) } // Bool returns the parameter with name, returning false if it is not set. An // error is returned if the value cannot be parsed as a boolean. Empty values // are considered true. func (p Parameters) Bool(name string) (bool, error) { return p.BoolDefault(name, false) } // BoolDefault returns the parameter with name, or if it is unset, returns the // def default value. An error is returned if the value cannot be parsed as a // boolean. Empty values are considered true. func (p Parameters) BoolDefault(name string, def bool) (bool, error) { if s, ok := p[name]; ok { if strings.TrimSpace(s) == "" { return true, nil } return strconv.ParseBool(s) } return def, nil } // SetBool sets the parameter name to b. func (p Parameters) SetBool(name string, b bool) { p[name] = strconv.FormatBool(b) } // Duration returns the parameter with name, returning zero if it is not set. // An error is returned if the value cannot be parsed as a time.Duration. 
func (p Parameters) Duration(name string) (time.Duration, error) { return p.DurationDefault(name, 0) } // DurationDefault returns the parameter with name, or if it is unset, returns // the def default value. An error is returned if the value cannot be parsed as // a time.Duration. func (p Parameters) DurationDefault(name string, def time.Duration) (time.Duration, error) { if s, ok := p[name]; ok { return time.ParseDuration(s) } return def, nil } // SetDuration sets the parameter name to d. func (p Parameters) SetDuration(name string, d time.Duration) { p[name] = d.String() } protoc-gen-star-2.0.3/parameters_test.go000066400000000000000000000103661440740147700203070ustar00rootroot00000000000000package pgs import ( "testing" "time" "github.com/stretchr/testify/assert" ) func TestParameters_OutputPath(t *testing.T) { t.Parallel() p := Parameters{} assert.Equal(t, ".", p.OutputPath()) p.SetOutputPath("foo") assert.Equal(t, "foo", p.OutputPath()) } func TestParseParameters(t *testing.T) { t.Parallel() tests := []struct { in string out Parameters }{ { "foo=bar", Parameters{"foo": "bar"}, }, { "fizz", Parameters{"fizz": ""}, }, { "foo=bar,fizz=buzz", Parameters{"foo": "bar", "fizz": "buzz"}, }, { "foo=bar,foo", Parameters{"foo": ""}, }, } for _, test := range tests { tc := test t.Run(tc.in, func(t *testing.T) { assert.Equal(t, tc.out, ParseParameters(tc.in)) }) } } func TestParameters_String(t *testing.T) { t.Parallel() tests := []struct { in Parameters out string }{ { Parameters{"foo": "bar"}, "foo=bar", }, { Parameters{"fizz": ""}, "fizz", }, { Parameters{"foo": "bar", "fizz": ""}, "fizz,foo=bar", }, } for _, test := range tests { tc := test t.Run(tc.out, func(t *testing.T) { assert.Equal(t, tc.out, tc.in.String()) }) } } func TestParameters_Str(t *testing.T) { t.Parallel() p := Parameters{"foo": "bar"} assert.Equal(t, "bar", p.Str("foo")) assert.Empty(t, p.Str("baz")) assert.Equal(t, "fizz", p.StrDefault("baz", "fizz")) p.SetStr("baz", "buzz") assert.Equal(t, "buzz", 
p.Str("baz")) } func TestParameters_Int(t *testing.T) { t.Parallel() p := Parameters{"foo": "456", "fizz": "buzz"} out, err := p.Int("foo") assert.NoError(t, err) assert.Equal(t, 456, out) _, err = p.Int("fizz") assert.Error(t, err) out, err = p.Int("baz") assert.NoError(t, err) assert.Zero(t, out) out, err = p.IntDefault("baz", 123) assert.NoError(t, err) assert.Equal(t, 123, out) p.SetInt("baz", 789) out, err = p.Int("baz") assert.NoError(t, err) assert.Equal(t, 789, out) } func TestParameters_Uint(t *testing.T) { t.Parallel() p := Parameters{"foo": "456", "fizz": "-789"} out, err := p.Uint("foo") assert.NoError(t, err) assert.Equal(t, uint(456), out) _, err = p.Uint("fizz") assert.Error(t, err) out, err = p.Uint("buzz") assert.NoError(t, err) assert.Zero(t, out) out, err = p.UintDefault("baz", 123) assert.NoError(t, err) assert.Equal(t, uint(123), out) p.SetUint("baz", 999) out, err = p.Uint("baz") assert.NoError(t, err) assert.Equal(t, uint(999), out) } func TestParameters_Float(t *testing.T) { t.Parallel() p := Parameters{"foo": "1.23", "fizz": "buzz"} out, err := p.Float("foo") assert.NoError(t, err) assert.Equal(t, 1.23, out) _, err = p.Float("fizz") assert.Error(t, err) out, err = p.Float("baz") assert.NoError(t, err) assert.Zero(t, out) out, err = p.FloatDefault("baz", 4.56) assert.NoError(t, err) assert.Equal(t, 4.56, out) p.SetFloat("baz", -7.89) out, err = p.Float("baz") assert.NoError(t, err) assert.Equal(t, -7.89, out) } func TestParameters_Bool(t *testing.T) { t.Parallel() p := Parameters{"foo": "true", "bar": "", "fizz": "buzz"} out, err := p.Bool("foo") assert.NoError(t, err) assert.True(t, out) out, err = p.Bool("bar") assert.NoError(t, err) assert.True(t, out) _, err = p.Bool("fizz") assert.Error(t, err) out, err = p.Bool("baz") assert.NoError(t, err) assert.False(t, out) out, err = p.BoolDefault("baz", true) assert.NoError(t, err) assert.True(t, out) p.SetBool("baz", true) out, err = p.Bool("baz") assert.NoError(t, err) assert.True(t, out) } 
func TestParameters_Duration(t *testing.T) { t.Parallel() p := Parameters{"foo": "123s", "fizz": "buzz"} out, err := p.Duration("foo") assert.NoError(t, err) assert.Equal(t, 123*time.Second, out) _, err = p.Duration("fizz") assert.Error(t, err) out, err = p.Duration("baz") assert.NoError(t, err) assert.Zero(t, out) out, err = p.DurationDefault("baz", 456*time.Second) assert.NoError(t, err) assert.Equal(t, 456*time.Second, out) p.SetDuration("baz", 789*time.Second) out, err = p.Duration("baz") assert.NoError(t, err) assert.Equal(t, 789*time.Second, out) } func TestParameters_Clone(t *testing.T) { t.Parallel() orig := Parameters{"foo": "bar", "fizz": "buzz"} clone := orig.Clone() assert.Equal(t, orig, clone) clone.SetStr("foo", "baz") assert.NotEqual(t, orig, clone) } protoc-gen-star-2.0.3/persister.go000066400000000000000000000121571440740147700171250ustar00rootroot00000000000000package pgs import ( "os" "path/filepath" "strings" "github.com/spf13/afero" "google.golang.org/protobuf/proto" plugin_go "google.golang.org/protobuf/types/pluginpb" ) type persister interface { SetDebugger(d Debugger) SetFS(fs afero.Fs) SetSupportedFeatures(f *uint64) AddPostProcessor(proc ...PostProcessor) Persist(a ...Artifact) *plugin_go.CodeGeneratorResponse } type stdPersister struct { Debugger fs afero.Fs procs []PostProcessor supportedFeatures *uint64 } func newPersister() *stdPersister { return &stdPersister{fs: afero.NewOsFs()} } func (p *stdPersister) SetDebugger(d Debugger) { p.Debugger = d } func (p *stdPersister) SetFS(fs afero.Fs) { p.fs = fs } func (p *stdPersister) SetSupportedFeatures(f *uint64) { p.supportedFeatures = f } func (p *stdPersister) AddPostProcessor(proc ...PostProcessor) { p.procs = append(p.procs, proc...) 
} func (p *stdPersister) Persist(arts ...Artifact) *plugin_go.CodeGeneratorResponse { resp := new(plugin_go.CodeGeneratorResponse) resp.SupportedFeatures = p.supportedFeatures for _, a := range arts { switch a := a.(type) { case GeneratorFile: f, err := a.ProtoFile() p.CheckErr(err, "unable to convert ", a.Name, " to proto") f.Content = proto.String(p.postProcess(a, f.GetContent())) p.insertFile(resp, f, a.Overwrite) case GeneratorTemplateFile: f, err := a.ProtoFile() p.CheckErr(err, "unable to convert ", a.Name, " to proto") f.Content = proto.String(p.postProcess(a, f.GetContent())) p.insertFile(resp, f, a.Overwrite) case GeneratorAppend: f, err := a.ProtoFile() p.CheckErr(err, "unable to convert append for ", a.FileName, " to proto") f.Content = proto.String(p.postProcess(a, f.GetContent())) n, _ := cleanGeneratorFileName(a.FileName) p.insertAppend(resp, n, f) case GeneratorTemplateAppend: f, err := a.ProtoFile() p.CheckErr(err, "unable to convert append for ", a.FileName, " to proto") f.Content = proto.String(p.postProcess(a, f.GetContent())) n, _ := cleanGeneratorFileName(a.FileName) p.insertAppend(resp, n, f) case GeneratorInjection: f, err := a.ProtoFile() p.CheckErr(err, "unable to convert injection ", a.InsertionPoint, " for ", a.FileName, " to proto") f.Content = proto.String(p.postProcess(a, f.GetContent())) p.insertFile(resp, f, false) case GeneratorTemplateInjection: f, err := a.ProtoFile() p.CheckErr(err, "unable to convert injection ", a.InsertionPoint, " for ", a.FileName, " to proto") f.Content = proto.String(p.postProcess(a, f.GetContent())) p.insertFile(resp, f, false) case CustomFile: p.writeFile( a.Name, []byte(p.postProcess(a, a.Contents)), a.Overwrite, a.Perms, ) case CustomTemplateFile: content, err := a.render() p.CheckErr(err, "unable to render CustomTemplateFile: ", a.Name) content = p.postProcess(a, content) p.writeFile( a.Name, []byte(content), a.Overwrite, a.Perms, ) case GeneratorError: if resp.Error == nil { resp.Error = 
proto.String(a.Message) continue } resp.Error = proto.String(strings.Join([]string{resp.GetError(), a.Message}, "; ")) default: p.Failf("unrecognized artifact type: %T", a) } } return resp } func (p *stdPersister) tailOfFile(resp *plugin_go.CodeGeneratorResponse, name string) int { tail := p.indexOfFile(resp, name) if tail == -1 { return -1 } f := resp.GetFile() for i := tail + 1; i < len(f); i++ { if f[i].GetName() != "" { break } tail = i } return tail } func (p *stdPersister) indexOfFile(resp *plugin_go.CodeGeneratorResponse, name string) int { for i, f := range resp.GetFile() { if f.GetName() == name && f.InsertionPoint == nil { return i } } return -1 } func (p *stdPersister) insertFile(resp *plugin_go.CodeGeneratorResponse, f *plugin_go.CodeGeneratorResponse_File, overwrite bool) { if overwrite { if i := p.indexOfFile(resp, f.GetName()); i >= 0 { resp.File[i] = f return } } resp.File = append(resp.File, f) } func (p *stdPersister) insertAppend(resp *plugin_go.CodeGeneratorResponse, name string, f *plugin_go.CodeGeneratorResponse_File) { i := p.tailOfFile(resp, name) p.Assert(i > -1, "append target ", name, " missing") resp.File = append( resp.File[:i+1], append( []*plugin_go.CodeGeneratorResponse_File{f}, resp.File[i+1:]..., )..., ) } func (p *stdPersister) writeFile(name string, content []byte, overwrite bool, perms os.FileMode) { dir := filepath.Dir(name) p.CheckErr( p.fs.MkdirAll(dir, 0755), "unable to create directory:", dir) exists, err := afero.Exists(p.fs, name) p.CheckErr(err, "unable to check file exists:", name) if exists { if !overwrite { p.Debug("file", name, "exists, skipping") return } p.Debug("file", name, "exists, overwriting") } p.CheckErr( afero.WriteFile(p.fs, name, content, perms), "unable to write file:", name) } func (p *stdPersister) postProcess(a Artifact, in string) string { var err error b := []byte(in) for _, pp := range p.procs { if pp.Match(a) { b, err = pp.Process(b) p.CheckErr(err, "failed post-processing") } } return string(b) } 
protoc-gen-star-2.0.3/persister_test.go000066400000000000000000000174101440740147700201610ustar00rootroot00000000000000package pgs import ( "html/template" "testing" "errors" "github.com/spf13/afero" "github.com/stretchr/testify/assert" ) func TestPersister_Persist_Unrecognized(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) p.Persist(nil) assert.True(t, d.Failed()) } func TestPersister_Persist_GeneratorFile(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist( GeneratorFile{ Name: "foo", Contents: "bar", }, GeneratorFile{ Name: "quux", Contents: "baz", }, GeneratorFile{ Name: "foo", Contents: "fizz", Overwrite: true, }) assert.Len(t, resp.File, 2) assert.Equal(t, "foo", resp.File[0].GetName()) assert.Equal(t, "fizz", resp.File[0].GetContent()) } var genTpl = template.Must(template.New("good").Parse("{{ . }}")) func TestPersister_Persist_GeneratorTemplateFile(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist( GeneratorTemplateFile{ Name: "foo", TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "bar", }, }, GeneratorTemplateFile{ Name: "quux", TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "baz", }, }, GeneratorTemplateFile{ Name: "foo", TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "fizz", }, Overwrite: true, }, ) assert.Len(t, resp.File, 2) assert.Equal(t, "foo", resp.File[0].GetName()) assert.Equal(t, "fizz", resp.File[0].GetContent()) } func TestPersister_Persist_GeneratorAppend(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist( GeneratorFile{Name: "foo"}, GeneratorFile{Name: "bar"}, GeneratorAppend{ FileName: "foo", Contents: "baz", }, GeneratorAppend{ FileName: "bar", Contents: "quux", }, ) assert.Len(t, resp.File, 4) assert.Equal(t, "", 
resp.File[1].GetName()) assert.Equal(t, "baz", resp.File[1].GetContent()) assert.Equal(t, "", resp.File[3].GetName()) assert.Equal(t, "quux", resp.File[3].GetContent()) p.Persist(GeneratorAppend{FileName: "doesNotExist"}) assert.True(t, d.Failed()) } func TestPersister_Persist_GeneratorAppendSeveral(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist( GeneratorFile{ Name: "file", Contents: "foo", }, GeneratorAppend{ FileName: "file", Contents: "bar", }, GeneratorAppend{ FileName: "file", Contents: "baz", }, ) assert.Len(t, resp.File, 3) assert.Equal(t, "foo", resp.File[0].GetContent()) assert.Equal(t, "bar", resp.File[1].GetContent()) assert.Equal(t, "baz", resp.File[2].GetContent()) } func TestPersister_Persist_GeneratorTemplateAppend(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist( GeneratorFile{Name: "foo"}, GeneratorFile{Name: "bar"}, GeneratorTemplateAppend{ FileName: "foo", TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "baz", }, }, GeneratorTemplateAppend{ FileName: "bar", TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "quux", }, }, ) assert.Len(t, resp.File, 4) assert.Equal(t, "", resp.File[1].GetName()) assert.Equal(t, "baz", resp.File[1].GetContent()) assert.Equal(t, "", resp.File[3].GetName()) assert.Equal(t, "quux", resp.File[3].GetContent()) resp = p.Persist(GeneratorTemplateAppend{ FileName: "doesNotExist", TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "baz", }, }) assert.True(t, d.Failed()) } func TestPersister_Persist_GeneratorInjection(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist(GeneratorInjection{ FileName: "foo", InsertionPoint: "bar", Contents: "baz", }) assert.Len(t, resp.File, 1) assert.Equal(t, "foo", resp.File[0].GetName()) assert.Equal(t, "bar", 
resp.File[0].GetInsertionPoint()) assert.Equal(t, "baz", resp.File[0].GetContent()) } func TestPersister_Persist_GeneratorTemplateInjection(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist(GeneratorTemplateInjection{ FileName: "foo", InsertionPoint: "bar", TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "baz", }, }) assert.Len(t, resp.File, 1) assert.Equal(t, "foo", resp.File[0].GetName()) assert.Equal(t, "bar", resp.File[0].GetInsertionPoint()) assert.Equal(t, "baz", resp.File[0].GetContent()) } func TestPersister_Persist_CustomFile(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) p.Persist(CustomFile{ Name: "foo/bar/baz.txt", Perms: 0655, Contents: "fizz", }) b, err := afero.ReadFile(fs, "foo/bar/baz.txt") assert.NoError(t, err) assert.Equal(t, "fizz", string(b)) p.Persist(CustomFile{ Name: "foo/bar/baz.txt", Perms: 0655, Contents: "buzz", }) b, err = afero.ReadFile(fs, "foo/bar/baz.txt") assert.NoError(t, err) assert.Equal(t, "fizz", string(b)) p.Persist(CustomFile{ Name: "foo/bar/baz.txt", Perms: 0655, Contents: "buzz", Overwrite: true, }) b, err = afero.ReadFile(fs, "foo/bar/baz.txt") assert.NoError(t, err) assert.Equal(t, "buzz", string(b)) } func TestPersister_Persist_CustomTemplateFile(t *testing.T) { t.Parallel() d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) p.Persist(CustomTemplateFile{ Name: "foo/bar/baz.txt", Perms: 0655, TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "fizz", }, }) b, err := afero.ReadFile(fs, "foo/bar/baz.txt") assert.NoError(t, err) assert.Equal(t, "fizz", string(b)) p.Persist(CustomTemplateFile{ Name: "foo/bar/baz.txt", Perms: 0655, TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "buzz", }, }) b, err = afero.ReadFile(fs, "foo/bar/baz.txt") assert.NoError(t, err) assert.Equal(t, "fizz", string(b)) 
p.Persist(CustomTemplateFile{ Name: "foo/bar/baz.txt", Perms: 0655, TemplateArtifact: TemplateArtifact{ Template: genTpl, Data: "buzz", }, Overwrite: true, }) b, err = afero.ReadFile(fs, "foo/bar/baz.txt") assert.NoError(t, err) assert.Equal(t, "buzz", string(b)) } func TestPersister_AddPostProcessor(t *testing.T) { t.Parallel() p := dummyPersister(InitMockDebugger()) good := &mockPP{match: true, out: []byte("good")} bad := &mockPP{err: errors.New("should not be called")} p.AddPostProcessor(good, bad) out := p.postProcess(GeneratorFile{}, "") assert.Equal(t, "good", out) } func dummyPersister(d Debugger) *stdPersister { return &stdPersister{ Debugger: d, fs: afero.NewMemMapFs(), } } func TestPersister_Persist_GeneratorError(t *testing.T) { t.Parallel() cases := map[string]struct { input []Artifact output string }{ "no errors": { []Artifact{}, "", }, "one error": { []Artifact{GeneratorError{Message: "something went wrong"}}, "something went wrong", }, "two errors": { []Artifact{ GeneratorError{Message: "something went wrong"}, GeneratorError{Message: "something else went wrong, too"}, }, "something went wrong; something else went wrong, too", }, } for desc, tc := range cases { t.Run(desc, func(t *testing.T) { d := InitMockDebugger() p := dummyPersister(d) fs := afero.NewMemMapFs() p.SetFS(fs) resp := p.Persist(tc.input...) assert.Len(t, resp.File, 0) assert.Equal(t, tc.output, resp.GetError()) }) } } protoc-gen-star-2.0.3/post_process.go000066400000000000000000000006771440740147700176340ustar00rootroot00000000000000package pgs // A PostProcessor modifies the output of an Artifact before final rendering. type PostProcessor interface { // Match returns true if the PostProcess should be applied to the Artifact. // Process is called immediately after Match for the same Artifact. Match(a Artifact) bool // Process receives the rendered artifact and returns the processed bytes or // an error if something goes wrong. 
Process(in []byte) ([]byte, error) } protoc-gen-star-2.0.3/post_process_test.go000066400000000000000000000003421440740147700206600ustar00rootroot00000000000000package pgs type mockPP struct { match bool out []byte err error } func (pp mockPP) Match(a Artifact) bool { return pp.match } func (pp mockPP) Process(in []byte) ([]byte, error) { return pp.out, pp.err } protoc-gen-star-2.0.3/proto.go000066400000000000000000000127631440740147700162530ustar00rootroot00000000000000package pgs import ( descriptor "google.golang.org/protobuf/types/descriptorpb" ) // Syntax describes the proto syntax used to encode the proto file type Syntax string const ( // Proto2 syntax permits the use of "optional" and "required" prefixes on // fields. Most of the field types in the generated go structs are pointers. // See: https://developers.google.com/protocol-buffers/docs/proto Proto2 Syntax = "" // Proto3 syntax permits the use of "optional" field presence. Non optional fields default to the zero // value of that particular type if not defined. // Most of the field types in the generated go structs are value types. // See: https://github.com/protocolbuffers/protobuf/blob/v3.17.0/docs/field_presence.md#presence-in-proto3-apis Proto3 Syntax = "proto3" ) // SupportsRequiredPrefix returns true if s supports "optional" and // "required" identifiers on message fields. Only Proto2 syntax supports this // feature. func (s Syntax) SupportsRequiredPrefix() bool { return s == Proto2 } // String returns a string representation of the syntax. func (s Syntax) String() string { return string(s) } // ProtoLabel wraps the FieldDescriptorProto_Label enum for better readability. // It is a 1-to-1 conversion. type ProtoLabel descriptor.FieldDescriptorProto_Label const ( // Optional (in the context of Proto2 syntax) identifies that the field may // be unset in the proto message. In Proto3 syntax, all fields are considered // Optional and default to their zero value. 
Optional = ProtoLabel(descriptor.FieldDescriptorProto_LABEL_OPTIONAL) // Required (in the context of Proto2 syntax) identifies that the field must // be set in the proto message. In Proto3 syntax, no fields can be identified // as Required. Required = ProtoLabel(descriptor.FieldDescriptorProto_LABEL_REQUIRED) // Repeated identifies that the field either permits multiple entries // (repeated) or is a map (map). Determining which requires further // evaluation of the descriptor and whether or not the embedded message is // identified as a MapEntry (see IsMap on FieldType). Repeated = ProtoLabel(descriptor.FieldDescriptorProto_LABEL_REPEATED) ) // Proto returns the FieldDescriptorProto_Label for this ProtoLabel. This // method is exclusively used to improve readability without having to switch // the types. func (pl ProtoLabel) Proto() descriptor.FieldDescriptorProto_Label { return descriptor.FieldDescriptorProto_Label(pl) } // ProtoPtr returns a pointer to the FieldDescriptorProto_Label for this // ProtoLabel. func (pl ProtoLabel) ProtoPtr() *descriptor.FieldDescriptorProto_Label { l := pl.Proto() return &l } // String returns a string representation of the proto label. func (pl ProtoLabel) String() string { return pl.Proto().String() } // ProtoType wraps the FieldDescriptorProto_Type enum for better readability // and utility methods. It is a 1-to-1 conversion. type ProtoType descriptor.FieldDescriptorProto_Type // 1-to-1 mapping of FieldDescriptorProto_Type enum to ProtoType. While all are // listed here, group types are not supported by this library. 
const ( DoubleT = ProtoType(descriptor.FieldDescriptorProto_TYPE_DOUBLE) FloatT = ProtoType(descriptor.FieldDescriptorProto_TYPE_FLOAT) Int64T = ProtoType(descriptor.FieldDescriptorProto_TYPE_INT64) UInt64T = ProtoType(descriptor.FieldDescriptorProto_TYPE_UINT64) Int32T = ProtoType(descriptor.FieldDescriptorProto_TYPE_INT32) Fixed64T = ProtoType(descriptor.FieldDescriptorProto_TYPE_FIXED64) Fixed32T = ProtoType(descriptor.FieldDescriptorProto_TYPE_FIXED32) BoolT = ProtoType(descriptor.FieldDescriptorProto_TYPE_BOOL) StringT = ProtoType(descriptor.FieldDescriptorProto_TYPE_STRING) GroupT = ProtoType(descriptor.FieldDescriptorProto_TYPE_GROUP) MessageT = ProtoType(descriptor.FieldDescriptorProto_TYPE_MESSAGE) BytesT = ProtoType(descriptor.FieldDescriptorProto_TYPE_BYTES) UInt32T = ProtoType(descriptor.FieldDescriptorProto_TYPE_UINT32) EnumT = ProtoType(descriptor.FieldDescriptorProto_TYPE_ENUM) SFixed32 = ProtoType(descriptor.FieldDescriptorProto_TYPE_SFIXED32) SFixed64 = ProtoType(descriptor.FieldDescriptorProto_TYPE_SFIXED64) SInt32 = ProtoType(descriptor.FieldDescriptorProto_TYPE_SINT32) SInt64 = ProtoType(descriptor.FieldDescriptorProto_TYPE_SINT64) ) // IsInt returns true if pt maps to an integer-like type. While EnumT types in // Go are aliases of uint32, to correctly accommodate other languages with // non-numeric enums, IsInt returns false for EnumT. func (pt ProtoType) IsInt() bool { switch pt { case Int64T, UInt64T, SFixed64, SInt64, Fixed64T, Int32T, UInt32T, SFixed32, SInt32, Fixed32T: return true } return false } // IsNumeric returns true if pt maps to a numeric type. While EnumT types in Go // are aliases of uint32, to correctly accommodate other languages with non-numeric // enums, IsNumeric returns false for EnumT. func (pt ProtoType) IsNumeric() bool { return pt == DoubleT || pt == FloatT || pt.IsInt() } // Proto returns the FieldDescriptorProto_Type for this ProtoType. 
This // method is exclusively used to improve readability without having to switch // the types. func (pt ProtoType) Proto() descriptor.FieldDescriptorProto_Type { return descriptor.FieldDescriptorProto_Type(pt) } // ProtoPtr returns a pointer to the FieldDescriptorProto_Type for this // ProtoType. func (pt ProtoType) ProtoPtr() *descriptor.FieldDescriptorProto_Type { t := pt.Proto() return &t } // String returns a string representation of the proto type. func (pt ProtoType) String() string { return pt.Proto().String() } protoc-gen-star-2.0.3/proto_test.go000066400000000000000000000035251440740147700173060ustar00rootroot00000000000000package pgs import ( "testing" "github.com/stretchr/testify/assert" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestSyntax_SupportsRequiredPrefix(t *testing.T) { t.Parallel() assert.True(t, Proto2.SupportsRequiredPrefix()) assert.False(t, Proto3.SupportsRequiredPrefix()) } func TestSyntax_String(t *testing.T) { t.Parallel() assert.Equal(t, Proto2.String(), "") assert.Equal(t, Proto3.String(), "proto3") } func TestProtoType_IsInt(t *testing.T) { t.Parallel() yes := []ProtoType{ Int64T, UInt64T, SFixed64, SInt64, Fixed64T, Int32T, UInt32T, SFixed32, SInt32, Fixed32T, } no := []ProtoType{ DoubleT, FloatT, BoolT, StringT, GroupT, MessageT, BytesT, EnumT, } for _, pt := range yes { assert.True(t, pt.IsInt()) } for _, pt := range no { assert.False(t, pt.IsInt()) } } func TestProtoType_IsNumeric(t *testing.T) { t.Parallel() yes := []ProtoType{ Int64T, UInt64T, SFixed64, SInt64, Fixed64T, Int32T, UInt32T, SFixed32, SInt32, Fixed32T, DoubleT, FloatT, } no := []ProtoType{ BoolT, StringT, GroupT, MessageT, BytesT, EnumT, } for _, pt := range yes { assert.True(t, pt.IsNumeric()) } for _, pt := range no { assert.False(t, pt.IsNumeric()) } } func TestProtoType_Proto(t *testing.T) { t.Parallel() pt := BytesT.Proto() ptPtr := BytesT.ProtoPtr() assert.Equal(t, descriptor.FieldDescriptorProto_TYPE_BYTES, pt) assert.Equal(t, pt, 
*ptPtr) } func TestProtoType_String(t *testing.T) { t.Parallel() assert.Equal(t, DoubleT.String(), "TYPE_DOUBLE") } func TestProtoLabel_Proto(t *testing.T) { t.Parallel() pl := Repeated.Proto() plPtr := Repeated.ProtoPtr() assert.Equal(t, descriptor.FieldDescriptorProto_LABEL_REPEATED, pl) assert.Equal(t, pl, *plPtr) } func TestProtoLabel_String(t *testing.T) { t.Parallel() assert.Equal(t, Repeated.String(), "LABEL_REPEATED") } protoc-gen-star-2.0.3/protoc-gen-debug/000077500000000000000000000000001440740147700177115ustar00rootroot00000000000000protoc-gen-star-2.0.3/protoc-gen-debug/README.md000066400000000000000000000027561440740147700212020ustar00rootroot00000000000000# protoc-gen-debug This plugin can be used to create test files containing the entire encoded CodeGeneratorRequest passed from a protoc execution. This is useful for testing plugins programmatically without having to run protoc. For an example usage, check out [`ast_test.go`](../ast_test.go) in the project root as well as [`testdata/graph`](../testdata/graph) for the test cases. Executing the plugin will place a `code_generator_request.pb.bin` file in the specified output location which can be fed directly into a PG* plugin via the `ProtocInput` init option. ## Installation For a local install: ```bash make bin/protoc-gen-debug ``` For a global install into `$GOPATH/bin`: ```bash go install github.com/lyft/protoc-gen-star/protoc-gen-debug ``` ## Usage To create the `code_generator_request.pb.bin` file for all protos in the current directory: ```bash protoc \ --plugin=protoc-gen-debug=path/to/protoc-gen-debug \ --debug_out=".:." 
\ *.proto ``` To use the `code_generator_request.pb.bin` in PG*: ```go func TestModule(t *testing.T) { req, err := os.Open("./code_generator_request.pb.bin") if err != nil { t.Fatal(err) } fs := afero.NewMemMapFs() res := &bytes.Buffer{} pgs.Init( pgs.ProtocInput(req), // use the pre-generated request pgs.ProtocOutput(res), // capture CodeGeneratorResponse pgs.FileSystem(fs), // capture any custom files written directly to disk ).RegisterModule(&MyModule{}).Render() // check res and the fs for output } ``` protoc-gen-star-2.0.3/protoc-gen-debug/main.go000066400000000000000000000030741440740147700211700ustar00rootroot00000000000000// protoc-gen-debug emits the raw encoded CodeGeneratorRequest from a protoc // execution to a file. This is particularly useful for testing (see the // testdata/graph package for test cases). package main import ( "bytes" "io" "io/ioutil" "log" "os" "path/filepath" "google.golang.org/protobuf/types/pluginpb" "google.golang.org/protobuf/proto" plugin_go "google.golang.org/protobuf/types/pluginpb" ) func main() { data, err := ioutil.ReadAll(os.Stdin) if err != nil { log.Fatal("unable to read input: ", err) } req := &plugin_go.CodeGeneratorRequest{} if err = proto.Unmarshal(data, req); err != nil { log.Fatal("unable to unmarshal request: ", err) } path := req.GetParameter() if path == "" { log.Fatal(`please execute the plugin with the output path to properly write the output file: --debug_out="{PATH}:{PATH}"`) } err = os.MkdirAll(path, 0755) if err != nil { log.Fatal("unable to create output dir: ", err) } err = ioutil.WriteFile(filepath.Join(path, "code_generator_request.pb.bin"), data, 0644) if err != nil { log.Fatal("unable to write request to disk: ", err) } // protoc-gen-debug supports proto3 field presence for testing purposes var supportedFeatures = uint64(pluginpb.CodeGeneratorResponse_FEATURE_PROTO3_OPTIONAL) if data, err = proto.Marshal(&plugin_go.CodeGeneratorResponse{ SupportedFeatures: &supportedFeatures, }); err != nil { 
log.Fatal("unable to marshal response payload: ", err) } _, err = io.Copy(os.Stdout, bytes.NewReader(data)) if err != nil { log.Fatal("unable to write response to stdout: ", err) } } protoc-gen-star-2.0.3/service.go000066400000000000000000000051211440740147700165360ustar00rootroot00000000000000package pgs import ( "google.golang.org/protobuf/runtime/protoimpl" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // Service describes a proto service definition (typically, gRPC) type Service interface { Entity // Descriptor returns the underlying proto descriptor for this service Descriptor() *descriptor.ServiceDescriptorProto // Methods returns each rpc method exposed by this service Methods() []Method setFile(f File) addMethod(m Method) } type service struct { desc *descriptor.ServiceDescriptorProto methods []Method file File fqn string info SourceCodeInfo } func (s *service) Name() Name { return Name(s.desc.GetName()) } func (s *service) FullyQualifiedName() string { return s.fqn } func (s *service) Syntax() Syntax { return s.file.Syntax() } func (s *service) Package() Package { return s.file.Package() } func (s *service) File() File { return s.file } func (s *service) BuildTarget() bool { return s.file.BuildTarget() } func (s *service) SourceCodeInfo() SourceCodeInfo { return s.info } func (s *service) Descriptor() *descriptor.ServiceDescriptorProto { return s.desc } func (s *service) Extension(desc *protoimpl.ExtensionInfo, ext interface{}) (bool, error) { return extension(s.desc.GetOptions(), desc, &ext) } func (s *service) Imports() (i []File) { // Mapping for avoiding duplicate entries mp := make(map[string]File, len(s.methods)) for _, m := range s.methods { for _, imp := range m.Imports() { mp[imp.File().Name().String()] = imp } } for _, f := range mp { i = append(i, f) } return } func (s *service) Methods() []Method { m := make([]Method, len(s.methods)) copy(m, s.methods) return m } func (s *service) setFile(f File) { s.file = f } func (s *service) 
addMethod(m Method) { m.setService(s) s.methods = append(s.methods, m) } func (s *service) accept(v Visitor) (err error) { if v == nil { return } if v, err = v.VisitService(s); err != nil || v == nil { return } for _, m := range s.methods { if err = m.accept(v); err != nil { return } } return } func (s *service) childAtPath(path []int32) Entity { switch { case len(path) == 0: return s case len(path)%2 != 0: return nil case path[0] == serviceTypeMethodPath: return s.methods[path[1]].childAtPath(path[2:]) default: return nil } } func (s *service) addSourceCodeInfo(info SourceCodeInfo) { s.info = info } var _ Service = (*service)(nil) protoc-gen-star-2.0.3/service_test.go000066400000000000000000000065221440740147700176030ustar00rootroot00000000000000package pgs import ( "testing" "errors" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestService_Name(t *testing.T) { t.Parallel() s := &service{desc: &descriptor.ServiceDescriptorProto{Name: proto.String("foo")}} assert.Equal(t, "foo", s.Name().String()) } func TestService_FullyQualifiedName(t *testing.T) { t.Parallel() s := &service{fqn: "foo"} assert.Equal(t, s.fqn, s.FullyQualifiedName()) } func TestService_Syntax(t *testing.T) { t.Parallel() s := &service{} f := dummyFile() f.addService(s) assert.Equal(t, f.Syntax(), s.Syntax()) } func TestService_Package(t *testing.T) { t.Parallel() s := &service{} f := dummyFile() f.addService(s) assert.NotNil(t, s.Package()) assert.Equal(t, f.Package(), s.Package()) } func TestService_File(t *testing.T) { t.Parallel() s := &service{} f := dummyFile() f.addService(s) assert.NotNil(t, s.File()) assert.Equal(t, f, s.File()) } func TestService_BuildTarget(t *testing.T) { t.Parallel() s := &service{} f := dummyFile() f.addService(s) assert.False(t, s.BuildTarget()) f.buildTarget = true assert.True(t, s.BuildTarget()) } func TestService_Descriptor(t *testing.T) { t.Parallel() s := &service{desc: 
&descriptor.ServiceDescriptorProto{}} assert.Equal(t, s.desc, s.Descriptor()) } func TestService_Extension(t *testing.T) { // cannot be parallel s := &service{desc: &descriptor.ServiceDescriptorProto{}} assert.NotPanics(t, func() { s.Extension(nil, nil) }) } func TestService_Imports(t *testing.T) { t.Parallel() s := &service{} assert.Empty(t, s.Imports()) s.addMethod(&mockMethod{i: []File{&file{}}}) assert.Len(t, s.Imports(), 1) nf := &file{desc: &descriptor.FileDescriptorProto{ Name: proto.String("foobar"), }} s.addMethod(&mockMethod{i: []File{nf, nf}}) assert.Len(t, s.Imports(), 2) } func TestService_Methods(t *testing.T) { t.Parallel() s := &service{} assert.Empty(t, s.Methods()) s.addMethod(&method{}) assert.Len(t, s.Methods(), 1) } func TestService_Accept(t *testing.T) { t.Parallel() s := &service{} s.addMethod(&method{}) assert.NoError(t, s.accept(nil)) v := &mockVisitor{} assert.NoError(t, s.accept(v)) assert.Equal(t, 1, v.service) assert.Zero(t, v.method) v.Reset() v.err = errors.New("fizz") v.v = v assert.Error(t, s.accept(v)) assert.Equal(t, 1, v.service) assert.Zero(t, v.method) v.Reset() assert.NoError(t, s.accept(v)) assert.Equal(t, 1, v.service) assert.Equal(t, 1, v.method) v.Reset() s.addMethod(&mockMethod{err: errors.New("buzz")}) assert.Error(t, s.accept(v)) assert.Equal(t, 1, v.service) assert.Equal(t, 2, v.method) } func TestService_ChildAtPath(t *testing.T) { t.Parallel() s := &service{} assert.Equal(t, s, s.childAtPath(nil)) assert.Nil(t, s.childAtPath([]int32{0})) assert.Nil(t, s.childAtPath([]int32{0, 0})) } type mockService struct { Service i []File f File err error } func (s *mockService) Imports() []File { return s.i } func (s *mockService) setFile(f File) { s.f = f } func (s *mockService) accept(v Visitor) error { _, err := v.VisitService(s) if s.err != nil { return s.err } return err } func dummyService() *service { f := dummyFile() s := &service{ desc: &descriptor.ServiceDescriptorProto{ Name: proto.String("service"), }, } 
f.addService(s) return s } protoc-gen-star-2.0.3/source_code_info.go000066400000000000000000000045601440740147700204110ustar00rootroot00000000000000package pgs import ( descriptor "google.golang.org/protobuf/types/descriptorpb" ) const ( packagePath int32 = 2 // FileDescriptorProto.Package messageTypePath int32 = 4 // FileDescriptorProto.MessageType enumTypePath int32 = 5 // FileDescriptorProto.EnumType servicePath int32 = 6 // FileDescriptorProto.Service syntaxPath int32 = 12 // FileDescriptorProto.Syntax messageTypeFieldPath int32 = 2 // DescriptorProto.Field messageTypeNestedTypePath int32 = 3 // DescriptorProto.NestedType messageTypeEnumTypePath int32 = 4 // DescriptorProto.EnumType messageTypeOneofDeclPath int32 = 8 // DescriptorProto.OneofDecl enumTypeValuePath int32 = 2 // EnumDescriptorProto.Value serviceTypeMethodPath int32 = 2 // ServiceDescriptorProto.Method ) // SourceCodeInfo represents data about an entity from the source. Currently // this only contains information about comments protoc associates with // entities. // // All comments have their // or /* */ stripped by protoc. See the // SourceCodeInfo documentation for more details about how comments are // associated with entities. type SourceCodeInfo interface { // Location returns the SourceCodeInfo_Location from the file descriptor. Location() *descriptor.SourceCodeInfo_Location // LeadingComments returns any comment immediately preceding the entity, // without any whitespace between it and the comment. LeadingComments() string // LeadingDetachedComments returns each comment block or line above the // entity but separated by whitespace. LeadingDetachedComments() []string // TrailingComments returns any comment immediately following the entity, // without any whitespace between it and the comment. If the comment would be // a leading comment for another entity, it won't be considered a trailing // comment. 
TrailingComments() string } type sci struct { desc *descriptor.SourceCodeInfo_Location } func (info sci) Location() *descriptor.SourceCodeInfo_Location { return info.desc } func (info sci) LeadingComments() string { return info.desc.GetLeadingComments() } func (info sci) LeadingDetachedComments() []string { return info.desc.GetLeadingDetachedComments() } func (info sci) TrailingComments() string { return info.desc.GetTrailingComments() } var _ SourceCodeInfo = sci{} protoc-gen-star-2.0.3/source_code_info_test.go000066400000000000000000000012351440740147700214440ustar00rootroot00000000000000package pgs import ( "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestSourceCodeInfo(t *testing.T) { t.Parallel() desc := &descriptor.SourceCodeInfo_Location{ LeadingComments: proto.String("leading"), TrailingComments: proto.String("trailing"), LeadingDetachedComments: []string{"detached"}, } info := sci{desc} assert.Equal(t, desc, info.Location()) assert.Equal(t, "leading", info.LeadingComments()) assert.Equal(t, "trailing", info.TrailingComments()) assert.Equal(t, []string{"detached"}, info.LeadingDetachedComments()) } protoc-gen-star-2.0.3/testdata/000077500000000000000000000000001440740147700163615ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/ast/000077500000000000000000000000001440740147700171505ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/ast/ast.dot000066400000000000000000000003231440740147700204450ustar00rootroot00000000000000digraph G { Package -> File; File -> Enum; File -> Service -> Method; File -> Message; Enum -> EnumValue; OneOf -> Field; Message -> Message; Message -> Enum; Message -> Field; Message -> OneOf; }protoc-gen-star-2.0.3/testdata/ast/ast.png000066400000000000000000001062721440740147700204550ustar00rootroot00000000000000PNG  IHDRxᎼsRGB@IDATx\E҆;x X ;- Y,-$d!"m ]篷3ǧy=s|ݧK#!`@0 o!` C0 |"` >j2 CN`@N(j~͍?޽?w_ꫯ 
?~v&xb=&dnݺݻ;zc1묳&naŊ[dVO=zh7fkogm673i=TS8Lg93@xw:0~y2` 4EY3 "f >w0z7O?z)=aO>[h Lus>J34ҥKd%O>}:[xwtay)_tE]^܊+gkdc9hDZs{vYf +(\s)o06 /1nw묳~3D|g-o$#F믿=s^iZjLQ~`_~yV[J3PUW]容Omn7wkF{衇7nVvm݀ܔSNXXƆ@|W_u{2w[lJ.Xl5_Tgßp o{o]_h5<@|&)B>cn*8p@;i&R;+u.rw7|ӭڊ+[Z sUj-^O<2V[dРAK@ÌPKYEuzm!51ӑ1rKePby7j(e(azK'*+f=Q`jٿOoɭd1Vizb~Yg,bneRHFGgƛnIjΜvK{#<lSxXf&?*~#PT 駟ܐ!C)Zk-wWnݔۊ?l5mZF4ׯ]{n饗mjRɎY,o72bDYc;F\sMe40c6r-–\rI׻wowGn `:@/;PYLmTęQ1MR9A  情vm&vZ%iͬuy7L,lp`٭⮵-ƍxa` 6]Qzoj=^r72"DY#7Ic_g},eR"?C72q"f*M:i g ?nM62X@k7;D-ݿfJͻSL1Kn nFYArhrפ|ɰ)aaN>dy݉'54㏻w١tIxGk0m/Q0#N;"DV vFӖviz]6Q?tLi|WtZ/q YX3,?e*x`|T׹>$aP^e/Pf6F/4*!`:Jd:,Ia8-.NDJ>KGЍ/\.brtw1fac Qtײ.릛nv|G{Qgaq5 /F@T o^8R:w%({^|p 3DnL_|qw*wy{iw&>\pAz9СC!6}ղw|_|UJ"fW!P5Y-F]lX|9ǏEƎ 6,|Q=LPH'|{lSl /P6 ȶ~vat€+Q+o6Epex blCIx`y d(<뮻j⿧p-]wf!PXSD6t&0tטFIU*xL;T+>mF8Z|CH؂u]g6B- c4Ns+ݬU+ ~a!تC-<#c6;^z2amVy,!` >m5}ٝHב a*x1!L=zKs uZ`.I':(}O?Ug>Cڸ 0|T&2,XTL``q{G9眳]q`0ltߵ};%w23;n/`>Y!sôZhqi?׽s/dK 6,7|PiZ@)K[l/kӃ\ Qah0E$ ,Yx4p@38.Gt$ YHT6zC|bBϠ4_Π_;H9X8C`ñ5*<'?O[&0a\ 3]0a,u`hvD=t`^xa<721 4RMC^GK,|x`Œ=3 Ր!`(#dW_}U)ad50gc5! C(& g6 Ck!d C(3pVlC0j!` Bv0 "` > g6 Ck!d C(3pVlC0j!` Bv0 "` > g6 Ck!d C(3pVlC0j!` Bv0 "` > g6 Ck!d C(3pVlC0j!` Bv0 "` > g6 Ck!d C(3pVlC0j!` Bv0 "` > g6 Ck!d C(3pVlC0j!` Bv0 "` > g6 Ck!d C(3pVlC0j!` Bv0 "` > g6 Ck!d C(]݊S~w⋻/P?}nꩧvN8a 3^zv 7!Mڧ&-2ꫯvG1ҥӧ146)&`(E`K0o!`* D5M=7|S|M7R$ٗ͟⺣A:x˖o1ECZfo-׷v۲!`SXOH-guVw(!`0M :q;iPN+H&E<xWK,3-C( `| J4+",b̽"!P[d-]Mi8!`FT41'Fs;oƍss1Gae0MYXbgyK/eի1T&&ͭce+ 2;C0 :>v7;kѣG~}G{?駟/&tR7䓻])2,z`/b좋.zb(ea6cin #~i7|sׯ_?5_) nc9jUwW]u;|tMѮnI&̿{'ȑ#ݒK.̘59E|N6j}nذa .pz{{Cꩧ܅^F60C9D]'Y6cڲI}Ȑ!nnRƹ뮻jBY/"wꩧ~v?i+hc g *VFĎ=u3 &~7F@N9hD'u]W%ˠA28[o߾PlFJX3 VMӨJأ_s5n<$&A\s*в߲Mdѳ#b/悹 =Mf&,YE\d,QGXs1N(gyT5gqO@VCP?j7xC:g=^:VnLItr 0GU3-\ K0O b&fg]i}un?B̝M4Q=0x12ҌYѤu,.tԃ9&%XBk?hU=#_~e=4 3%H+2 F ab- >n^uJ?\ZuN>}tPXh W{{=zuIF@Z0֖I\_`i$XCcEigXCc޿Uxgwx:`Ā9,fA#vZEVc^3DB-ؿ:3(q"Khc"n:RpgySY&裏8q#O,,b-<)z~;':~b o+ߙw}2m@{/s63!|?O5TyӍYꪫtk0~ | 4#` >ͭpV]uUio^o}V5m|@^%ou})r! 
6fW<ۃ02=c| T# !Pqn馲8~@,c]b@,&(b }nE dk )OZ7{@BJ|l\ '0.F@k R=&_[O q&_:J&* f #C 77N*uYG7;eJ[wuU 'Y1Z}&Z7,2ӿ[xf0gci &^{M]&Xij[o5 ʖh>Jts6іFNQa&x-䰖NbJC021X ۭ[7ww6>MRt[ny5SLYZ[dmF#FHJ>|f|Pɋw; #`: 7^*V[W^y7|cmu&e,pvGc̽^4#`*4NF6s*g}?*8Lu] ,'-3rH]TMsl@^칺`;N;&&QsԕPDI-.Ǣ1F.䒖vܖl&Y;j v}58l M lD~l"\sͥ ~+p8"Y^@<ؗ-cK.9Ӯ䯿zpGYH}8AY F۽?ݧn=a4_|uQo.Sn|9B3; T&dwܱbnjGf,nJPB 9,rw1S;6XI7$_Uam7PhO?*ȣQ= 4HTzήB\dRYTnPptw_z%/; G} >`gR %XBθ7o\5z1 2/]bbɦo٭uY1fG2TbG:w}+$zv"O7t],i馛+INmB|#Ca'?%qwW5Ny`M6Qn|D %fE%yM91Dxbg=nJh>,o6h#ō!ElS[-2WS,Q#<[nK]ֽzRjI˂!5g,/>Z5'tbfG}}\][0FJ[uUk$zw|ϴU j)TQǏoj[s14dt=:̎D~޸Z{ou03xqt$W8s]qdYc5 26F@0VIL7tn't'xKެq|.7\}H" 0vt\zF[ν[mC V̓Rp73μħ2dHz a:d(va?bZqZc-׿`Cu~x-]'r2*'z!4nHK/Q{piGLЀ9>]!+}0&lQ.첪Qˡ5 ~zkaNS 8 Q:)ޒf3]wuMC-:T616VD0aVN%XDbբCDf&&@Da"R;1cA)G{_~%-yC=) gѡ2Q.c-έ)Miѡ2/1qEaJksZtLe >۪Qڤ*-ri۶z6EJk\sخj! ӨQZ6 SZ֢CeW.c9kS0ݻsFCCMX"c9jU”ƴPj,|[L- SP2pɢCe2\Dcn<_t٢CeݲPjcYh*e(LU-PQXloѡ~i,14Je(LuG,:TF.6FU$T ߷Po4|Z2Ga5scDCeRVdc)kJű(LuvfE5uzDa߿#Q,^Yr{P7|ꪫ܀ޡQ'?Z>K34 {wtwqG;ɽ?nQ'Çw_}U?+>g7Z]}:Tk?"v=OmGw 'TŠې!C/DoZ^)cR|5Z1R3JO<mMz꩕-w.{챎]tѣlKo-Y$Hb6:-h cϴ웸kEz-//ay /t|ٮmy9য়~rmmmnرzu/Bc `a* g«!5M?zL3k.5u̽Zt;X=!x63C9nxF@5;S{;U0_|~n7.X>q`i_ )4L㦝vZGwgp<~ٍȍc~+=8  AäzFtbo$n:~1~x2{Knݺ9촑Kh'xG1/b-jVj l|}NXE}Cٳ:Xacg6!xltv٣GS̙&^L g|`^AϨ)2㏻{L~a]vu,2[̱Iɚc)l J/ӹx)=Z>*>>4:𪫮ҝwީ6h#W[mJI0߹p"ȑ#UP#CHў"mdQأ&61[`;`o8f|3 ,[~ ǒK.LQRѠg)'dt+NSW\qE7.2FBwy(2r?>U>2:2SiF@0#DaQY,j?73WB<2D"O^sN=JA^z%ذEoFUCEߏQeR/7]y:dt 闆C _0FÙ IA:at\Iu-nFսđa~3bƈ B%Q˼c._5J/l;3M6D6U<1;sg}6tSjؙ坘2[+tAwsp.<"/ɲU >fQ-jUM7䈩I(j=-2)Z>0Wf=7/b)OfڨX0Xdұʂ)&^8"+2bOџy智$:tC(C*q1(*c ^tE?Ee!clFeF-#s~ 漇,c@`0 * 51F)dU4bzSW FC@$n d1)AKJ*]_@Unvo¨o"A[ k_&E\ h@)TMLs)@lQ4~_F(vVꖭ[0xq/HP]{r-Q9.ևkv|;bW^"rHg >`YgUf*⋿ƛݎ#3ڱ@$+߁V۬g$Ad]w!rG&c)u,nG5౏Ʋk4˥S c]ߩw}:E X+</9340N:$.1{fgv(B+ E/I3 ^{w$!0q/88∊ϵ Bc 6lMd SJ[$jSŰ w'sx,&ʣOS9ܞ(ydpҀ* b$?J3^ juåOY1kTTLdLlZEqaЍM1k?eq]FypznÃR* u .pJ0g@$M(QdEFgZnyglVje*~߸@]aO?sfwC@7}PR+.oeSLJJTo2@pˀ"ϚT{%" v2IUu҆$*/(Ax>)aE0:||BJM򪫮fq 0ێ$DI; 8E>hN]/ &N̉_$dAF g  64JX EF."u0T.jDOh `6)(K% a McUW̮޾ER[C*3jnap*z|'vea ^C9DgHśE^3{mGA"hLx{V֙ȕFa0 
,ĔX 7տɇf_>Kcy| N DQZe*WFɇ3(tCv9*L8#[ ٬'aSfߤ_KP˼2gjxoɮod(7GH],Y".zB[ j.z&af@z}DDNd$Z4$VqWgX_s)L(=!`Ewq&7to Vwk; ǧ)țiMy3E]4(vD##GZV&vgtz2/vH;4,3G֦8Pamg,0'r\3[*8kMf HD"?fs[Q2F)2q1r7xE'v|{LMC a}l?9|3I_0.f~<(40&T6g]o=:3rlʥY|M; e1M#U0bۥT-82G$`رl #;CzY,WA ^ O$:|G zR$]v@%* G0n.b IPfoRQ5(9,q*MAHb۬HiQ1IP8$3Q/~*8s0>D0E-;t5F2!jW'.4O?T*9y001@hyu2'4k #*S%k94M|0r(ҙ8`aȨG|.n=8~$ F27(wGmX$r K b:qL/)Y;`p`{AU~t֭G8}ITо57P[ndAErA%}$wutr^Bጔ# ? H#oɖ|'%TPIАs-x \gtta ;o@h XЊ2u<&Q7f Pa }zAy戹ksk5, t"Iidd裸$VFuAAꮥG`bRrKld';U #UUVy&a5.[3Zz+!?d,6<\ńYu$1ub"XRQe ^~7G,H(4r{  S~؉F1DeHk~e`f[.j媆kE,6Dw;e $x+$H\av!b.ϖgT4VȎh-pIoDw[\><)O"2H\j;]3r3xw|FzSoVׯ_đAOvl$z 7lyu1xaW,Zy0X qCcGZ(- < r "oV٫3^{mLQC?M&. ʶfǚEQ3xo.b_޻wo".m ƺF:ݽ^#:W L{wM\cF_1HNҺ^-|vG۷N83j--Ҏ5hrӟsW\.Hie'ITIi#%;묳ܰaVHF֯,`{n\\c-v0xSIzkap8x;Ǜ 4!@$#t.Ԕvv >MXEb<κ^<7Lu$Zkzꥑ˲" }2NZ PEys\nݸ[nE7e,=Db#JJog^Xzl]u]3V4JÐ(H,Z7T4y٫Q`{Kc 6RYayՏ~X!efAge0wa_{Y@+K <0$Ѩ.,@ܩ24gF_#C )E_5G}oSO=, ~ =pb-ig0EXZw֪uq^?Zg~K3x┙~GaHxH>4QaVvi|21fcO(L;cCi7,4LFC R]kIwy|dfi6Yf`˺SO=wq'Z7m-_Z0$N0=1g1n12$7,w=QFs 'T,hlO{WnYGaY4,aw,k)[[L4 Q8 \!&Lidĥa4QZQs*O{|nꩧά||uVR(@frXL1d_}GR}?>#gR/3[80j&#g1uiy`5tᄏ#XH֬jrYbu뭷c͂ޫ@0ժS#aX{u3~wz>G]wUhEI+InW<\r%n=pM7][̑0K„4zo1s:_~QEy̘1nQO-{g;`v Q6F\b-dIa  5Y|@0vtpWNjwuWеk@U"L$^ KJ7UM4DwBoW_~@vÇv ͵VIg;B/xᇷo$o;"iJ32-h05 vh'܀#feMdY>sٱ`HUg}v 1=.UeLU*n\GZKz,p}ɊD V¼fۏٖ,PF܉nOqg/!=T"$N8mXt~Z˱0*_]6(L"D,-p#8b[",y~A83_ Y{o(Y #}MR|EP"+8f ^z@ 3tP-g!4+?:m,yT{W}56_i8d< ecJ_Ik) ,y*&/90EZ|sQG_ ݻb6/ ~6łF0QLE۷!L_~EOSl' ߺв\qfM""p+y}9[n]򯃗Nd6}sJ(>5laÆN=jgAuD@$Z{vSwټ"]'Z{6TܛIK,eNthNtij +KR|㎪%q%T4#SNI1ԍLum _6f먆piblWeYm; 3}\"zze›?ƍ9TZݑ⒔iƳ]ܕaޟuYUDd՟3ʻ!qpb-0ӱcǺz$)L:9[6%*@}{ s<]|vi53@Y[-nЃw% (&gQx`KEf*ơw}{= bm&i3?Y89) FL=nyu̴q;&lRq-Jr^N#S3ihcmA0aplfY|ݫ/;\t.3 5;0mql6d@o' 7q,fp 0<!C8zؼ _3m=滤p<+Ɲm0ڐx`pq<1 .RjR*}8? b.M!d<*x =< z쩌uE ^#DgnL!p 000`E'AZ'LG]5vi2,LQ*3z&f0'8yԈN)l^#FhB+LփT0`J-X3$$lGJT7W.?^?AxmK/i!^z: k1NKa"JpwyZI/G~Ԗ.ov:LsVf^ljf`0z'!}.n6҃:12quLO޽ h[_]? 
plvXnKpyꩧЃ~ &i&7ZpԨQ:!!m> MW\q_kcZ)  uM#D`ew}L,L>@a i4*[0.vmH S첋#k"adz~):_5A'>Kp+C k]Z~:3}W~  ԅn*kCԥEE#S= ':U@+6H+jߒd ; j}eH1\gyf u3 $z@ΰC!Vq'2 D*>Oc9&[:*)FHZ7N:)X&uДQV,\Oؔ‡Dć9IU?EBK :Ja 7Ti6/U[pdvD5N A LfEO:t=S~ITn*\1Dk j7㨫L(Gbz"J&3ek?v"L9Y,UF!:Y]^}S .kZZ1@\ǥX^DM\vD=6 Jg=Pbyzi,򥯐UJ$戁؊k7Ÿ*Oz^Y {(buH T-bѤ2SO=5`YUW]ip]jq6 BleSPAv@  NbAS8FFRo1`]#XbFH5p@USX}`1((0}0JdCUpuץguZ "ŷ2%Sw^ꠀ~YvE6өTeqP vcԉ:F\F2i3l=Fw9u&z5) HU o MXCX`EF#&l=jvL~+Ub(<ڏ/S(~D" l`R 5i{DbdeڤE0wqagRGQNYmߺ/ە~fXk`Z^m4 cN$”bѿuԥ0 dǻ6e;A, yfLIL[/$\ln)!88S5QA=`VL/Aa#`9['k2=yI &9j8l^P9(]}5ɽ}6ƞ0shfV Ru$H\&Xs_&1ğ 6tӨ*O['yC`wMt(&,)Ufbf8 3^\f<&?LȌ:Q&j!,uV->Z vMsrt Lc/>W]uU"/i@tN[vP nh)hpKLD\y6LO?7d(?$ٍ"zوM R+!Q/*r}bH8&ŵ^z6?c7 jj$oF)f w %ȌX{me_~庹lXva z#/B>'r^=^|ŕl 7ܠp P5W_v?֊nV v 2=s[\vejnyW-%pCS<-9Jw̨Cpї٭ lv$o5\j$ovo>FYMw62*qHfV]aA=\es8+$(<(8+ذô4Ls>8RFX.ϨA3I4*n\M ´ׅ^!i=⍂[qa)un6Vq QJHB  / ETz$@81sGO>0j)9rTf|v j"N #8q(*B|Bi1+/S 8N$,o5m^juAL:p%i\T I`IF6lX,NK@a vG$02 #}/?ʱX{D] 1sXl UE]ӑ!f6b}؀Eg;ӧڙ{EZ+"NG}O,KO(2Qq@Î@m%kÃ׋V5aڅIlӷ Ѽ'v X7C>SB:~nScL1L*2GFjΎVV U KO[q!ًV{EI{0s=l'ArdjE'YIsO'ժGi@U'f?̢ SEըf mT{t 0vXU'?lgLл.QzFU*+̜EZ{m,HQ7@$p$?ԅa_DD U\cچʕf''so3TU-jrfo8F$|zx"K. R?|uqKtӣjttlĪFe&DUM\D;0&QD҅Ǩү.m諒J~#Qs0 u) ZXӔ#,ȿ]WTJUP{ʉ*9* jv金S2><.s7ٌ.]Ly}Uz Pȇ 1CC2b$Xb@pˋOuկ3 x+9_:_xGc#yo1z-[)!yr}^JPEE|K ]./ ƨJ8ѣ^T}Ab=hm} sX /xFElǂIƂ }NbaR* N5hK I4nXdU>GA0Tm34SMKe4D@ aV j묳N Rp!i1,#ΥgxGIyZpCP~e t@> }Vs@@\,8}O,X`!r _Hp]$LP#/.3%N0Ǜ-j+ꌟ(I>Pݎ{0 mUV42WӢ{Qޭ|jĎDvvBXЮݺuӀ2j\[ ;xI\# <׫W/F?e L.|0MCZ{Ed܃)*뮻n؛+0k*:vYkbʬQX]f`7goJ:7s`1 x?XZ|**婢`F&SX}\wYD+ ͇!E:h"UL+\ ͰP nP_(΃Է-S)h8&_x"gi0;V( + ٨^ywF9!hbUf4-QJ$ T:Y"挠Z u$JY%p:u7߼7" ]13UTHe /SA k7YxR革b!&β'QQK即j+F Y30Dj-S #e޷Ka#.YLع?%K/L}qj5M5*QE7ZQuj3fH,j#k"j!`烍{*vrT蕍*#>, VK&|"5>F%btأ:ʚ ^t< 1 X`4 ˰0!8ȑ#KJ$)!)6j! Xi%bm0SE.L%EKqS!pXDtɛrs0c.G<+|oNֈ~Ĺ՗uY|D2 %L&XC$)VdĩT a*=Rŗd4ȇ&B,= 0*c0 a9t:w+y%Vy陼H2t,3T?1=v 4zܥvDvR):U"="i/ }/`lTd`т]vBF80H`mqO:YDKhlp W*Js>)f_v5 LG$_S6dG5T1%M; Ś{YPE:XHh K'9 xEG֎5n -[&x[Hf;>R,dy_a}״{Aڬ SFtCoK#Aq"Hb@ŒƝw8 ޗ]vE%c-:ihNa_GBQ]BǝvU7Fizf晈P& u1Jʔ% 1c.GEaJ< `! H`L1-;c4hZ|k)? 
:(CI֑ؑrVTIߗ$zbَu@S=P>t9Wdq[} lC Q;~!jU,1aC-k[@%S2z%Pqi $x#Q}Gt\x Wg) SwO<W^6J 7Љ;Z'sCUm19ԤeaIpe'j=4῟~H@g#`Wtgztc˟O>w`$>N%O2d Hh=>}8ai*E?a!OekJ(9`0w"k$*4\gxM0ѣGK@Pp38JD{yWҨ=\qDE8o*'BQ"dM9 HP,|b '^4*B%<$Stf`EHBz-$/3gGpo٣Qd+"5,j4,H1J X7Gɇ Ӈގ Cfɇ L4~+L|D"gёB6 K(1If#s3 6Q f,H,R4mJHBI05D"Ӷ|o[ڑ6䠍h? D{̉zЉjPX ]$ JJ aYks|&w7ی3Yێ#*ѻ8&?6m!*Z!3 I E)= \hE3_΄9A0tR  gbI"%Ҕ\Dq6#ڎ6t ` fb IΖ1p+b`cAô!Csu;8<Ǿ;Z#(K1R|- ΐ=gم若 :R? n=m aРA\tOhSd@D@ۍ[[!m+{wcX[o5˄JI,Te)nIJ|HY^"ʎt>77JG$Y8ڊ"U!Ś]fkh[V!P;sBRJb1Na2Zq*i2PtY C <؉"8dm2 >-C X-d{f\ZP/n[o*! CQ$FJv#h:IU1$[6 ݴGydg{1ǍgF`СН~9اf$|[!)pk#8Sa$6kOC2@MN:$7, $,0bG`VpWU7@5*q02DHSo z<.˥0R1p]<@ZuY)1D`L CQ;8ـ6MO{KY C a=pyRQ2W>M/]2 C.w;5пC&I !vO>Y}ZKOOk Y C r!ZaÆ|%>tdIny@sgy\o:g6)2!`4^ tZQ$@PrcQ ki@ڵ;\RW_S$V C}5SʍyqLOSXY C [MW^ƌW $V&'ela!6۸E]az7( .`bE2!`@.] #Gt>lX6ofa744N3/o={0R@>}+ H -PO~0D~={LTPhc 5diD`UWulA*tMOc/2@f s=n@O>D~+dl|[ކ!:K.jԢ?=z|W_#{|dZ†!'p{6>2h{fJ"B PC ?yuw+rAGީ1([nœ-"Xc5DMVYe5-b.ixCbCcU/~B|7x."䃈S+*..gA{u믿~;/񑮹/_ 3tH-0`7`0^x0r0r̗ò o?)%J${j/q#CXA]2߇{ÒaX\|yu6ҖOaĹ e/GV[mvaa]`$4O,0^z9c@1SL1&lN?o' 6|["2;Cs@;ؽz 7tz"(?FAU@~cu|Ѥr\Q=}z“.{w`́:Ɵq^+>T>&%?O5T39خR3SZC= eK5裏ԁN vU7.qѣZ<`={t ,@ߘn33yy;v^~`44ӸfA}rx\̔Vq@zc@Q믿j^z``?Еw·vJK)39p.T hxaT@ǫ0B\ċ!*"GlQkzśߜS^:/H,q%ftIƆOIgS~&ĸqt@bW#_,"3ModߡƠKS3s `O_|Q֒XtMb >nپ;'駟\b̛aiAFgS!1={ߜ/NK]Y颋.Z]vYe`?W5Kx! =:c`A0n4/"lԬ ~g:DɈH?c׃ .Z^^xA>j%Guw#(qNR ?n;֓)߹R>'_wu%<~6VCژ d#FЯ_@t} d1ca^[ >s=7jQC)2bK. 
DTdtMHHA߾}syZ "(]p|"lTķ+j">( 7J0+5 nT$1Kn~?uo33=sz 7uT}7ߴ>f#eaMNT])J8ѣGMte&o[/}y Si22vL/LV_@[Lkfu%!ySӔd`v2% 0m5XqA.Snl24~?1*UM鞽C;Q?lX1G` }J _~7%-2\O6+y_B1e|:^⒦x_^۶i֔g\sM7dʮXke`ɔ)v~8w}is_DkC {ԉE+RR^ #ܔE1{Mڧ~X+iߔaw5ՉxO>7 +u{N?\RFLMvU4lSʞ7/Pr'$l̏{<lQ:a&Bx C۷{vcqMU˚ҕMEYJێnͳ4."Ok,ў5?rytxO%45@hQjQLX9y0ӴS:#*GM8hW |ᇞWkqؕo_ Z*Ó3Ϩ~[Ϻ# ᅨk_4; IA0/BwؑTo(3eκagiUnKZ)͊v,AEMZ%1&ɢP؝7trg[$X G!|wF<\M(:s=r92N R$hY%fv0FŴ)| BQϽ޻b2.RuS]Ã+fG} sGKY;\%u.2FsCT~(0^r%+-2IW_}uy Ek8^|t7/|, /0AZO={8^ӇUQ D'pB$Fݡƒy1IT/ÃC -bTٿO{P_dm"j0`BE Ks=S˼t ͱ@p@$Uv1$NmAVEA!FmڇDF>ҘdB}Gu&RII믏ێLiF>3n6HOB[z3PU;Qd+/>)bD/X\#[6Hʩ1JsPNEciڇ2YNu⧤Qm]*R9o$U 3dQTe4~qKi;$U"ču"0Գ<5'.&KUW]xBqwo1)?۾>O~S̝Էh)1p"xa/2 \IDATqz|n?2f3%8De. <:S+5qXT1~Ki 4as+0/*2-pYHY`5xPf eՔ.XƤV||ײ=;R]On{u ܮTc ^ 7/:;ΓX|cw9J ,[ O~C3zDrᇉws/bM ¬ nYեaFP R'JD2EJ0]wm2/3c2TaQS)u'ωlE~pte_0*k?+_>q822Ip`hMgcR_nzvkUGua(7:*W@\0o6nFN#$ <묳¤,f~z\ôs _i}^N2x=Z]oFKx`¢cr00Vj!\Ȑd0{WF?L3Kς0āsJ!8I#׻O?uMFkSوl , 0 KTߜ ldI?D7 Y/H5ÅL2xCd ¾3H$W`K>7\z饾[o7ߤQNha"e 62m8^%wQ #vxE>/ܯڶ0#GL=TSO=5w}0$U@{-]v*R1g o9ƟI”"` 켕2Nخ ASrOg#b&iS@ 2y#P(2hԜPik31NS*±JL`;2LpؔsozlSJΎ;Lg?+mz '"gߐ9Kr i1y& .gKZЫM cל"$V Vʥi CO2O/2RtafTqy]ԇQ/ӈ4"12x .׵2PMu61ݬ B̓v#M,3Ǯh?W݄4Y93)6C`\86=!>Z9rBumb.1 JF' 0_7z^9x5Z)mEttA>Ddfi<y2)(!ќк-XWA:r^Ʈ+ BlJym‹b VIb?궕-χ0eH f&I~`lI )m1f)ܙ\Z=-5d27&6͸>`1HSjN2|nИ*L h64᳘e64ZJ4LV@Q /6]HZ\$1C~] !+? 4 gqwiڬ3kwdni4@ޛ*`:M0SS*xrm=/H o x*)u0R?Ci'Ree@]_) S^m*z3M`v擻 ަ鋾ara4ȃLQP9/axN{t5  2^UjKxO .*}PQDMۀeu ,+tXSyBNQH3ꀶ6@E[.QޢC8S6?!sB-;?KO=S9tI^ g79^:NhԘ*AH#Eaa3@cGu}W? 
6x"BH0gwfpg8wȨ <+xCo, Q|0J'w4'p {p>XJ~I`lADžN'i۶mNx';$3U1!J>}J߂5Ts}^"P>^b( |[6hwze8A6o^a%ҵ6R8s1 ;ۆlp ńӂÂƀ#x1@s>F b4@qcy=tG_곗N~6xܰ7E!%w/3<=?<oH_ 6OAy<84 Ӧ@h`4~ny\ph 4)/xBBIENDB`protoc-gen-star-2.0.3/testdata/graph/000077500000000000000000000000001440740147700174625ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/README.md000066400000000000000000000005251440740147700207430ustar00rootroot00000000000000# AST Graph Test Data This directory contains various test proto file sets for black-box testing of the AST gatherer `graph`. Proto files are preprocessed to their descriptors, imported directly into the `ast_test.go` tests, and unmarshaled as a `DescriptorFileSet`. ## To Generate From the project root: ```sh make testdata-graph ``` protoc-gen-star-2.0.3/testdata/graph/extensions/000077500000000000000000000000001440740147700216615ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/extensions/everything.proto000066400000000000000000000030641440740147700251350ustar00rootroot00000000000000 syntax = "proto3"; package extensions; option go_package = "extensions"; option (extensions.ext.owner) = "IDL Tools"; import "google/protobuf/descriptor.proto"; import "google/protobuf/wrappers.proto"; import "extensions/ext/api.proto"; import "extensions/ext/data.proto"; message RootMessage { option (extensions.ext.annotated) = true; message NestedMessage {} enum NestedEnum { option (extensions.ext.ext) = {}; ZERO = 0 [(extensions.ext.numbers) = 1]; ONE = 1; TWO = 2; } NestedMessage nested_msg = 1 [(extensions.ext.name) = "reflection"]; NestedEnum nested_enum = 2; oneof union { option (extensions.ext.float) = 5.67; bool boolean = 5; string str = 6; bytes data = 7; } repeated NestedMessage rep_msg = 8; repeated RootEnum rep_enum = 9; repeated double rep_scalar = 10; map scalar_map = 11; map recursive_map = 12; map enum_map = 13; google.protobuf.StringValue wkt = 14; } enum RootEnum { ZERO = 0; ONE = 1; TWO = 2; } message Request { extend 
google.protobuf.FieldOptions { string footer = 222333; } } message Response { } service API { option (extensions.ext.host) = "Alex Trebek"; rpc Do (Request) returns (Response) { option (extensions.ext.header) = "X-Foo=BAR"; } rpc Client (stream Request) returns (Response); rpc Server (Request) returns (stream Response); rpc BiDi (stream Request) returns (stream Response); } protoc-gen-star-2.0.3/testdata/graph/extensions/ext/000077500000000000000000000000001440740147700224615ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/extensions/ext/api.proto000066400000000000000000000003751440740147700243240ustar00rootroot00000000000000syntax = "proto3"; package extensions.ext; option go_package = "ext"; import "google/protobuf/descriptor.proto"; extend google.protobuf.ServiceOptions { string host = 111111; } extend google.protobuf.MethodOptions { string header = 222222; } protoc-gen-star-2.0.3/testdata/graph/extensions/ext/data.proto000066400000000000000000000010701440740147700244550ustar00rootroot00000000000000syntax = "proto3"; package extensions.ext; option go_package = "ext"; import "google/protobuf/descriptor.proto"; extend google.protobuf.MessageOptions { bool annotated = 123123; } extend google.protobuf.FieldOptions { string name = 456789; } message EnumExtension {} extend google.protobuf.EnumOptions { EnumExtension ext = 101112; } extend google.protobuf.EnumValueOptions { repeated int32 numbers = 131415; } extend google.protobuf.OneofOptions { double float = 161718; } extend google.protobuf.FileOptions { string owner = 192021; } protoc-gen-star-2.0.3/testdata/graph/info/000077500000000000000000000000001440740147700204155ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/info/info.proto000066400000000000000000000014641440740147700224420ustar00rootroot00000000000000// syntax syntax="proto3"; // package package graph.info; // root message message Info { // before message message Before {} // before enum enum BeforeEnum { // before enum 
value BEFORE = 0; } // field map field = 1; // middle message message Middle { // inner field bool inner = 1; } // other field repeated int32 other_field = 2; // after message message After {} // after enum enum AfterEnum { // after enum value AFTER = 0; } // oneof oneof OneOf { // oneof field After oneof_field = 3; } } // root enum comment enum Enum { // root enum value ROOT = 0; } // service service Service { // method rpc Method(Info) returns (Info); } protoc-gen-star-2.0.3/testdata/graph/messages/000077500000000000000000000000001440740147700212715ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/messages/embedded.proto000066400000000000000000000007021440740147700241060ustar00rootroot00000000000000syntax="proto3"; package graph.messages; import "messages/scalars.proto"; import "google/protobuf/duration.proto"; message Before {} message Embedded { message NestedBefore {} Before local_before = 1; After local_after = 2; NestedBefore nested_before = 3; NestedAfter nested_after = 4; Scalars external_in_package = 5; google.protobuf.Duration external_3rd_party = 6; message NestedAfter {} } message After {} protoc-gen-star-2.0.3/testdata/graph/messages/enums.proto000066400000000000000000000007761440740147700235170ustar00rootroot00000000000000syntax="proto3"; package graph.messages; import "messages/enums_ext.proto"; import "google/protobuf/type.proto"; enum BeforeEnum { BEFORE_VALUE = 0; } message Enums { enum NestedBefore { BEFORE_VALUE = 0; } BeforeEnum before = 1; AfterEnum after = 2; NestedBefore nested_before = 3; NestedAfter nested_after = 4; External external_in_package = 5; google.protobuf.Syntax external_3rd_party = 6; enum NestedAfter { AFTER_VALUE = 0; } } enum AfterEnum { AFTER_VALUE = 0; } protoc-gen-star-2.0.3/testdata/graph/messages/enums_ext.proto000066400000000000000000000001131440740147700243600ustar00rootroot00000000000000syntax="proto3"; package graph.messages; enum External { EXT_VALUE = 0; } 
protoc-gen-star-2.0.3/testdata/graph/messages/maps.proto000066400000000000000000000023251440740147700233200ustar00rootroot00000000000000syntax="proto3"; package graph.messages; import "messages/scalars.proto"; import "messages/enums_ext.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/type.proto"; message BeforeMapMsg {} enum BeforeMapEnum { BME_BEFORE = 0; } message Maps { message NestedBeforeMsg {} enum NestedBeforeEnum { BME_BEFORE = 0; } map scalar = 1; map before_msg = 2; map after_msg = 3; map before_enum = 4; map after_enum = 5; map nested_before_msg = 6; map nested_after_msg = 7; map nested_before_enum = 8; map nested_after_enum = 9; map external_in_package_msg = 10; map external_in_package_enum = 11; map external_3rd_party_msg = 12; map external_3rd_party_enum = 13; // this is a message! message NestedAfterMsg {} // this is an enum! enum NestedAfterEnum { AME_AFTER = 0; } } message AfterMapMsg {} enum AfterMapEnum { AME_AFTER = 0; } protoc-gen-star-2.0.3/testdata/graph/messages/oneofs.proto000066400000000000000000000002351440740147700236470ustar00rootroot00000000000000syntax="proto3"; package graph.messages; message OneOfs { string before = 1; oneof oneof { int32 inside = 2; } bool after = 3; } protoc-gen-star-2.0.3/testdata/graph/messages/recursive.proto000066400000000000000000000005461440740147700243720ustar00rootroot00000000000000syntax="proto3"; package graph.messages; message Recursive { Recursive recurse = 1; } message Circular { message Rock { Scissors beats = 1; } message Paper { Rock beats = 1; } message Scissors { Paper beats = 1; } } message RepeatedRecursive { map map_val = 1; repeated RepeatedRecursive list_val = 2; } protoc-gen-star-2.0.3/testdata/graph/messages/repeated.proto000066400000000000000000000021631440740147700241510ustar00rootroot00000000000000syntax="proto3"; package graph.messages; import "messages/scalars.proto"; import "messages/enums_ext.proto"; import "google/protobuf/duration.proto"; import 
"google/protobuf/type.proto"; message BeforeRepMsg {} enum BeforeRepEnum { BRE_BEFORE = 0; } message Repeated { message NestedBeforeMsg {} enum NestedBeforeEnum { BME_BEFORE = 0; } repeated string scalar = 1; repeated BeforeRepMsg before_msg = 2; repeated AfterRepMsg after_msg = 3; repeated BeforeRepEnum before_enum = 4; repeated AfterRepEnum after_enum = 5; repeated NestedBeforeMsg nested_before_msg = 6; repeated NestedAfterMsg nested_after_msg = 7; repeated NestedBeforeEnum nested_before_enum = 8; repeated NestedAfterEnum nested_after_enum = 9; repeated Scalars external_in_package_msg = 10; repeated External external_in_package_enum = 11; repeated google.protobuf.Duration external_3rd_party_msg = 12; repeated google.protobuf.Syntax external_3rd_party_enum = 13; message NestedAfterMsg {} enum NestedAfterEnum { AME_AFTER = 0; } } message AfterRepMsg {} enum AfterRepEnum { ARE_AFTER = 0; } protoc-gen-star-2.0.3/testdata/graph/messages/scalars.proto000066400000000000000000000006351440740147700240120ustar00rootroot00000000000000syntax="proto3"; package graph.messages; message Scalars { double double = 1; float float = 2; int32 int32 = 3; int64 int64 = 4; uint32 uint32 = 5; uint64 uint64 = 6; sint32 sint32 = 7; sint64 sint64 = 8; fixed32 fixed32 = 9; fixed64 fixed64 = 10; sfixed32 sfixed32 = 11; sfixed64 sfixed64 = 12; bool bool = 13; string string = 14; bytes bytes = 15; } protoc-gen-star-2.0.3/testdata/graph/nested/000077500000000000000000000000001440740147700207445ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/nested/nested.proto000066400000000000000000000010631440740147700233130ustar00rootroot00000000000000syntax="proto3"; package graph.nested; message Foo { Bar x = 1; // usage before declaration // nested message message Bar { Baz a = 1; // usage before declaration // doubly nested enum enum Baz {VALUE = 0;} Baz b = 2; // usage after declaration // doubly nested message message Quux {} Quux c = 3; } Bar y = 2; // usage after declaration // same 
name, different scope enum Baz {VALUE = 0;} Baz shallow = 3; Bar.Baz deep = 4; // usage of deeply nested child enum } protoc-gen-star-2.0.3/testdata/graph/packageless/000077500000000000000000000000001440740147700217445ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/packageless/packageless.proto000066400000000000000000000003121440740147700253070ustar00rootroot00000000000000syntax="proto3"; // no package declaration! message RootMessage { RootEnum field = 1; message NestedMsg {} enum NestedEnum { VALUE = 0; } } enum RootEnum { VALUE = 0; } protoc-gen-star-2.0.3/testdata/graph/services/000077500000000000000000000000001440740147700213055ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/graph/services/services.proto000066400000000000000000000012001440740147700242060ustar00rootroot00000000000000syntax="proto3"; package graph.services; message BeforeRequest {} message BeforeResponse { int32 foo = 99; // comment } service Empty {} // unary only methods service Unary { // message come before rpc UnaryBefore(BeforeRequest) returns (BeforeResponse); // messages come after rpc UnaryAfter(AfterRequest) returns (AfterResponse); } service Streaming { rpc ClientStream(stream BeforeRequest) returns (BeforeResponse); rpc ServerStream(AfterRequest) returns (stream AfterResponse); rpc BiDiStream(stream BeforeRequest) returns (stream AfterResponse); } message AfterRequest {} message AfterResponse {} protoc-gen-star-2.0.3/testdata/protoc-gen-example/000077500000000000000000000000001440740147700220675ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/protoc-gen-example/jsonify.go000066400000000000000000000057161440740147700241100ustar00rootroot00000000000000package main import ( "text/template" pgsgo "github.com/lyft/protoc-gen-star/v2/lang/go" pgs "github.com/lyft/protoc-gen-star/v2" ) // JSONifyPlugin adds encoding/json Marshaler and Unmarshaler methods on PB // messages that utilizes the more correct jsonpb package. 
// See: https://godoc.org/github.com/golang/protobuf/jsonpb type JSONifyModule struct { *pgs.ModuleBase ctx pgsgo.Context tpl *template.Template } // JSONify returns an initialized JSONifyPlugin func JSONify() *JSONifyModule { return &JSONifyModule{ModuleBase: &pgs.ModuleBase{}} } func (p *JSONifyModule) InitContext(c pgs.BuildContext) { p.ModuleBase.InitContext(c) p.ctx = pgsgo.InitContext(c.Parameters()) tpl := template.New("jsonify").Funcs(map[string]interface{}{ "package": p.ctx.PackageName, "name": p.ctx.Name, "marshaler": p.marshaler, "unmarshaler": p.unmarshaler, }) p.tpl = template.Must(tpl.Parse(jsonifyTpl)) } // Name satisfies the generator.Plugin interface. func (p *JSONifyModule) Name() string { return "jsonify" } func (p *JSONifyModule) Execute(targets map[string]pgs.File, pkgs map[string]pgs.Package) []pgs.Artifact { for _, t := range targets { p.generate(t) } return p.Artifacts() } func (p *JSONifyModule) generate(f pgs.File) { if len(f.Messages()) == 0 { return } name := p.ctx.OutputPath(f).SetExt(".json.go") p.AddGeneratorTemplateFile(name.String(), p.tpl, f) } func (p *JSONifyModule) marshaler(m pgs.Message) pgs.Name { return p.ctx.Name(m) + "JSONMarshaler" } func (p *JSONifyModule) unmarshaler(m pgs.Message) pgs.Name { return p.ctx.Name(m) + "JSONUnmarshaler" } const jsonifyTpl = `package {{ package . }} import ( "bytes" "encoding/json" "github.com/golang/protobuf/jsonpb" ) {{ range .AllMessages }} // {{ marshaler . }} describes the default jsonpb.Marshaler used by all // instances of {{ name . }}. This struct is safe to replace or modify but // should not be done so concurrently. var {{ marshaler . }} = new(jsonpb.Marshaler) // MarshalJSON satisfies the encoding/json Marshaler interface. This method // uses the more correct jsonpb package to correctly marshal the message. func (m *{{ name . }}) MarshalJSON() ([]byte, error) { if m == nil { return json.Marshal(nil) } buf := &bytes.Buffer{} if err := {{ marshaler . 
}}.Marshal(buf, m); err != nil { return nil, err } return buf.Bytes(), nil } var _ json.Marshaler = (*{{ name . }})(nil) // {{ unmarshaler . }} describes the default jsonpb.Unmarshaler used by all // instances of {{ name . }}. This struct is safe to replace or modify but // should not be done so concurrently. var {{ unmarshaler . }} = new(jsonpb.Unmarshaler) // UnmarshalJSON satisfies the encoding/json Unmarshaler interface. This method // uses the more correct jsonpb package to correctly unmarshal the message. func (m *{{ name . }}) UnmarshalJSON(b []byte) error { return {{ unmarshaler . }}.Unmarshal(bytes.NewReader(b), m) } var _ json.Unmarshaler = (*{{ name . }})(nil) {{ end }} ` protoc-gen-star-2.0.3/testdata/protoc-gen-example/main.go000066400000000000000000000004231440740147700233410ustar00rootroot00000000000000package main import ( pgs "github.com/lyft/protoc-gen-star/v2" pgsgo "github.com/lyft/protoc-gen-star/v2/lang/go" ) func main() { pgs.Init( pgs.DebugEnv("DEBUG"), ).RegisterModule( ASTPrinter(), JSONify(), ).RegisterPostProcessor( pgsgo.GoFmt(), ).Render() } protoc-gen-star-2.0.3/testdata/protoc-gen-example/printer.go000066400000000000000000000052361440740147700241070ustar00rootroot00000000000000package main import ( "fmt" "io" "strings" "bytes" pgs "github.com/lyft/protoc-gen-star/v2" ) type PrinterModule struct { *pgs.ModuleBase } func ASTPrinter() *PrinterModule { return &PrinterModule{ModuleBase: &pgs.ModuleBase{}} } func (p *PrinterModule) Name() string { return "printer" } func (p *PrinterModule) Execute(targets map[string]pgs.File, packages map[string]pgs.Package) []pgs.Artifact { buf := &bytes.Buffer{} for _, f := range targets { p.printFile(f, buf) } return p.Artifacts() } func (p *PrinterModule) printFile(f pgs.File, buf *bytes.Buffer) { p.Push(f.Name().String()) defer p.Pop() buf.Reset() v := initPrintVisitor(buf, "") p.CheckErr(pgs.Walk(v, f), "unable to print AST tree") out := buf.String() if ok, _ := p.Parameters().Bool("log_tree"); ok { 
p.Logf("Proto Tree:\n%s", out) } p.AddGeneratorFile( f.InputPath().SetExt(".tree.txt").String(), out, ) } const ( startNodePrefix = "┳ " subNodePrefix = "┃" leafNodePrefix = "┣" leafNodeSpacer = "━ " ) type PrinterVisitor struct { pgs.Visitor prefix string w io.Writer } func initPrintVisitor(w io.Writer, prefix string) pgs.Visitor { v := PrinterVisitor{ prefix: prefix, w: w, } v.Visitor = pgs.PassThroughVisitor(&v) return v } func (v PrinterVisitor) leafPrefix() string { if strings.HasSuffix(v.prefix, subNodePrefix) { return strings.TrimSuffix(v.prefix, subNodePrefix) + leafNodePrefix } return v.prefix } func (v PrinterVisitor) writeSubNode(str string) pgs.Visitor { fmt.Fprintf(v.w, "%s%s%s\n", v.leafPrefix(), startNodePrefix, str) return initPrintVisitor(v.w, fmt.Sprintf("%s%v", v.prefix, subNodePrefix)) } func (v PrinterVisitor) writeLeaf(str string) { fmt.Fprintf(v.w, "%s%s%s\n", v.leafPrefix(), leafNodeSpacer, str) } func (v PrinterVisitor) VisitFile(f pgs.File) (pgs.Visitor, error) { return v.writeSubNode("File: " + f.Name().String()), nil } func (v PrinterVisitor) VisitMessage(m pgs.Message) (pgs.Visitor, error) { return v.writeSubNode("Message: " + m.Name().String()), nil } func (v PrinterVisitor) VisitEnum(e pgs.Enum) (pgs.Visitor, error) { return v.writeSubNode("Enum: " + e.Name().String()), nil } func (v PrinterVisitor) VisitService(s pgs.Service) (pgs.Visitor, error) { return v.writeSubNode("Service: " + s.Name().String()), nil } func (v PrinterVisitor) VisitEnumValue(ev pgs.EnumValue) (pgs.Visitor, error) { v.writeLeaf(ev.Name().String()) return nil, nil } func (v PrinterVisitor) VisitField(f pgs.Field) (pgs.Visitor, error) { v.writeLeaf(f.Name().String()) return nil, nil } func (v PrinterVisitor) VisitMethod(m pgs.Method) (pgs.Visitor, error) { v.writeLeaf(m.Name().String()) return nil, nil } 
protoc-gen-star-2.0.3/testdata/protos/000077500000000000000000000000001440740147700177075ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/protos/kitchen/000077500000000000000000000000001440740147700213345ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/protos/kitchen/emptyservice.proto000066400000000000000000000002211440740147700251330ustar00rootroot00000000000000syntax = "proto3"; package kitchen; option go_package = "github.com/lyft/protoc-gen-star/testdata/generated/kitchen"; service EmptyService { } protoc-gen-star-2.0.3/testdata/protos/kitchen/kitchen.proto000066400000000000000000000027041440740147700240510ustar00rootroot00000000000000syntax = "proto3"; package kitchen; option go_package = "github.com/lyft/protoc-gen-star/testdata/generated/kitchen"; import "kitchen/sink.proto"; import "google/protobuf/timestamp.proto"; enum Style { CONTEMPORARY = 0; COTTAGE_CHARM = 1; MEDITERRANEAN = 2; COASTAL = 3; OLD_WORLD = 4; TRADITIONAL = 5; } message Kitchen { Style style = 1; Sink sink = 2; repeated string utensils = 4; map dish_counts = 5; repeated Color wall_colors = 6; map appliance_colors = 7; } message Color { oneof model { string pantone = 1; RGB rgb = 2; CMYK cmyk = 3; } double alpha = 4; message RGB { uint32 r = 1; uint32 g = 2; uint32 b = 3; } message CMYK { uint32 c = 1; uint32 m = 2; uint32 y = 3; uint32 k = 4; } } service Cooking { rpc Saute(SauteRequest) returns (SauteResponse); rpc DispenseIce(IceRequest) returns (stream IceResponse); rpc LoadFridge(stream GroceryItem) returns (LoadSummary); rpc OrderDrinks(stream DrinkOrder) returns (stream PreparedDrink); rpc CheckBestByDate(GroceryItem) returns (google.protobuf.Timestamp); } message SauteRequest {} message SauteResponse {} message IceRequest {} message IceResponse {} message GroceryItem {} message LoadSummary {} message DrinkOrder {} message PreparedDrink {} 
protoc-gen-star-2.0.3/testdata/protos/kitchen/sink.proto000066400000000000000000000021121440740147700233610ustar00rootroot00000000000000syntax = "proto3"; package kitchen; option go_package = "github.com/lyft/protoc-gen-star/testdata/generated/kitchen"; import "google/protobuf/timestamp.proto"; message Sink { Brand brand = 1; Material material = 2; string model = 3; uint32 basin_count = 4; google.protobuf.Timestamp installed = 5; enum Brand { KRAUS = 0; SWANSTONE = 1; HOUZER = 2; BLANCO = 3; KOHLER = 4; } message Material { Type type = 1; Finish finish = 2; enum Type { STAINLESS_STEEL = 0; COPPER = 1; GRANITE = 2; SOAPSTONE = 3; CERAMIC = 4; CAST_IRON = 5; } enum Finish { NONE = 0; POLISHED = 1; MIRROR = 2; HAMMERED_MIRROR = 3; BRIGHT_SATIN = 4; BRUSHED_SATIN = 5; } } } protoc-gen-star-2.0.3/testdata/protos/multipackage/000077500000000000000000000000001440740147700223555ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/protos/multipackage/bar/000077500000000000000000000000001440740147700231215ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/protos/multipackage/bar/baz/000077500000000000000000000000001440740147700236755ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/protos/multipackage/bar/baz/quux.proto000066400000000000000000000003331440740147700257630ustar00rootroot00000000000000syntax = "proto3"; package baz; option go_package = "github.com/lyft/protoc-gen-star/testdata/generated/multipackage/bar/baz"; message Quux { oneof id { uint32 number = 1; string name = 2; } } protoc-gen-star-2.0.3/testdata/protos/multipackage/bar/buzz.proto000066400000000000000000000003471440740147700252040ustar00rootroot00000000000000syntax = "proto3"; package bar; option go_package = "github.com/lyft/protoc-gen-star/testdata/generated/multipackage/bar"; import "multipackage/bar/baz/quux.proto"; message Buzz { uint64 id = 1; baz.Quux quux = 2; } 
protoc-gen-star-2.0.3/testdata/protos/multipackage/foo/000077500000000000000000000000001440740147700231405ustar00rootroot00000000000000protoc-gen-star-2.0.3/testdata/protos/multipackage/foo/fizz.proto000066400000000000000000000003571440740147700252140ustar00rootroot00000000000000syntax = "proto3"; package foo; option go_package = "github.com/lyft/protoc-gen-star/testdata/generated/multipackage/foo"; import "multipackage/bar/buzz.proto"; message Fizz { uint64 id = 1; bar.Buzz three_five = 2; } protoc-gen-star-2.0.3/testutils/000077500000000000000000000000001440740147700166105ustar00rootroot00000000000000protoc-gen-star-2.0.3/testutils/loader.go000066400000000000000000000123641440740147700204130ustar00rootroot00000000000000package testutils import ( "io" "io/ioutil" "os/exec" "path/filepath" pgs "github.com/lyft/protoc-gen-star/v2" "github.com/spf13/afero" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) // The T interface represents a reduced API of the testing.T and testing.B // standard library types used by the Loader. type T interface { Logf(format string, args ...interface{}) Fatal(args ...interface{}) Fatalf(format string, args ...interface{}) } // Loader is a testing utility that can resolve an AST in a variety of manners. // The loader can be used to provide entities to test functions. type Loader struct { // Protoc specifies the path to the `protoc` executable. If empty, the Loader // attempts to execute protoc via PATH. Protoc string // ImportPaths includes any extra -I (or --proto_path) flags to the protoc // execution required to resolve all proto dependencies. ImportPaths []string // BiDirectional specifies whether or not the AST should be resolved with // bidirectional AST resolution. BiDirectional bool // FS overrides the file system used by the Loader. FS must be nil or an // instance of *afero.OsFs if LoadProtos is called. 
FS afero.Fs } // LoadProtos executes protoc against the provided files (or globs, as defined // by filepath.Glob), returning a resolved pgs.AST. The test/benchmark is // fatally stopped if there is any error. // // This function requires the Loader's FS field to be nil or an instance of // *afero.OsFs, otherwise, t will be immediately failed. func (l Loader) LoadProtos(t T, files ...string) (ast pgs.AST) { switch l.FS.(type) { case nil, *afero.OsFs: // noop default: t.Fatal("cannot use LoadProtos with a non-OS file system") return nil } protoc := l.resolveProtoc(t) targets := l.resolveTargets(t, files...) l.withTempDir(t, func(tmpDir string) { tmpFile := filepath.Join(tmpDir, "fdset.bin") args := l.resolveArgs(tmpFile, targets) if out, err := exec.Command(protoc, args...).CombinedOutput(); err != nil { t.Fatalf("protoc execution failed with the following error: %v | Std Out/Err: \n%s", err, string(out)) return } ast = l.LoadFDSet(t, tmpFile) }) return ast } // LoadFDSet resolves an AST from a serialized FileDescriptorSet file path on // l.FS. The test/benchmark is fatally stopped if there is any error. func (l Loader) LoadFDSet(t T, path string) (ast pgs.AST) { fs := l.resolveFS() file, err := fs.Open(path) if err != nil { t.Fatalf("unable to open fdset from path %q: %v", path, err) return nil } defer func() { if ferr := file.Close(); ferr != nil { t.Logf("unable to close fdset from path %q: %v", path, ferr) } }() return l.LoadFDSetReader(t, file) } // LoadFDSetReader resolve an AST from a serialized FileDescriptorSet in r. The // test/benchmark is fatally stopped if there is any error. 
func (l Loader) LoadFDSetReader(t T, r io.Reader) (ast pgs.AST) { raw, err := ioutil.ReadAll(r) if err != nil { t.Fatalf("unable to read fdset: %v", err) return nil } fdset := &descriptor.FileDescriptorSet{} if err = proto.Unmarshal(raw, fdset); err != nil { t.Fatalf("unable to unmarshal fdset: %v", err) return nil } d := pgs.InitMockDebugger() defer func() { // Recovery here is required if either Process panics due to how the MockDebugger // short circuits the processor (which can currently cause an NPE). if err := recover(); err != nil { buf, _ := ioutil.ReadAll(d.Output()) t.Fatalf("failed to process fdset:\n%s", string(buf)) ast = nil } }() if l.BiDirectional { ast = pgs.ProcessFileDescriptorSetBidirectional(d, fdset) } else { ast = pgs.ProcessFileDescriptorSet(d, fdset) } if d.Failed() || d.Exited() { buf, _ := ioutil.ReadAll(d.Output()) t.Fatalf("failed to process fdset:\n%s", string(buf)) return nil } return ast } func (l Loader) resolveFS() afero.Fs { if l.FS == nil { return afero.NewOsFs() } return l.FS } func (l Loader) resolveProtoc(t T) string { if l.Protoc == "" { l.Protoc = "protoc" } path, err := exec.LookPath(l.Protoc) if err != nil { t.Fatalf("could not find executable protoc: %v", err) return l.Protoc } return path } func (l Loader) resolveArgs(tmpFile string, targets []string) []string { args := make([]string, 0, 4+len(targets)+2*len(l.ImportPaths)) args = append( args, "-o", tmpFile, "--include_imports", "--include_source_info", ) for _, imp := range l.ImportPaths { args = append(args, "-I", imp) } return append(args, targets...) } func (l Loader) resolveTargets(t T, files ...string) []string { fs := l.resolveFS() targets := make([]string, 0, len(files)) for _, file := range files { matches, err := afero.Glob(fs, file) if err != nil { t.Fatalf("could not resolve glob %q: %v", file, err) return nil } targets = append(targets, matches...) 
} if len(targets) == 0 { t.Fatal("no proto files specified") return nil } return targets } func (l Loader) withTempDir(t T, fn func(tempDir string)) { fs := l.resolveFS() tmpDir, err := afero.TempDir(fs, "", "pgs-testutils") if err != nil { t.Fatalf("could not create temp directory: %v", err) return } defer func() { if ferr := fs.RemoveAll(tmpDir); ferr != nil { t.Logf("failed to cleanup temp directory: %v", ferr) } }() fn(tmpDir) } protoc-gen-star-2.0.3/testutils/loader_test.go000066400000000000000000000210611440740147700214440ustar00rootroot00000000000000package testutils import ( "bytes" "errors" "fmt" "io" "os" "os/exec" "strings" "testing" "github.com/spf13/afero" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" descriptor "google.golang.org/protobuf/types/descriptorpb" ) func TestResolveProtoc(t *testing.T) { t.Parallel() lookupPath, lookupErr := exec.LookPath("protoc") t.Run("from PATH", func(t *testing.T) { t.Parallel() l := Loader{} mt := &mockT{} if lookupErr != nil { t.Skip("no protoc in PATH: ", lookupErr) return } assert.Equal(t, lookupPath, l.resolveProtoc(mt)) assert.False(t, mt.failed) }) t.Run("explicit", func(t *testing.T) { t.Parallel() if lookupErr != nil { t.Skip("no protoc in PATH: ", lookupErr) return } l := Loader{Protoc: lookupPath} mt := &mockT{} assert.Equal(t, lookupPath, l.resolveProtoc(mt)) assert.False(t, mt.failed) }) t.Run("invalid", func(t *testing.T) { t.Parallel() l := Loader{Protoc: "/this/is/not/a/real/protoc"} mt := &mockT{} l.resolveProtoc(mt) assert.True(t, mt.failed) }) } func TestResolveFS(t *testing.T) { t.Parallel() l := Loader{} assert.IsType(t, afero.NewOsFs(), l.resolveFS()) fs := afero.NewMemMapFs() l = Loader{FS: fs} assert.Equal(t, fs, l.resolveFS()) } func TestResolveTargets(t *testing.T) { t.Parallel() fs := afero.NewMemMapFs() require.NoError(t, fs.Mkdir("foo", 0755)) files := []string{ "foo/bar.proto", "foo/baz.proto", "fizz.proto", "buzz.proto", } 
for _, path := range files { f, err := fs.Create(path) require.NoError(t, err) require.NoError(t, f.Close()) } l := Loader{FS: fs} t.Run("empty", func(t *testing.T) { t.Parallel() mt := &mockT{} targets := l.resolveTargets(mt) assert.Empty(t, targets) assert.True(t, mt.failed) }) t.Run("empty glob", func(t *testing.T) { t.Parallel() mt := &mockT{} targets := l.resolveTargets(mt, "/not/a/real/*.proto") assert.Empty(t, targets) assert.True(t, mt.failed) }) t.Run("bad glob", func(t *testing.T) { t.Parallel() mt := &mockT{} targets := l.resolveTargets(mt, "[-]") assert.Empty(t, targets) assert.True(t, mt.failed) }) t.Run("success", func(t *testing.T) { t.Parallel() mt := &mockT{} targets := l.resolveTargets(mt, "*/*.proto", "fizz.proto") assert.Len(t, targets, 3) assert.Contains(t, targets, "foo/bar.proto") assert.Contains(t, targets, "foo/baz.proto") assert.Contains(t, targets, "fizz.proto") assert.False(t, mt.failed) }) } func TestResolveArgs(t *testing.T) { t.Parallel() l := Loader{ ImportPaths: []string{"/foo", "/bar"}, } args := l.resolveArgs("fdset.bin", []string{"fizz.proto", "buzz.proto"}) expected := []string{ "-o", "fdset.bin", "--include_imports", "--include_source_info", "-I", "/foo", "-I", "/bar", "fizz.proto", "buzz.proto", } assert.Equal(t, expected, args) } func TestWithTempDir(t *testing.T) { t.Parallel() t.Run("success", func(t *testing.T) { t.Parallel() fs := afero.NewMemMapFs() l := Loader{FS: fs} mt := &mockT{} var dir string l.withTempDir(mt, func(tempDir string) { info, err := fs.Stat(tempDir) require.NoError(t, err) assert.True(t, info.IsDir()) dir = tempDir }) assert.False(t, mt.failed) _, err := fs.Stat(dir) assert.Error(t, err) }) t.Run("failure", func(t *testing.T) { t.Parallel() fs := afero.NewMemMapFs() fs = afero.NewReadOnlyFs(fs) l := Loader{FS: fs} mt := &mockT{} l.withTempDir(mt, func(tempDir string) { assert.Fail(t, "should not have reached here") }) assert.True(t, mt.failed) }) t.Run("fail cleanup", func(t *testing.T) { t.Parallel() 
fs := afero.NewMemMapFs() fs = disallowRemoveAllFS{Fs: fs} l := Loader{FS: fs} mt := &mockT{} var executed bool l.withTempDir(mt, func(tempDir string) { executed = true }) assert.True(t, executed) assert.False(t, mt.failed) assert.NotEmpty(t, mt.log) }) } func TestLoader_LoadFDSetReader(t *testing.T) { t.Parallel() raw, err := proto.Marshal(dummyFDSet()) require.NoError(t, err) t.Run("success - no bidi", func(t *testing.T) { t.Parallel() mt := &mockT{} l := Loader{} r := bytes.NewReader(raw) ast := l.LoadFDSetReader(mt, r) assert.False(t, mt.failed) assert.NotNil(t, ast) assert.NotEmpty(t, ast.Packages()) }) t.Run("success - bidi", func(t *testing.T) { t.Parallel() mt := &mockT{} l := Loader{BiDirectional: true} r := bytes.NewReader(raw) ast := l.LoadFDSetReader(mt, r) assert.False(t, mt.failed) assert.NotNil(t, ast) assert.NotEmpty(t, ast.Packages()) }) t.Run("broken reader", func(t *testing.T) { t.Parallel() mt := &mockT{} r := brokenReader{} l := Loader{} ast := l.LoadFDSetReader(mt, r) assert.Nil(t, ast) assert.True(t, mt.failed) }) t.Run("unmarshal error", func(t *testing.T) { t.Parallel() mt := &mockT{} r := strings.NewReader("this is not a fdset") l := Loader{} ast := l.LoadFDSetReader(mt, r) assert.Nil(t, ast) assert.True(t, mt.failed) }) t.Run("process error", func(t *testing.T) { typ := descriptor.FieldDescriptorProto_TYPE_MESSAGE msg := &descriptor.DescriptorProto{ Name: proto.String("SomeMsg"), Field: []*descriptor.FieldDescriptorProto{{ Name: proto.String("SomeName"), Type: &typ, TypeName: proto.String(".some.unknown.Message"), }}, } fdset := dummyFDSet() fdset.File[0].MessageType = []*descriptor.DescriptorProto{msg} b, pberr := proto.Marshal(fdset) require.NoError(t, pberr) mt := &mockT{} r := bytes.NewReader(b) l := Loader{} ast := l.LoadFDSetReader(mt, r) assert.Nil(t, ast) assert.True(t, mt.failed) }) } func TestLoader_LoadFDSet(t *testing.T) { t.Parallel() raw, err := proto.Marshal(dummyFDSet()) require.NoError(t, err) fs := afero.NewMemMapFs() 
path := "/fdset.bin" err = afero.WriteFile(fs, path, raw, 0644) require.NoError(t, err) l := Loader{FS: fs} t.Run("success", func(t *testing.T) { t.Parallel() mt := &mockT{} ast := l.LoadFDSet(mt, path) assert.NotNil(t, ast) assert.False(t, mt.failed) }) t.Run("cannot open", func(t *testing.T) { t.Parallel() mt := &mockT{} ast := l.LoadFDSet(mt, "/not-a-real.proto") assert.Nil(t, ast) assert.True(t, mt.failed) }) t.Run("cannot close file", func(t *testing.T) { t.Parallel() mt := &mockT{} ldr := Loader{FS: disallowCloseFileFS{fs}} ast := ldr.LoadFDSet(mt, path) assert.NotNil(t, ast) assert.False(t, mt.failed) assert.NotEmpty(t, mt.log) }) } func TestLoader_LoadProtos(t *testing.T) { t.Parallel() if _, err := exec.LookPath("protoc"); err != nil { t.Skip("protoc not found in PATH") return } t.Run("non OS FS", func(t *testing.T) { t.Parallel() l := Loader{FS: afero.NewMemMapFs()} mt := &mockT{} ast := l.LoadProtos(mt, "*.proto") assert.Nil(t, ast) assert.True(t, mt.failed) }) t.Run("success", func(t *testing.T) { t.Parallel() l := Loader{ImportPaths: []string{"../testdata/protos"}} mt := &mockT{} ast := l.LoadProtos(mt, "../testdata/protos/kitchen/*.proto") assert.NotNil(t, ast) assert.False(t, mt.failed) }) t.Run("protoc error", func(t *testing.T) { t.Parallel() l := Loader{} mt := &mockT{} ast := l.LoadProtos(mt, "../testdata/protos/kitchen/kitchen.proto") assert.Nil(t, ast) assert.True(t, mt.failed) }) } func dummyFDSet() *descriptor.FileDescriptorSet { f := &descriptor.FileDescriptorProto{ Name: proto.String("foo.proto"), Package: proto.String("testutil"), Syntax: proto.String("proto3"), } return &descriptor.FileDescriptorSet{ File: []*descriptor.FileDescriptorProto{f}, } } type mockT struct { log string failed bool } func (m *mockT) Logf(format string, args ...interface{}) { m.log = fmt.Sprintf(format, args...) } func (m *mockT) Fatal(args ...interface{}) { m.failed = true m.log = fmt.Sprint(args...) 
} func (m *mockT) Fatalf(format string, args ...interface{}) { m.failed = true m.log = fmt.Sprintf(format, args...) } type disallowRemoveAllFS struct { afero.Fs } func (disallowRemoveAllFS) RemoveAll(_ string) error { return os.ErrPermission } type brokenReader struct{} func (b brokenReader) Read(_ []byte) (int, error) { return 0, io.ErrUnexpectedEOF } type disallowCloseFile struct { afero.File } func (disallowCloseFile) Close() error { return errors.New("cannot close file") } type disallowCloseFileFS struct { afero.Fs } func (fs disallowCloseFileFS) Open(path string) (afero.File, error) { file, err := fs.Fs.Open(path) if err != nil { return file, err } return disallowCloseFile{file}, nil } var ( _ T = (*testing.T)(nil) _ T = (*testing.B)(nil) ) protoc-gen-star-2.0.3/tools.go000066400000000000000000000001561440740147700162410ustar00rootroot00000000000000//go:build tools // +build tools package tools import ( _ "google.golang.org/protobuf/cmd/protoc-gen-go" ) protoc-gen-star-2.0.3/wkt.go000066400000000000000000000046121440740147700157070ustar00rootroot00000000000000package pgs // WellKnownTypePackage is the proto package name where all Well Known Types // currently reside. const WellKnownTypePackage Name = "google.protobuf" // WellKnownType (WKT) encapsulates the Name of a Message from the // `google.protobuf` package. Most official protoc plugins special case code // generation on these messages. type WellKnownType Name // 1-to-1 mapping of the WKT names to WellKnownTypes. const ( // UnknownWKT indicates that the type is not a known WKT. This value may be // returned erroneously mapping a Name to a WellKnownType or if a WKT is // added to the `google.protobuf` package but this library is outdated. 
UnknownWKT WellKnownType = "Unknown" AnyWKT WellKnownType = "Any" DurationWKT WellKnownType = "Duration" EmptyWKT WellKnownType = "Empty" StructWKT WellKnownType = "Struct" TimestampWKT WellKnownType = "Timestamp" ValueWKT WellKnownType = "Value" ListValueWKT WellKnownType = "ListValue" DoubleValueWKT WellKnownType = "DoubleValue" FloatValueWKT WellKnownType = "FloatValue" Int64ValueWKT WellKnownType = "Int64Value" UInt64ValueWKT WellKnownType = "UInt64Value" Int32ValueWKT WellKnownType = "Int32Value" UInt32ValueWKT WellKnownType = "UInt32Value" BoolValueWKT WellKnownType = "BoolValue" StringValueWKT WellKnownType = "StringValue" BytesValueWKT WellKnownType = "BytesValue" ) var wktLookup = map[Name]WellKnownType{ "Any": AnyWKT, "Duration": DurationWKT, "Empty": EmptyWKT, "Struct": StructWKT, "Timestamp": TimestampWKT, "Value": ValueWKT, "ListValue": ListValueWKT, "DoubleValue": DoubleValueWKT, "FloatValue": FloatValueWKT, "Int64Value": Int64ValueWKT, "UInt64Value": UInt64ValueWKT, "Int32Value": Int32ValueWKT, "UInt32Value": UInt32ValueWKT, "BoolValue": BoolValueWKT, "StringValue": StringValueWKT, "BytesValue": BytesValueWKT, } // LookupWKT returns the WellKnownType related to the provided Name. If the // name is not recognized, UnknownWKT is returned. func LookupWKT(n Name) WellKnownType { if wkt, ok := wktLookup[n]; ok { return wkt } return UnknownWKT } // Name converts the WellKnownType to a Name. This is a convenience method. func (wkt WellKnownType) Name() Name { return Name(wkt) } // Valid returns true if the WellKnownType is recognized by this library. 
func (wkt WellKnownType) Valid() bool { _, ok := wktLookup[wkt.Name()] return ok } protoc-gen-star-2.0.3/wkt_test.go000066400000000000000000000017521440740147700167500ustar00rootroot00000000000000package pgs import ( "testing" "github.com/stretchr/testify/assert" ) func TestLookupWKT(t *testing.T) { t.Parallel() tests := []struct { name Name expected WellKnownType }{ {"Any", AnyWKT}, {"Duration", DurationWKT}, {"Empty", EmptyWKT}, {"Foobar", UnknownWKT}, } for _, test := range tests { tc := test t.Run(tc.name.String(), func(t *testing.T) { t.Parallel() assert.Equal(t, tc.expected, LookupWKT(tc.name)) }) } } func TestWellKnownType_Name(t *testing.T) { t.Parallel() wkt := WellKnownType("Foobar") assert.Equal(t, Name("Foobar"), wkt.Name()) } func TestWellKnownType_Valid(t *testing.T) { t.Parallel() tests := []struct { wkt WellKnownType expected bool }{ {AnyWKT, true}, {Int64ValueWKT, true}, {UnknownWKT, false}, {WellKnownType("Foobar"), false}, } for _, test := range tests { tc := test t.Run(tc.wkt.Name().String(), func(t *testing.T) { t.Parallel() assert.Equal(t, tc.expected, tc.wkt.Valid()) }) } } protoc-gen-star-2.0.3/workflow.go000066400000000000000000000050021440740147700167460ustar00rootroot00000000000000package pgs import ( "io/ioutil" "sync" "google.golang.org/protobuf/proto" plugin_go "google.golang.org/protobuf/types/pluginpb" ) type workflow interface { Init(*Generator) AST Run(AST) []Artifact Persist([]Artifact) } // standardWorkflow describes a typical protoc-plugin flow, with the only // exception being the behavior of the persister directly writing custom file // artifacts to disk (instead of via the plugin's output to protoc). 
type standardWorkflow struct { *Generator BiDi bool } func (wf *standardWorkflow) Init(g *Generator) AST { wf.Generator = g wf.Debug("reading input") data, err := ioutil.ReadAll(g.in) wf.CheckErr(err, "reading input") wf.Debug("parsing input proto") req := new(plugin_go.CodeGeneratorRequest) err = proto.Unmarshal(data, req) wf.CheckErr(err, "parsing input proto") wf.Assert(len(req.FileToGenerate) > 0, "no files to generate") wf.Debug("parsing command-line params") wf.params = ParseParameters(req.GetParameter()) for _, pm := range wf.paramMutators { pm(wf.params) } if wf.BiDi { return ProcessCodeGeneratorRequestBidirectional(g, req) } return ProcessCodeGeneratorRequest(g, req) } func (wf *standardWorkflow) Run(ast AST) (arts []Artifact) { ctx := Context(wf.Debugger, wf.params, wf.params.OutputPath()) wf.Debug("initializing modules") for _, m := range wf.mods { m.InitContext(ctx.Push(m.Name())) } wf.Debug("executing modules") for _, m := range wf.mods { arts = append(arts, m.Execute(ast.Targets(), ast.Packages())...) } return } func (wf *standardWorkflow) Persist(arts []Artifact) { resp := wf.persister.Persist(arts...) data, err := proto.Marshal(resp) wf.CheckErr(err, "marshaling output proto") n, err := wf.out.Write(data) wf.CheckErr(err, "writing output proto") wf.Assert(len(data) == n, "failed to write all output") wf.Debug("rendering successful") } // onceWorkflow wraps an existing workflow, executing its methods exactly // once. Subsequent calls will ignore their inputs and use the previously // provided values. 
type onceWorkflow struct { workflow initOnce sync.Once ast AST runOnce sync.Once arts []Artifact persistOnce sync.Once } func (wf *onceWorkflow) Init(g *Generator) AST { wf.initOnce.Do(func() { wf.ast = wf.workflow.Init(g) }) return wf.ast } func (wf *onceWorkflow) Run(ast AST) []Artifact { wf.runOnce.Do(func() { wf.arts = wf.workflow.Run(ast) }) return wf.arts } func (wf *onceWorkflow) Persist(artifacts []Artifact) { wf.persistOnce.Do(func() { wf.workflow.Persist(artifacts) }) } protoc-gen-star-2.0.3/workflow_test.go000066400000000000000000000041651440740147700200160ustar00rootroot00000000000000package pgs import ( "bytes" "io/ioutil" "testing" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/proto" plugin_go "google.golang.org/protobuf/types/pluginpb" ) func TestStandardWorkflow_Init(t *testing.T) { t.Parallel() req := &plugin_go.CodeGeneratorRequest{FileToGenerate: []string{"foo"}} b, err := proto.Marshal(req) assert.NoError(t, err) mutated := false g := Init(ProtocInput(bytes.NewReader(b)), MutateParams(func(p Parameters) { mutated = true })) g.workflow.Init(g) assert.True(t, mutated) t.Run("bidi", func(t *testing.T) { mutated = false g = Init(ProtocInput(bytes.NewReader(b)), BiDirectional(), MutateParams(func(p Parameters) { mutated = true })) g.workflow.Init(g) assert.True(t, mutated) }) } func TestStandardWorkflow_Run(t *testing.T) { t.Parallel() g := Init() g.workflow = &standardWorkflow{Generator: g} g.params = Parameters{} m := newMockModule() m.name = "foo" g.RegisterModule(m) g.workflow.Run(&graph{}) assert.True(t, m.executed) } func TestStandardWorkflow_Persist(t *testing.T) { t.Parallel() g := Init(ProtocOutput(ioutil.Discard)) g.workflow = &standardWorkflow{Generator: g} g.persister = dummyPersister(g.Debugger) assert.NotPanics(t, func() { g.workflow.Persist(nil) }) } func TestOnceWorkflow(t *testing.T) { t.Parallel() d := &dummyWorkflow{ AST: &graph{}, Artifacts: []Artifact{&CustomFile{}}, } wf := &onceWorkflow{workflow: d} ast := 
wf.Init(nil) arts := wf.Run(ast) wf.Persist(arts) assert.True(t, d.initted) assert.True(t, d.run) assert.True(t, d.persisted) d = &dummyWorkflow{} wf.workflow = d assert.Equal(t, ast, wf.Init(nil)) assert.Equal(t, arts, wf.Run(ast)) wf.Persist(arts) assert.False(t, d.initted) assert.False(t, d.run) assert.False(t, d.persisted) } type dummyWorkflow struct { AST AST Artifacts []Artifact initted, run, persisted bool } func (wf *dummyWorkflow) Init(g *Generator) AST { wf.initted = true; return wf.AST } func (wf *dummyWorkflow) Run(ast AST) []Artifact { wf.run = true; return wf.Artifacts } func (wf *dummyWorkflow) Persist(arts []Artifact) { wf.persisted = true }