diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000..ebc6d86 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,8 @@ +module.exports = { + extends: ["plugin:@einride/default"], + rules: { + "jest/no-deprecated-functions": "off", // we're not using Jest + "prettier/prettier": "off", // we're not concerned with code style + "@typescript-eslint/ban-ts-comment": "off", // we need ts comment in generated files to disable type checking of files for consumers + }, +}; diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..f668a99 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.idea +.vscode +.semrel +.generated-go-semantic-release-changelog.md diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 0000000..b644110 --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,25 @@ +before: + hooks: + - go mod download + +builds: + - id: protoc-gen-typescript-http + binary: protoc-gen-typescript-http + dir: . + main: main.go + env: + - CGO_ENABLED=0 + goos: + - linux + - windows + - darwin + +checksum: + name_template: "checksums.txt" + +snapshot: + name_template: "{{ .Tag }}-next" + +release: + github: + prerelease: auto diff --git a/.sage/einrideeslint.go b/.sage/einrideeslint.go new file mode 100644 index 0000000..ca4c423 --- /dev/null +++ b/.sage/einrideeslint.go @@ -0,0 +1,62 @@ +package main + +import ( + "context" + "os" + "os/exec" + "path/filepath" + + "go.einride.tech/sage/sg" + "go.einride.tech/sage/sgtool" +) + +const ( + name = "eslint" + packageJSONContent = `{ + "dependencies": { + "@einride/eslint-plugin": "4.2.0", + "eslint": "8.5.0" + } +}` +) + +func eslintCommand(ctx context.Context, args ...string) *exec.Cmd { + sg.Deps(ctx, prepareEslintCommand) + // eslint plugins should be resolved from the tool dir + defaultArgs := []string{ + "--resolve-plugins-relative-to", + sg.FromToolsDir(name), + } + cmd := sg.Command(ctx, sg.FromBinDir(name), append(defaultArgs, args...)...) 
+ return cmd +} + +func prepareEslintCommand(ctx context.Context) error { + toolDir := sg.FromToolsDir(name) + binary := filepath.Join(toolDir, "node_modules", ".bin", name) + packageJSON := filepath.Join(toolDir, "package.json") + if err := os.MkdirAll(toolDir, 0o755); err != nil { + return err + } + if err := os.WriteFile(packageJSON, []byte(packageJSONContent), 0o600); err != nil { + return err + } + sg.Logger(ctx).Println("installing packages...") + if err := sg.Command( + ctx, + "npm", + "--silent", + "install", + "--prefix", + toolDir, + "--no-save", + "--no-audit", + "--ignore-script", + ).Run(); err != nil { + return err + } + if _, err := sgtool.CreateSymlink(binary); err != nil { + return err + } + return nil +} diff --git a/.sage/go.mod b/.sage/go.mod new file mode 100644 index 0000000..6821ad1 --- /dev/null +++ b/.sage/go.mod @@ -0,0 +1,5 @@ +module sage + +go 1.22 + +require go.einride.tech/sage v0.334.0 diff --git a/.sage/go.sum b/.sage/go.sum new file mode 100644 index 0000000..f1adae2 --- /dev/null +++ b/.sage/go.sum @@ -0,0 +1,2 @@ +go.einride.tech/sage v0.334.0 h1:HY/u8jM2UQ/BerH+vk7JtXf2hg6J0z99Fh0YAQ4QeaE= +go.einride.tech/sage v0.334.0/go.mod h1:EzV5uciFX7/2ho8EKB5K9JghOfXIxlzs694b+Tkl5GQ= diff --git a/.sage/main.go b/.sage/main.go new file mode 100644 index 0000000..757ad6e --- /dev/null +++ b/.sage/main.go @@ -0,0 +1,121 @@ +package main + +import ( + "context" + + "go.einride.tech/sage/sg" + "go.einride.tech/sage/tools/sgconvco" + "go.einride.tech/sage/tools/sggit" + "go.einride.tech/sage/tools/sggo" + "go.einride.tech/sage/tools/sggolangcilint" + "go.einride.tech/sage/tools/sggoreleaser" + "go.einride.tech/sage/tools/sggosemanticrelease" + "go.einride.tech/sage/tools/sgmdformat" + "go.einride.tech/sage/tools/sgyamlfmt" +) + +func main() { + sg.GenerateMakefiles( + sg.Makefile{ + Path: sg.FromGitRoot("Makefile"), + DefaultTarget: All, + }, + sg.Makefile{ + Namespace: Proto{}, + DefaultTarget: Proto.All, + Path: sg.FromGitRoot("examples", "proto", "Makefile"), + }, + ) +} + +func All(ctx context.Context) error { + sg.Deps(ctx, ConvcoCheck, GoLint, GoTest, FormatMarkdown, FormatYAML) + sg.SerialDeps(ctx, Proto.All, TypescriptLint) + sg.SerialDeps(ctx, GoModTidy, GitVerifyNoDiff) + return nil +} + +func FormatYAML(ctx context.Context) error { + sg.Logger(ctx).Println("formatting YAML files...") + return sgyamlfmt.Run(ctx) +} + +func GoModTidy(ctx context.Context) error { + sg.Logger(ctx).Println("tidying Go module files...") + return sg.Command(ctx, "go", "mod", "tidy", "-v").Run() +} + +func GoTest(ctx context.Context) error { + sg.Logger(ctx).Println("running Go tests...") + return sggo.TestCommand(ctx).Run() +} + +func GoLint(ctx context.Context) error { + sg.Logger(ctx).Println("linting Go files...") + return sggolangcilint.Run(ctx) +} + +func GoLintFix(ctx context.Context) error { + sg.Logger(ctx).Println("fixing Go files...") + return sggolangcilint.Fix(ctx) +} + +func FormatMarkdown(ctx context.Context) error { + sg.Logger(ctx).Println("formatting Markdown files...") + return sgmdformat.Command(ctx).Run() +} + +func ConvcoCheck(ctx context.Context) error { + sg.Logger(ctx).Println("checking git commits...") + return sgconvco.Command(ctx, "check", "origin/master..HEAD").Run() +} + +func GitVerifyNoDiff(ctx context.Context) error { + sg.Logger(ctx).Println("verifying that git has no diff...") + return sggit.VerifyNoDiff(ctx) +} + +func TypescriptLint(ctx context.Context) error { + sg.Logger(ctx).Println("linting typescript files...") + return eslintCommand( + ctx, 
+ "--config", + sg.FromGitRoot(".eslintrc.js"), + "--quiet", + "examples/proto/gen/typescript/**/*.ts", + ).Run() +} + +func SemanticRelease(ctx context.Context, repo string, dry bool) error { + sg.Logger(ctx).Println("triggering release...") + args := []string{ + "--allow-initial-development-versions", + "--allow-no-changes", + "--ci-condition=default", + "--provider=github", + "--provider-opt=slug=" + repo, + } + if dry { + args = append(args, "--dry") + } + return sggosemanticrelease.Command(ctx, args...).Run() +} + +func GoReleaser(ctx context.Context, snapshot bool) error { + sg.Logger(ctx).Println("building Go binary releases...") + if err := sggit.Command(ctx, "fetch", "--force", "--tags").Run(); err != nil { + return err + } + args := []string{ + "release", + "--clean", + } + if len(sggit.Tags(ctx)) == 0 && !snapshot { + sg.Logger(ctx).Printf("no git tag found for %s, forcing snapshot mode", sggit.ShortSHA(ctx)) + snapshot = true + } + if snapshot { + args = append(args, "--snapshot") + } + return sggoreleaser.Command(ctx, args...).Run() +} diff --git a/.sage/proto.go b/.sage/proto.go new file mode 100644 index 0000000..0cb9d4e --- /dev/null +++ b/.sage/proto.go @@ -0,0 +1,56 @@ +package main + +import ( + "context" + + "go.einride.tech/sage/sg" + "go.einride.tech/sage/tools/sgbuf" +) + +type Proto sg.Namespace + +func (Proto) All(ctx context.Context) error { + sg.SerialDeps(ctx, Proto.Build) + sg.Deps(ctx, Proto.BufLint, Proto.BufFormat, Proto.BufGenerate) + return nil +} + +func (Proto) Build(ctx context.Context) error { + sg.Logger(ctx).Println("installing binary...") + return sg.Command( + ctx, + "go", + "build", + "-o", + sg.FromBinDir("protoc-gen-typescript-http"), + ".", + ).Run() +} + +func (Proto) BufLint(ctx context.Context) error { + sg.Logger(ctx).Println("linting proto files...") + cmd := sgbuf.Command(ctx, "lint") + cmd.Dir = sg.FromGitRoot("examples", "proto") + return cmd.Run() +} + +func (Proto) BufFormat(ctx context.Context) error { + sg.Logger(ctx).Println("formatting proto files...") + cmd := sgbuf.Command(ctx, "format", "--write") + cmd.Dir = sg.FromGitRoot("examples", "proto") + return cmd.Run() +} + +func (Proto) BufGenerate(ctx context.Context) error { + sg.Logger(ctx).Println("generating from proto files...") + cmd := sgbuf.Command( + ctx, + "generate", + "--template", + "buf.gen.yaml", + "--path", + "einride", + ) + cmd.Dir = sg.FromGitRoot("examples", "proto") + return cmd.Run() +} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..a8f4ccb --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +- Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or advances of + any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, + without their explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +open-source@einride.tech. + +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the +[Contributor Covenant](https://www.contributor-covenant.org), version 2.0, +available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f99e5fd --- /dev/null +++ b/LICENSE @@ -0,0 +1,19 @@ +Copyright 2020 Einride AB + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..1e9179e --- /dev/null +++ b/Makefile @@ -0,0 +1,105 @@ +# Code generated by go.einride.tech/sage. DO NOT EDIT. +# To learn more, see .sage/main.go and https://github.com/einride/sage. + +.DEFAULT_GOAL := all + +cwd := $(dir $(realpath $(firstword $(MAKEFILE_LIST)))) +sagefile := $(abspath $(cwd)/.sage/bin/sagefile) + +# Setup Go. +go := $(shell command -v go 2>/dev/null) +export GOWORK ?= off +ifndef go +SAGE_GO_VERSION ?= 1.20.2 +export GOROOT := $(abspath $(cwd)/.sage/tools/go/$(SAGE_GO_VERSION)/go) +export PATH := $(PATH):$(GOROOT)/bin +go := $(GOROOT)/bin/go +os := $(shell uname | tr '[:upper:]' '[:lower:]') +arch := $(shell uname -m) +ifeq ($(arch),x86_64) +arch := amd64 +endif +$(go): + $(info installing Go $(SAGE_GO_VERSION)...) + @mkdir -p $(dir $(GOROOT)) + @curl -sSL https://go.dev/dl/go$(SAGE_GO_VERSION).$(os)-$(arch).tar.gz | tar xz -C $(dir $(GOROOT)) + @touch $(GOROOT)/go.mod + @chmod +x $(go) +endif + +.PHONY: $(sagefile) +$(sagefile): $(go) + @cd .sage && $(go) mod tidy && $(go) run . 
+
+.PHONY: sage
+sage:
+	@$(MAKE) $(sagefile)
+
+.PHONY: update-sage
+update-sage: $(go)
+	@cd .sage && $(go) get -d go.einride.tech/sage@latest && $(go) mod tidy && $(go) run .
+
+.PHONY: clean-sage
+clean-sage:
+	@git clean -fdx .sage/tools .sage/bin .sage/build
+
+.PHONY: all
+all: $(sagefile)
+	@$(sagefile) All
+
+.PHONY: convco-check
+convco-check: $(sagefile)
+	@$(sagefile) ConvcoCheck
+
+.PHONY: format-markdown
+format-markdown: $(sagefile)
+	@$(sagefile) FormatMarkdown
+
+.PHONY: format-yaml
+format-yaml: $(sagefile)
+	@$(sagefile) FormatYAML
+
+.PHONY: git-verify-no-diff
+git-verify-no-diff: $(sagefile)
+	@$(sagefile) GitVerifyNoDiff
+
+.PHONY: go-lint
+go-lint: $(sagefile)
+	@$(sagefile) GoLint
+
+.PHONY: go-lint-fix
+go-lint-fix: $(sagefile)
+	@$(sagefile) GoLintFix
+
+.PHONY: go-mod-tidy
+go-mod-tidy: $(sagefile)
+	@$(sagefile) GoModTidy
+
+.PHONY: go-releaser
+go-releaser: $(sagefile)
+ifndef snapshot
+	$(error missing argument snapshot="...")
+endif
+	@$(sagefile) GoReleaser "$(snapshot)"
+
+.PHONY: go-test
+go-test: $(sagefile)
+	@$(sagefile) GoTest
+
+.PHONY: semantic-release
+semantic-release: $(sagefile)
+ifndef repo
+	$(error missing argument repo="...")
+endif
+ifndef dry
+	$(error missing argument dry="...")
+endif
+	@$(sagefile) SemanticRelease "$(repo)" "$(dry)"
+
+.PHONY: typescript-lint
+typescript-lint: $(sagefile)
+	@$(sagefile) TypescriptLint
+
+.PHONY: proto
+proto:
+	$(MAKE) -C examples/proto -f Makefile
diff --git a/README.md b/README.md
index afa826e..b0f80a9 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,54 @@
-# protoc-gen-ts
+# protoc-gen-typescript-http
-protoc generate typescript by http
\ No newline at end of file
+Generates Typescript types and service clients from protobuf definitions
+annotated with
+[http rules](https://github.com/googleapis/googleapis/blob/master/google/api/http.proto).
+The generated types follow the
+[canonical JSON encoding](https://developers.google.com/protocol-buffers/docs/proto3#json).
+
+**Experimental**: This library is under active development and breaking changes
+to config files, APIs and generated code are expected between releases.
+
+## Using the plugin
+
+For examples of correctly annotated protobuf definitions and the generated code,
+look at [examples](./examples).
+
+### Install the plugin
+
+```bash
+go get git.apinb.com/bsm-tools/protoc-gen-ts
+```
+
+Or download a prebuilt binary from [releases](./releases).
+
+### Invocation
+
+```bash
+protoc \
+  --typescript-http_out [OUTPUT DIR] \
+  [.proto files ...]
+```
+
+______________________________________________________________________
+
+The generated clients can be used with any HTTP client that returns a Promise
+containing JSON data.
+
+```typescript
+const rootUrl = "...";
+
+type Request = {
+  path: string,
+  method: string,
+  body: string | null
+}
+
+function fetchRequestHandler({path, method, body}: Request) {
+  return fetch(rootUrl + path, {method, body}).then(response => response.json())
+}
+
+export function siteClient() {
+  return createShipperServiceClient(fetchRequestHandler);
+}
+```
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..cb13c98
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,41 @@
+# Security Policy
+
+Einride welcomes feedback from security researchers and the general public to
+help improve our security. If you believe you have discovered a vulnerability,
+privacy issue, exposed data, or other security issues in relation to this
+project, we want to hear from you.
This policy outlines steps for reporting +security issues to us, what we expect, and what you can expect from us. + +## Supported versions + +We release patches for security issues according to semantic versioning. This +project is currently unstable (v0.x) and only the latest version will receive +security patches. + +## Reporting a vulnerability + +Please do not report security vulnerabilities through public issues, +discussions, or change requests. + +Please report security issues via [oss-security@einride.tech][email]. Provide +all relevant information, including steps to reproduce the issue, any affected +versions, and known mitigations. The more details you provide, the easier it +will be for us to triage and fix the issue. You will receive a response from us +within 2 business days. If the issue is confirmed, a patch will be released as +soon as possible. + +For more information, or security issues not relating to open source code, +please consult our [Vulnerability Disclosure Policy][vdp]. + +## Preferred languages + +English is our preferred language of communication. + +## Contributions and recognition + +We appreciate every contribution and will do our best to publicly +[acknowledge][acknowledgments] your contributions. + +[acknowledgments]: https://einride.tech/security-acknowledgments.txt +[email]: mailto:oss-security@einride.tech +[vdp]: https://www.einride.tech/vulnerability-disclosure-policy diff --git a/examples/proto/.gitignore b/examples/proto/.gitignore new file mode 100644 index 0000000..cb93f36 --- /dev/null +++ b/examples/proto/.gitignore @@ -0,0 +1,2 @@ +.build/ +.tools/*/*/ diff --git a/examples/proto/Makefile b/examples/proto/Makefile new file mode 100644 index 0000000..438a2ef --- /dev/null +++ b/examples/proto/Makefile @@ -0,0 +1,64 @@ +# Code generated by go.einride.tech/sage. DO NOT EDIT. +# To learn more, see ../../.sage/main.go and https://github.com/einride/sage. + +.DEFAULT_GOAL := all + +cwd := $(dir $(realpath $(firstword $(MAKEFILE_LIST)))) +sagefile := $(abspath $(cwd)/../../.sage/bin/sagefile) + +# Setup Go. +go := $(shell command -v go 2>/dev/null) +export GOWORK ?= off +ifndef go +SAGE_GO_VERSION ?= 1.20.2 +export GOROOT := $(abspath $(cwd)/../../.sage/tools/go/$(SAGE_GO_VERSION)/go) +export PATH := $(PATH):$(GOROOT)/bin +go := $(GOROOT)/bin/go +os := $(shell uname | tr '[:upper:]' '[:lower:]') +arch := $(shell uname -m) +ifeq ($(arch),x86_64) +arch := amd64 +endif +$(go): + $(info installing Go $(SAGE_GO_VERSION)...) + @mkdir -p $(dir $(GOROOT)) + @curl -sSL https://go.dev/dl/go$(SAGE_GO_VERSION).$(os)-$(arch).tar.gz | tar xz -C $(dir $(GOROOT)) + @touch $(GOROOT)/go.mod + @chmod +x $(go) +endif + +.PHONY: $(sagefile) +$(sagefile): $(go) + @cd ../../.sage && $(go) mod tidy && $(go) run . + +.PHONY: sage +sage: + @$(MAKE) $(sagefile) + +.PHONY: update-sage +update-sage: $(go) + @cd ../../.sage && $(go) get -d go.einride.tech/sage@latest && $(go) mod tidy && $(go) run . 
+ +.PHONY: clean-sage +clean-sage: + @git clean -fdx ../../.sage/tools ../../.sage/bin ../../.sage/build + +.PHONY: all +all: $(sagefile) + @$(sagefile) Proto:All + +.PHONY: buf-format +buf-format: $(sagefile) + @$(sagefile) Proto:BufFormat + +.PHONY: buf-generate +buf-generate: $(sagefile) + @$(sagefile) Proto:BufGenerate + +.PHONY: buf-lint +buf-lint: $(sagefile) + @$(sagefile) Proto:BufLint + +.PHONY: build +build: $(sagefile) + @$(sagefile) Proto:Build diff --git a/examples/proto/buf.gen.yaml b/examples/proto/buf.gen.yaml new file mode 100644 index 0000000..649b478 --- /dev/null +++ b/examples/proto/buf.gen.yaml @@ -0,0 +1,5 @@ +version: v1 + +plugins: + - name: typescript-http + out: gen/typescript diff --git a/examples/proto/buf.lock b/examples/proto/buf.lock new file mode 100644 index 0000000..e908553 --- /dev/null +++ b/examples/proto/buf.lock @@ -0,0 +1,7 @@ +# Generated by buf. DO NOT EDIT. +version: v1 +deps: + - remote: buf.build + owner: googleapis + repository: googleapis + commit: d1263fe26f8e430a967dc22a4d0cad18 diff --git a/examples/proto/buf.yaml b/examples/proto/buf.yaml new file mode 100644 index 0000000..1e7a817 --- /dev/null +++ b/examples/proto/buf.yaml @@ -0,0 +1,15 @@ +version: v1 + +name: buf.build/einride/protoc-gen-typescript-http + +deps: + - buf.build/googleapis/googleapis + +lint: + use: + - DEFAULT + except: + - RPC_REQUEST_STANDARD_NAME + - RPC_RESPONSE_STANDARD_NAME + - RPC_REQUEST_RESPONSE_UNIQUE + - ENUM_VALUE_PREFIX diff --git a/examples/proto/einride/example/freight/v1/freight_service.proto b/examples/proto/einride/example/freight/v1/freight_service.proto new file mode 100644 index 0000000..44804d2 --- /dev/null +++ b/examples/proto/einride/example/freight/v1/freight_service.proto @@ -0,0 +1,371 @@ +syntax = "proto3"; + +package einride.example.freight.v1; + +import "einride/example/freight/v1/shipment.proto"; +import "einride/example/freight/v1/shipper.proto"; +import "einride/example/freight/v1/site.proto"; +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/field_mask.proto"; + +// This API represents a simple freight service. +// +// It defines the following resource model: +// +// - The API has a collection of [Shipper][einride.example.freight.v1.Shipper] +// resources, named `shippers/*` +// +// - Each Shipper has a collection of [Site][einride.example.freight.v1.Site] +// resources, named `shippers/*/sites/*` +// +// - Each Shipper has a collection of [Shipment][einride.example.freight.v1.Shipment] +// resources, named `shippers/*/shipments/*` +service FreightService { + option (google.api.default_host) = "freight-example.einride.tech"; + + // Get a shipper. + // See: https://google.aip.dev/131 (Standard methods: Get). + rpc GetShipper(GetShipperRequest) returns (Shipper) { + option (google.api.http) = {get: "/v1/{name=shippers/*}"}; + option (google.api.method_signature) = "name"; + } + + // List shippers. + // See: https://google.aip.dev/132 (Standard methods: List). + rpc ListShippers(ListShippersRequest) returns (ListShippersResponse) { + option (google.api.http) = {get: "/v1/shippers"}; + } + + // Create a shipper. + // See: https://google.aip.dev/133 (Standard methods: Create). + rpc CreateShipper(CreateShipperRequest) returns (Shipper) { + option (google.api.http) = { + post: "/v1/shippers" + body: "shipper" + }; + option (google.api.method_signature) = "shipper"; + } + + // Update a shipper. 
+ // See: https://google.aip.dev/134 (Standard methods: Update). + rpc UpdateShipper(UpdateShipperRequest) returns (Shipper) { + option (google.api.http) = { + patch: "/v1/{shipper.name=shippers/*}" + body: "shipper" + }; + option (google.api.method_signature) = "shipper,update_mask"; + } + + // Delete a shipper. + // See: https://google.aip.dev/135 (Standard methods: Delete). + // See: https://google.aip.dev/164 (Soft delete). + rpc DeleteShipper(DeleteShipperRequest) returns (Shipper) { + option (google.api.http) = {delete: "/v1/{name=shippers/*}"}; + option (google.api.method_signature) = "name"; + } + + // Get a site. + // See: https://google.aip.dev/131 (Standard methods: Get). + rpc GetSite(GetSiteRequest) returns (Site) { + option (google.api.http) = {get: "/v1/{name=shippers/*/sites/*}"}; + option (google.api.method_signature) = "name"; + } + + // List sites for a shipper. + // See: https://google.aip.dev/132 (Standard methods: List). + rpc ListSites(ListSitesRequest) returns (ListSitesResponse) { + option (google.api.http) = {get: "/v1/{parent=shippers/*}/sites"}; + option (google.api.method_signature) = "parent"; + } + + // Create a site. + // See: https://google.aip.dev/133 (Standard methods: Create). + rpc CreateSite(CreateSiteRequest) returns (Site) { + option (google.api.http) = { + post: "/v1/{parent=shippers/*}/sites" + body: "site" + }; + option (google.api.method_signature) = "parent,site"; + } + + // Update a site. + // See: https://google.aip.dev/134 (Standard methods: Update). + rpc UpdateSite(UpdateSiteRequest) returns (Site) { + option (google.api.http) = { + patch: "/v1/{site.name=shippers/*/sites/*}" + body: "site" + }; + option (google.api.method_signature) = "site,update_mask"; + } + + // Delete a site. + // See: https://google.aip.dev/135 (Standard methods: Delete). + // See: https://google.aip.dev/164 (Soft delete). + rpc DeleteSite(DeleteSiteRequest) returns (Site) { + option (google.api.http) = {delete: "/v1/{name=shippers/*/sites/*}"}; + option (google.api.method_signature) = "name"; + } + + // Get a shipment. + // See: https://google.aip.dev/131 (Standard methods: Get). + rpc GetShipment(GetShipmentRequest) returns (Shipment) { + option (google.api.http) = {get: "/v1/{name=shippers/*/shipments/*}"}; + option (google.api.method_signature) = "name"; + } + + // List shipments for a shipper. + // See: https://google.aip.dev/132 (Standard methods: List). + rpc ListShipments(ListShipmentsRequest) returns (ListShipmentsResponse) { + option (google.api.http) = {get: "/v1/{parent=shippers/*}/shipments"}; + option (google.api.method_signature) = "parent"; + } + + // Create a shipment. + // See: https://google.aip.dev/133 (Standard methods: Create). + rpc CreateShipment(CreateShipmentRequest) returns (Shipment) { + option (google.api.http) = { + post: "/v1/{parent=shippers/*}/shipments" + body: "shipment" + }; + option (google.api.method_signature) = "parent,shipment"; + } + + // Update a shipment. + // See: https://google.aip.dev/134 (Standard methods: Update). + rpc UpdateShipment(UpdateShipmentRequest) returns (Shipment) { + option (google.api.http) = { + patch: "/v1/{shipment.name=shippers/*/shipments/*}" + body: "shipment" + }; + option (google.api.method_signature) = "shipment,update_mask"; + } + + // Delete a shipment. + // See: https://google.aip.dev/135 (Standard methods: Delete). + // See: https://google.aip.dev/164 (Soft delete). 
+ rpc DeleteShipment(DeleteShipmentRequest) returns (Shipment) { + option (google.api.http) = {delete: "/v1/{name=shippers/*/shipments/*}"}; + option (google.api.method_signature) = "name"; + } +} + +// Request message for FreightService.GetShipper. +message GetShipperRequest { + // The resource name of the shipper to retrieve. + // Format: shippers/{shipper} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Shipper" + ]; +} + +// Request message for FreightService.ListShippers. +message ListShippersRequest { + // Requested page size. Server may return fewer shippers than requested. + // If unspecified, server will pick an appropriate default. + int32 page_size = 1; + + // A token identifying a page of results the server should return. + // Typically, this is the value of + // [ListShippersResponse.next_page_token][einride.example.freight.v1.ListShippersResponse.next_page_token] + // returned from the previous call to `ListShippers` method. + string page_token = 2; +} + +// Response message for FreightService.ListShippers. +message ListShippersResponse { + // The list of shippers. + repeated Shipper shippers = 1; + + // A token to retrieve next page of results. Pass this value in the + // [ListShippersRequest.page_token][einride.example.freight.v1.ListShippersRequest.page_token] + // field in the subsequent call to `ListShippers` method to retrieve the next + // page of results. + string next_page_token = 2; +} + +// Request message for FreightService.CreateShipper. +message CreateShipperRequest { + // The shipper to create. + Shipper shipper = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for FreightService.UpdateShipper. +message UpdateShipperRequest { + // The shipper to update with. The name must match or be empty. + // The shipper's `name` field is used to identify the shipper to be updated. + // Format: shippers/{shipper} + Shipper shipper = 1 [(google.api.field_behavior) = REQUIRED]; + + // The list of fields to be updated. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for FreightService.DeleteShipper. +message DeleteShipperRequest { + // The resource name of the shipper to delete. + // Format: shippers/{shipper} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Shipper" + ]; +} + +// Request message for FreightService.GetSite. +message GetSiteRequest { + // The resource name of the site to retrieve. + // Format: shippers/{shipper}/sites/{site} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Site" + ]; +} + +// Request message for FreightService.ListSites. +message ListSitesRequest { + // The resource name of the parent, which owns this collection of sites. + // Format: shippers/{shipper} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "freight-example.einride.tech/Shipper" + child_type: "freight-example.einride.tech/Site" + } + ]; + + // Requested page size. Server may return fewer sites than requested. + // If unspecified, server will pick an appropriate default. + int32 page_size = 2; + + // A token identifying a page of results the server should return. 
+ // Typically, this is the value of + // [ListSitesResponse.next_page_token][einride.example.freight.v1.ListSitesResponse.next_page_token] + // returned from the previous call to `ListSites` method. + string page_token = 3; +} + +// Response message for FreightService.ListSites. +message ListSitesResponse { + // The list of sites. + repeated Site sites = 1; + + // A token to retrieve next page of results. Pass this value in the + // [ListSitesRequest.page_token][einride.example.freight.v1.ListSitesRequest.page_token] + // field in the subsequent call to `ListSites` method to retrieve the next + // page of results. + string next_page_token = 2; +} + +// Request message for FreightService.CreateSite. +message CreateSiteRequest { + // The resource name of the parent shipper for which this site will be created. + // Format: shippers/{shipper} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = {child_type: "freight-example.einride.tech/Shipper"} + ]; + // The site to create. + Site site = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for FreightService.UpdateSite. +message UpdateSiteRequest { + // The site to update with. The name must match or be empty. + // The site's `name` field is used to identify the site to be updated. + // Format: shippers/{shipper}/sites/{site} + Site site = 1 [(google.api.field_behavior) = REQUIRED]; + + // The list of fields to be updated. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for FreightService.DeleteSite. +message DeleteSiteRequest { + // The resource name of the site to delete. + // Format: shippers/{shipper}/sites/{site} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Site" + ]; +} + +// Request message for FreightService.GetShipment. +message GetShipmentRequest { + // The resource name of the shipment to retrieve. + // Format: shippers/{shipper}/shipments/{shipment} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Shipment" + ]; +} + +// Request message for FreightService.ListShipments. +message ListShipmentsRequest { + // The resource name of the parent, which owns this collection of shipments. + // Format: shippers/{shipper} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "freight-example.einride.tech/Shipper" + child_type: "freight-example.einride.tech/Shipment" + } + ]; + + // Requested page size. Server may return fewer shipments than requested. + // If unspecified, server will pick an appropriate default. + int32 page_size = 2; + + // A token identifying a page of results the server should return. + // Typically, this is the value of + // [ListShipmentsResponse.next_page_token][einride.example.freight.v1.ListShipmentsResponse.next_page_token] + // returned from the previous call to `ListShipments` method. + string page_token = 3; +} + +// Response message for FreightService.ListShipments. +message ListShipmentsResponse { + // The list of shipments. + repeated Shipment shipments = 1; + + // A token to retrieve next page of results. Pass this value in the + // [ListShipmentsRequest.page_token][einride.example.freight.v1.ListShipmentsRequest.page_token] + // field in the subsequent call to `ListShipments` method to retrieve the next + // page of results. 
+ string next_page_token = 2; +} + +// Request message for FreightService.CreateShipment. +message CreateShipmentRequest { + // The resource name of the parent shipper for which this shipment will be created. + // Format: shippers/{shipper} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = {child_type: "freight-example.einride.tech/Shipper"} + ]; + // The shipment to create. + Shipment shipment = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for FreightService.UpdateShipment. +message UpdateShipmentRequest { + // The shipment to update with. The name must match or be empty. + // The shipment's `name` field is used to identify the shipment to be updated. + // Format: shippers/{shipper}/shipments/{shipment} + Shipment shipment = 1 [(google.api.field_behavior) = REQUIRED]; + + // The list of fields to be updated. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for FreightService.DeleteShipment. +message DeleteShipmentRequest { + // The resource name of the shipment to delete. + // Format: shippers/{shipper}/shipments/{shipment} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Shipment" + ]; +} diff --git a/examples/proto/einride/example/freight/v1/shipment.proto b/examples/proto/einride/example/freight/v1/shipment.proto new file mode 100644 index 0000000..f2affa4 --- /dev/null +++ b/examples/proto/einride/example/freight/v1/shipment.proto @@ -0,0 +1,77 @@ +syntax = "proto3"; + +package einride.example.freight.v1; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/timestamp.proto"; + +// A shipment represents transportation of goods between an origin +// [site][einride.example.freight.v1.Site] and a destination +// [site][einride.example.freight.v1.Site]. +message Shipment { + option (google.api.resource) = { + type: "freight-example.einride.tech/Shipment" + pattern: "shippers/{shipper}/shipments/{shipment}" + singular: "shipment" + plural: "shipments" + }; + + // The resource name of the shipment. + string name = 1; + + // The creation timestamp of the shipment. + google.protobuf.Timestamp create_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The last update timestamp of the shipment. + // + // Updated when create/update/delete operation is shipment. + google.protobuf.Timestamp update_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The deletion timestamp of the shipment. + google.protobuf.Timestamp delete_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The resource name of the origin site of the shipment. + // Format: shippers/{shipper}/sites/{site} + string origin_site = 5 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Site" + ]; + + // The resource name of the destination site of the shipment. + // Format: shippers/{shipper}/sites/{site} + string destination_site = 6 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference).type = "freight-example.einride.tech/Site" + ]; + + // The earliest pickup time of the shipment at the origin site. + google.protobuf.Timestamp pickup_earliest_time = 7 [(google.api.field_behavior) = REQUIRED]; + + // The latest pickup time of the shipment at the origin site. 
+  google.protobuf.Timestamp pickup_latest_time = 8 [(google.api.field_behavior) = REQUIRED];
+
+  // The earliest delivery time of the shipment at the destination site.
+  google.protobuf.Timestamp delivery_earliest_time = 9 [(google.api.field_behavior) = REQUIRED];
+
+  // The latest delivery time of the shipment at the destination site.
+  google.protobuf.Timestamp delivery_latest_time = 10 [(google.api.field_behavior) = REQUIRED];
+
+  // The line items of the shipment.
+  repeated LineItem line_items = 11;
+
+  // Annotations of the shipment.
+  map<string, string> annotations = 12;
+}
+
+// A shipment line item.
+message LineItem {
+  // The title of the line item.
+  string title = 1;
+  // The quantity of the line item.
+  float quantity = 2;
+  // The weight of the line item in kilograms.
+  float weight_kg = 3;
+  // The volume of the line item in cubic meters.
+  float volume_m3 = 4;
+}
diff --git a/examples/proto/einride/example/freight/v1/shipper.proto b/examples/proto/einride/example/freight/v1/shipper.proto
new file mode 100644
index 0000000..a9f3ac2
--- /dev/null
+++ b/examples/proto/einride/example/freight/v1/shipper.proto
@@ -0,0 +1,29 @@
+syntax = "proto3";
+
+package einride.example.freight.v1;
+
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/protobuf/timestamp.proto";
+
+// A shipper is a supplier or owner of goods to be transported.
+message Shipper {
+  option (google.api.resource) = {
+    type: "freight-example.einride.tech/Shipper"
+    pattern: "shippers/{shipper}"
+    singular: "shipper"
+    plural: "shippers"
+  };
+  // The resource name of the shipper.
+  string name = 1;
+  // The creation timestamp of the shipper.
+  google.protobuf.Timestamp create_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
+  // The last update timestamp of the shipper.
+  //
+  // Updated when create/update/delete operation is performed.
+  google.protobuf.Timestamp update_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
+  // The deletion timestamp of the shipper.
+  google.protobuf.Timestamp delete_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+  // The display name of the shipper.
+  string display_name = 5 [(google.api.field_behavior) = REQUIRED];
+}
diff --git a/examples/proto/einride/example/freight/v1/site.proto b/examples/proto/einride/example/freight/v1/site.proto
new file mode 100644
index 0000000..370e673
--- /dev/null
+++ b/examples/proto/einride/example/freight/v1/site.proto
@@ -0,0 +1,33 @@
+syntax = "proto3";
+
+package einride.example.freight.v1;
+
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
+import "google/protobuf/timestamp.proto";
+import "google/type/latlng.proto";
+
+// A site is a node in a [shipper][einride.example.freight.v1.Shipper]'s
+// transport network.
+message Site {
+  option (google.api.resource) = {
+    type: "freight-example.einride.tech/Site"
+    pattern: "shippers/{shipper}/sites/{site}"
+    singular: "site"
+    plural: "sites"
+  };
+  // The resource name of the site.
+  string name = 1;
+  // The creation timestamp of the site.
+  google.protobuf.Timestamp create_time = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
+  // The last update timestamp of the site.
+  //
+  // Updated when create/update/delete operation is performed.
+  google.protobuf.Timestamp update_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
+  // The deletion timestamp of the site.
+  google.protobuf.Timestamp delete_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+  // The display name of the site.
+ string display_name = 5 [(google.api.field_behavior) = REQUIRED]; + // The geographic location of the site. + google.type.LatLng lat_lng = 6; +} diff --git a/examples/proto/einride/example/syntax/v1/syntax.proto b/examples/proto/einride/example/syntax/v1/syntax.proto new file mode 100644 index 0000000..37fcbf7 --- /dev/null +++ b/examples/proto/einride/example/syntax/v1/syntax.proto @@ -0,0 +1,240 @@ +syntax = "proto3"; + +package einride.example.syntax.v1; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/wrappers.proto"; + +// Message +message Message { + // NestedMessage + message NestedMessage { + // nested_message.string + string string = 1; + } + // NestedEnum + enum NestedEnum { + // NESTEDENUM_UNSPECIFIED + NESTEDENUM_UNSPECIFIED = 0; + } + // double + double double = 1; + // float + float float = 2; + // int32 + int32 int32 = 3; + // int64 + int64 int64 = 4; + // uint32 + uint32 uint32 = 5; + // uint64 + uint64 uint64 = 6; + // sint32 + sint32 sint32 = 7; + // sint64 + sint64 sint64 = 8; + // fixed32 + fixed32 fixed32 = 9; + // fixed64 + fixed64 fixed64 = 10; + // sfixed32 + sfixed32 sfixed32 = 11; + // sfixed64 + sfixed64 sfixed64 = 12; + // bool + bool bool = 13; + // string + string string = 14; + // bytes + bytes bytes = 15; + // enum + Enum enum = 16; + // message + Message message = 17; + + // optional double + optional double optional_double = 81; + // optional float + optional float optional_float = 82; + // optional int32 + optional int32 optional_int32 = 83; + // optional int64 + optional int64 optional_int64 = 84; + // optional uint32 + optional uint32 optional_uint32 = 85; + // optional uint64 + optional uint64 optional_uint64 = 86; + // optional sint32 + optional sint32 optional_sint32 = 87; + // optional sint64 + optional sint64 optional_sint64 = 88; + // optional fixed32 + optional fixed32 optional_fixed32 = 89; + // optional fixed64 + optional fixed64 optional_fixed64 = 90; + // optional sfixed32 + optional sfixed32 optional_sfixed32 = 91; + // optional sfixed64 + optional sfixed64 optional_sfixed64 = 92; + // optional bool + optional bool optional_bool = 93; + // optional string + optional string optional_string = 94; + // optional bytes + optional bytes optional_bytes = 95; + // optional enum + optional Enum optional_enum = 96; + // optional message + optional Message optional_message = 97; + + // repeated_double + repeated double repeated_double = 18; + // repeated_float + repeated float repeated_float = 19; + // repeated_int32 + repeated int32 repeated_int32 = 20; + // repeated_int64 + repeated int64 repeated_int64 = 21; + // repeated_uint32 + repeated uint32 repeated_uint32 = 22; + // repeated_uint64 + repeated uint64 repeated_uint64 = 23; + // repeated_sint32 + repeated sint32 repeated_sint32 = 24; + // repeated_sint64 + repeated sint64 repeated_sint64 = 25; + // repeated_fixed32 + repeated fixed32 repeated_fixed32 = 26; + // repeated_fixed64 + repeated fixed64 repeated_fixed64 = 27; + // repeated_sfixed32 + repeated sfixed32 repeated_sfixed32 = 28; + // repeated_sfixed64 + repeated sfixed64 repeated_sfixed64 = 29; + // repeated_bool + repeated bool repeated_bool = 30; + // repeated_string + repeated string repeated_string = 31; + // repeated_bytes + repeated bytes repeated_bytes = 32; + // repeated_enum + repeated Enum repeated_enum = 33; + // repeated_message + repeated Message 
repeated_message = 34;
+
+  // map_string_string
+  map<string, string> map_string_string = 35;
+  // map_string_message
+  map<string, Message> map_string_message = 36;
+
+  // oneof
+  oneof oneof {
+    // oneof_string
+    string oneof_string = 37;
+    // oneof_enum
+    Enum oneof_enum = 38;
+    // oneof_message1
+    Message oneof_message1 = 39;
+    // oneof_message2
+    Message oneof_message2 = 40;
+  }
+
+  // any
+  google.protobuf.Any any = 41;
+  // repeated_any
+  repeated google.protobuf.Any repeated_any = 42;
+
+  // duration
+  google.protobuf.Duration duration = 43;
+  // repeated_duration
+  repeated google.protobuf.Duration repeated_duration = 44;
+
+  // empty
+  google.protobuf.Empty empty = 45;
+  // repeated_empty
+  repeated google.protobuf.Empty repeated_empty = 46;
+
+  // field_mask
+  google.protobuf.FieldMask field_mask = 47;
+  // repeated_field_mask
+  repeated google.protobuf.FieldMask repeated_field_mask = 48;
+
+  // struct
+  google.protobuf.Struct struct = 49;
+  // repeated_struct
+  repeated google.protobuf.Struct repeated_struct = 50;
+
+  // value
+  google.protobuf.Value value = 51;
+  // repeated_value
+  repeated google.protobuf.Value repeated_value = 52;
+
+  // null_value
+  google.protobuf.NullValue null_value = 53;
+  // repeated_null_value
+  repeated google.protobuf.NullValue repeated_null_value = 54;
+
+  // list_value
+  google.protobuf.ListValue list_value = 55;
+  // repeated_list_value
+  repeated google.protobuf.ListValue repeated_list_value = 56;
+
+  // bool_value
+  google.protobuf.BoolValue bool_value = 57;
+  // repeated_bool_value
+  repeated google.protobuf.BoolValue repeated_bool_value = 58;
+
+  // bytes_value
+  google.protobuf.BytesValue bytes_value = 59;
+  // repeated_bytes_value
+  repeated google.protobuf.BytesValue repeated_bytes_value = 60;
+
+  // double_value
+  google.protobuf.DoubleValue double_value = 61;
+  // repeated_double_value
+  repeated google.protobuf.DoubleValue repeated_double_value = 62;
+
+  // float_value
+  google.protobuf.FloatValue float_value = 63;
+  // repeated_float_value
+  repeated google.protobuf.FloatValue repeated_float_value = 64;
+
+  // int32_value
+  google.protobuf.Int32Value int32_value = 65;
+  // repeated_int32_value
+  repeated google.protobuf.Int32Value repeated_int32_value = 66;
+
+  // int64_value
+  google.protobuf.Int64Value int64_value = 67;
+  // repeated_int64_value
+  repeated google.protobuf.Int64Value repeated_int64_value = 68;
+
+  // uint32_value
+  google.protobuf.UInt32Value uint32_value = 69;
+  // repeated_uint32_value
+  repeated google.protobuf.UInt32Value repeated_uint32_value = 70;
+
+  // uint64_value
+  google.protobuf.UInt64Value uint64_value = 71;
+  // repeated_uint64_value
+  repeated google.protobuf.UInt64Value repeated_uint64_value = 72;
+
+  // string_value
+  google.protobuf.StringValue string_value = 73;
+  // repeated_string_value
+  repeated google.protobuf.StringValue repeated_string_value = 74;
+}
+
+// Enum
+enum Enum {
+  // ENUM_UNSPECIFIED
+  ENUM_UNSPECIFIED = 0;
+  // ENUM_ONE
+  ENUM_ONE = 1;
+  // ENUM_TWO
+  ENUM_TWO = 2;
+}
diff --git a/examples/proto/einride/example/syntax/v1/syntax_service.proto b/examples/proto/einride/example/syntax/v1/syntax_service.proto
new file mode 100644
index 0000000..f1c5407
--- /dev/null
+++ b/examples/proto/einride/example/syntax/v1/syntax_service.proto
@@ -0,0 +1,51 @@
+syntax = "proto3";
+
+package einride.example.syntax.v1;
+
+import "einride/example/syntax/v1/syntax.proto";
+import "google/api/annotations.proto";
+import "google/protobuf/empty.proto";
+
+service SyntaxService {
+  rpc QueryOnly(Request) returns (Message) {
+    option
(google.api.http) = {get: "/v1"}; + } + + rpc EmptyVerb(google.protobuf.Empty) returns (google.protobuf.Empty) { + option (google.api.http) = {get: "/v1:emptyVerb"}; + } + + rpc StarBody(Request) returns (Message) { + option (google.api.http) = { + post: "/v1:starBody" + body: "*" + }; + } + + rpc Body(Request) returns (Message) { + option (google.api.http) = { + post: "/v1:body" + body: "nested" + }; + } + + rpc Path(Request) returns (Message) { + option (google.api.http) = {post: "/v1/{string}:path"}; + } + + rpc PathBody(Request) returns (Message) { + option (google.api.http) = { + post: "/v1/{string}:pathBody" + body: "nested" + }; + } +} + +message Request { + string string = 1; + repeated string repeated_string = 2; + message Nested { + string string = 1; + } + Nested nested = 3; +} diff --git a/examples/proto/einride/example/syntax/v2/forward.proto b/examples/proto/einride/example/syntax/v2/forward.proto new file mode 100644 index 0000000..6c8e951 --- /dev/null +++ b/examples/proto/einride/example/syntax/v2/forward.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package einride.example.syntax.v2; + +import "einride/example/syntax/v1/syntax.proto"; + +// Message +message Message { + einride.example.syntax.v1.Message forwarded_message = 1; + einride.example.syntax.v1.Enum forwarded_enum = 2; +} diff --git a/examples/proto/gen/typescript/einride/example/freight/v1/index.ts b/examples/proto/gen/typescript/einride/example/freight/v1/index.ts new file mode 100644 index 0000000..058d98d --- /dev/null +++ b/examples/proto/gen/typescript/einride/example/freight/v1/index.ts @@ -0,0 +1,760 @@ +// Code generated by protoc-gen-typescript-http. DO NOT EDIT. +/* eslint-disable camelcase */ +// @ts-nocheck + +// A shipment represents transportation of goods between an origin +// [site][einride.example.freight.v1.Site] and a destination +// [site][einride.example.freight.v1.Site]. +export type Shipment = { + // The resource name of the shipment. + name: string | undefined; + // The creation timestamp of the shipment. + // + // Behaviors: OUTPUT_ONLY + createTime: wellKnownTimestamp | undefined; + // The last update timestamp of the shipment. + // Updated when create/update/delete operation is shipment. + // + // Behaviors: OUTPUT_ONLY + updateTime: wellKnownTimestamp | undefined; + // The deletion timestamp of the shipment. + // + // Behaviors: OUTPUT_ONLY + deleteTime: wellKnownTimestamp | undefined; + // The resource name of the origin site of the shipment. + // Format: shippers/{shipper}/sites/{site} + // + // Behaviors: REQUIRED + originSite: string | undefined; + // The resource name of the destination site of the shipment. + // Format: shippers/{shipper}/sites/{site} + // + // Behaviors: REQUIRED + destinationSite: string | undefined; + // The earliest pickup time of the shipment at the origin site. + // + // Behaviors: REQUIRED + pickupEarliestTime: wellKnownTimestamp | undefined; + // The latest pickup time of the shipment at the origin site. + // + // Behaviors: REQUIRED + pickupLatestTime: wellKnownTimestamp | undefined; + // The earliest delivery time of the shipment at the destination site. + // + // Behaviors: REQUIRED + deliveryEarliestTime: wellKnownTimestamp | undefined; + // The latest delivery time of the shipment at the destination site. + // + // Behaviors: REQUIRED + deliveryLatestTime: wellKnownTimestamp | undefined; + // The line items of the shipment. + lineItems: LineItem[] | undefined; + // Annotations of the shipment. 
+ annotations: { [key: string]: string } | undefined; +}; + +// Encoded using RFC 3339, where generated output will always be Z-normalized +// and uses 0, 3, 6 or 9 fractional digits. +// Offsets other than "Z" are also accepted. +type wellKnownTimestamp = string; + +// A shipment line item. +export type LineItem = { + // The title of the line item. + title: string | undefined; + // The quantity of the line item. + quantity: number | undefined; + // The weight of the line item in kilograms. + weightKg: number | undefined; + // The volume of the line item in cubic meters. + volumeM3: number | undefined; +}; + +// A shipper is a supplier or owner of goods to be transported. +export type Shipper = { + // The resource name of the shipper. + name: string | undefined; + // The creation timestamp of the shipper. + // + // Behaviors: OUTPUT_ONLY + createTime: wellKnownTimestamp | undefined; + // The last update timestamp of the shipper. + // Updated when create/update/delete operation is performed. + // + // Behaviors: OUTPUT_ONLY + updateTime: wellKnownTimestamp | undefined; + // The deletion timestamp of the shipper. + // + // Behaviors: OUTPUT_ONLY + deleteTime: wellKnownTimestamp | undefined; + // The display name of the shipper. + // + // Behaviors: REQUIRED + displayName: string | undefined; +}; + +// A site is a node in a [shipper][einride.example.freight.v1.Shipper]'s +// transport network. +export type Site = { + // The resource name of the site. + name: string | undefined; + // The creation timestamp of the site. + // + // Behaviors: OUTPUT_ONLY + createTime: wellKnownTimestamp | undefined; + // The last update timestamp of the site. + // Updated when create/update/delete operation is performed. + // + // Behaviors: OUTPUT_ONLY + updateTime: wellKnownTimestamp | undefined; + // The deletion timestamp of the site. + // + // Behaviors: OUTPUT_ONLY + deleteTime: wellKnownTimestamp | undefined; + // The display name of the site. + // + // Behaviors: REQUIRED + displayName: string | undefined; + // The geographic location of the site. + latLng: googletype_LatLng | undefined; +}; + +// An object that represents a latitude/longitude pair. This is expressed as a +// pair of doubles to represent degrees latitude and degrees longitude. Unless +// specified otherwise, this must conform to the +// WGS84 +// standard. Values must be within normalized ranges. +export type googletype_LatLng = { + // The latitude in degrees. It must be in the range [-90.0, +90.0]. + latitude: number | undefined; + // The longitude in degrees. It must be in the range [-180.0, +180.0]. + longitude: number | undefined; +}; + +// Request message for FreightService.GetShipper. +export type GetShipperRequest = { + // The resource name of the shipper to retrieve. + // Format: shippers/{shipper} + // + // Behaviors: REQUIRED + name: string | undefined; +}; + +// Request message for FreightService.ListShippers. +export type ListShippersRequest = { + // Requested page size. Server may return fewer shippers than requested. + // If unspecified, server will pick an appropriate default. + pageSize: number | undefined; + // A token identifying a page of results the server should return. + // Typically, this is the value of + // [ListShippersResponse.next_page_token][einride.example.freight.v1.ListShippersResponse.next_page_token] + // returned from the previous call to `ListShippers` method. + pageToken: string | undefined; +}; + +// Response message for FreightService.ListShippers. 
+export type ListShippersResponse = { + // The list of shippers. + shippers: Shipper[] | undefined; + // A token to retrieve next page of results. Pass this value in the + // [ListShippersRequest.page_token][einride.example.freight.v1.ListShippersRequest.page_token] + // field in the subsequent call to `ListShippers` method to retrieve the next + // page of results. + nextPageToken: string | undefined; +}; + +// Request message for FreightService.CreateShipper. +export type CreateShipperRequest = { + // The shipper to create. + // + // Behaviors: REQUIRED + shipper: Shipper | undefined; +}; + +// Request message for FreightService.UpdateShipper. +export type UpdateShipperRequest = { + // The shipper to update with. The name must match or be empty. + // The shipper's `name` field is used to identify the shipper to be updated. + // Format: shippers/{shipper} + // + // Behaviors: REQUIRED + shipper: Shipper | undefined; + // The list of fields to be updated. + updateMask: wellKnownFieldMask | undefined; +}; + +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. +// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +type wellKnownFieldMask = string; + +// Request message for FreightService.DeleteShipper. +export type DeleteShipperRequest = { + // The resource name of the shipper to delete. + // Format: shippers/{shipper} + // + // Behaviors: REQUIRED + name: string | undefined; +}; + +// Request message for FreightService.GetSite. +export type GetSiteRequest = { + // The resource name of the site to retrieve. + // Format: shippers/{shipper}/sites/{site} + // + // Behaviors: REQUIRED + name: string | undefined; +}; + +// Request message for FreightService.ListSites. +export type ListSitesRequest = { + // The resource name of the parent, which owns this collection of sites. + // Format: shippers/{shipper} + // + // Behaviors: REQUIRED + parent: string | undefined; + // Requested page size. Server may return fewer sites than requested. + // If unspecified, server will pick an appropriate default. + pageSize: number | undefined; + // A token identifying a page of results the server should return. + // Typically, this is the value of + // [ListSitesResponse.next_page_token][einride.example.freight.v1.ListSitesResponse.next_page_token] + // returned from the previous call to `ListSites` method. + pageToken: string | undefined; +}; + +// Response message for FreightService.ListSites. +export type ListSitesResponse = { + // The list of sites. + sites: Site[] | undefined; + // A token to retrieve next page of results. Pass this value in the + // [ListSitesRequest.page_token][einride.example.freight.v1.ListSitesRequest.page_token] + // field in the subsequent call to `ListSites` method to retrieve the next + // page of results. + nextPageToken: string | undefined; +}; + +// Request message for FreightService.CreateSite. +export type CreateSiteRequest = { + // The resource name of the parent shipper for which this site will be created. 
+ // Format: shippers/{shipper} + // + // Behaviors: REQUIRED + parent: string | undefined; + // The site to create. + // + // Behaviors: REQUIRED + site: Site | undefined; +}; + +// Request message for FreightService.UpdateSite. +export type UpdateSiteRequest = { + // The site to update with. The name must match or be empty. + // The site's `name` field is used to identify the site to be updated. + // Format: shippers/{shipper}/sites/{site} + // + // Behaviors: REQUIRED + site: Site | undefined; + // The list of fields to be updated. + updateMask: wellKnownFieldMask | undefined; +}; + +// Request message for FreightService.DeleteSite. +export type DeleteSiteRequest = { + // The resource name of the site to delete. + // Format: shippers/{shipper}/sites/{site} + // + // Behaviors: REQUIRED + name: string | undefined; +}; + +// Request message for FreightService.GetShipment. +export type GetShipmentRequest = { + // The resource name of the shipment to retrieve. + // Format: shippers/{shipper}/shipments/{shipment} + // + // Behaviors: REQUIRED + name: string | undefined; +}; + +// Request message for FreightService.ListShipments. +export type ListShipmentsRequest = { + // The resource name of the parent, which owns this collection of shipments. + // Format: shippers/{shipper} + // + // Behaviors: REQUIRED + parent: string | undefined; + // Requested page size. Server may return fewer shipments than requested. + // If unspecified, server will pick an appropriate default. + pageSize: number | undefined; + // A token identifying a page of results the server should return. + // Typically, this is the value of + // [ListShipmentsResponse.next_page_token][einride.example.freight.v1.ListShipmentsResponse.next_page_token] + // returned from the previous call to `ListShipments` method. + pageToken: string | undefined; +}; + +// Response message for FreightService.ListShipments. +export type ListShipmentsResponse = { + // The list of shipments. + shipments: Shipment[] | undefined; + // A token to retrieve next page of results. Pass this value in the + // [ListShipmentsRequest.page_token][einride.example.freight.v1.ListShipmentsRequest.page_token] + // field in the subsequent call to `ListShipments` method to retrieve the next + // page of results. + nextPageToken: string | undefined; +}; + +// Request message for FreightService.CreateShipment. +export type CreateShipmentRequest = { + // The resource name of the parent shipper for which this shipment will be created. + // Format: shippers/{shipper} + // + // Behaviors: REQUIRED + parent: string | undefined; + // The shipment to create. + // + // Behaviors: REQUIRED + shipment: Shipment | undefined; +}; + +// Request message for FreightService.UpdateShipment. +export type UpdateShipmentRequest = { + // The shipment to update with. The name must match or be empty. + // The shipment's `name` field is used to identify the shipment to be updated. + // Format: shippers/{shipper}/shipments/{shipment} + // + // Behaviors: REQUIRED + shipment: Shipment | undefined; + // The list of fields to be updated. + updateMask: wellKnownFieldMask | undefined; +}; + +// Request message for FreightService.DeleteShipment. +export type DeleteShipmentRequest = { + // The resource name of the shipment to delete. + // Format: shippers/{shipper}/shipments/{shipment} + // + // Behaviors: REQUIRED + name: string | undefined; +}; + +// This API represents a simple freight service. 
+// It defines the following resource model: +// - The API has a collection of [Shipper][einride.example.freight.v1.Shipper] +// resources, named `shippers/*` +// - Each Shipper has a collection of [Site][einride.example.freight.v1.Site] +// resources, named `shippers/*/sites/*` +// - Each Shipper has a collection of [Shipment][einride.example.freight.v1.Shipment] +// resources, named `shippers/*/shipments/*` +export interface FreightService { + // Get a shipper. + // See: https://google.aip.dev/131 (Standard methods: Get). + GetShipper(request: GetShipperRequest): Promise; + // List shippers. + // See: https://google.aip.dev/132 (Standard methods: List). + ListShippers(request: ListShippersRequest): Promise; + // Create a shipper. + // See: https://google.aip.dev/133 (Standard methods: Create). + CreateShipper(request: CreateShipperRequest): Promise; + // Update a shipper. + // See: https://google.aip.dev/134 (Standard methods: Update). + UpdateShipper(request: UpdateShipperRequest): Promise; + // Delete a shipper. + // See: https://google.aip.dev/135 (Standard methods: Delete). + // See: https://google.aip.dev/164 (Soft delete). + DeleteShipper(request: DeleteShipperRequest): Promise; + // Get a site. + // See: https://google.aip.dev/131 (Standard methods: Get). + GetSite(request: GetSiteRequest): Promise; + // List sites for a shipper. + // See: https://google.aip.dev/132 (Standard methods: List). + ListSites(request: ListSitesRequest): Promise; + // Create a site. + // See: https://google.aip.dev/133 (Standard methods: Create). + CreateSite(request: CreateSiteRequest): Promise; + // Update a site. + // See: https://google.aip.dev/134 (Standard methods: Update). + UpdateSite(request: UpdateSiteRequest): Promise; + // Delete a site. + // See: https://google.aip.dev/135 (Standard methods: Delete). + // See: https://google.aip.dev/164 (Soft delete). + DeleteSite(request: DeleteSiteRequest): Promise; + // Get a shipment. + // See: https://google.aip.dev/131 (Standard methods: Get). + GetShipment(request: GetShipmentRequest): Promise; + // List shipments for a shipper. + // See: https://google.aip.dev/132 (Standard methods: List). + ListShipments(request: ListShipmentsRequest): Promise; + // Create a shipment. + // See: https://google.aip.dev/133 (Standard methods: Create). + CreateShipment(request: CreateShipmentRequest): Promise; + // Update a shipment. + // See: https://google.aip.dev/134 (Standard methods: Update). + UpdateShipment(request: UpdateShipmentRequest): Promise; + // Delete a shipment. + // See: https://google.aip.dev/135 (Standard methods: Delete). + // See: https://google.aip.dev/164 (Soft delete). 
+ DeleteShipment(request: DeleteShipmentRequest): Promise; +} + +type RequestType = { + path: string; + method: string; + body: string | null; +}; + +type RequestHandler = (request: RequestType, meta: { service: string, method: string }) => Promise; + +export function createFreightServiceClient( + handler: RequestHandler +): FreightService { + return { + GetShipper(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.name) { + throw new Error("missing required field request.name"); + } + const path = `v1/${request.name}`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "FreightService", + method: "GetShipper", + }) as Promise; + }, + ListShippers(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + const path = `v1/shippers`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + if (request.pageSize) { + queryParams.push(`pageSize=${encodeURIComponent(request.pageSize.toString())}`) + } + if (request.pageToken) { + queryParams.push(`pageToken=${encodeURIComponent(request.pageToken.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "FreightService", + method: "ListShippers", + }) as Promise; + }, + CreateShipper(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + const path = `v1/shippers`; // eslint-disable-line quotes + const body = JSON.stringify(request?.shipper ?? {}); + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "POST", + body, + }, { + service: "FreightService", + method: "CreateShipper", + }) as Promise; + }, + UpdateShipper(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.shipper?.name) { + throw new Error("missing required field request.shipper.name"); + } + const path = `v1/${request.shipper.name}`; // eslint-disable-line quotes + const body = JSON.stringify(request?.shipper ?? 
{}); + const queryParams: string[] = []; + if (request.updateMask) { + queryParams.push(`updateMask=${encodeURIComponent(request.updateMask.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "PATCH", + body, + }, { + service: "FreightService", + method: "UpdateShipper", + }) as Promise; + }, + DeleteShipper(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.name) { + throw new Error("missing required field request.name"); + } + const path = `v1/${request.name}`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "DELETE", + body, + }, { + service: "FreightService", + method: "DeleteShipper", + }) as Promise; + }, + GetSite(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.name) { + throw new Error("missing required field request.name"); + } + const path = `v1/${request.name}`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "FreightService", + method: "GetSite", + }) as Promise; + }, + ListSites(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.parent) { + throw new Error("missing required field request.parent"); + } + const path = `v1/${request.parent}/sites`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + if (request.pageSize) { + queryParams.push(`pageSize=${encodeURIComponent(request.pageSize.toString())}`) + } + if (request.pageToken) { + queryParams.push(`pageToken=${encodeURIComponent(request.pageToken.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "FreightService", + method: "ListSites", + }) as Promise; + }, + CreateSite(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.parent) { + throw new Error("missing required field request.parent"); + } + const path = `v1/${request.parent}/sites`; // eslint-disable-line quotes + const body = JSON.stringify(request?.site ?? {}); + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "POST", + body, + }, { + service: "FreightService", + method: "CreateSite", + }) as Promise; + }, + UpdateSite(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.site?.name) { + throw new Error("missing required field request.site.name"); + } + const path = `v1/${request.site.name}`; // eslint-disable-line quotes + const body = JSON.stringify(request?.site ?? 
{}); + const queryParams: string[] = []; + if (request.updateMask) { + queryParams.push(`updateMask=${encodeURIComponent(request.updateMask.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "PATCH", + body, + }, { + service: "FreightService", + method: "UpdateSite", + }) as Promise; + }, + DeleteSite(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.name) { + throw new Error("missing required field request.name"); + } + const path = `v1/${request.name}`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "DELETE", + body, + }, { + service: "FreightService", + method: "DeleteSite", + }) as Promise; + }, + GetShipment(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.name) { + throw new Error("missing required field request.name"); + } + const path = `v1/${request.name}`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "FreightService", + method: "GetShipment", + }) as Promise; + }, + ListShipments(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.parent) { + throw new Error("missing required field request.parent"); + } + const path = `v1/${request.parent}/shipments`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + if (request.pageSize) { + queryParams.push(`pageSize=${encodeURIComponent(request.pageSize.toString())}`) + } + if (request.pageToken) { + queryParams.push(`pageToken=${encodeURIComponent(request.pageToken.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "FreightService", + method: "ListShipments", + }) as Promise; + }, + CreateShipment(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.parent) { + throw new Error("missing required field request.parent"); + } + const path = `v1/${request.parent}/shipments`; // eslint-disable-line quotes + const body = JSON.stringify(request?.shipment ?? {}); + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "POST", + body, + }, { + service: "FreightService", + method: "CreateShipment", + }) as Promise; + }, + UpdateShipment(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.shipment?.name) { + throw new Error("missing required field request.shipment.name"); + } + const path = `v1/${request.shipment.name}`; // eslint-disable-line quotes + const body = JSON.stringify(request?.shipment ?? 
{}); + const queryParams: string[] = []; + if (request.updateMask) { + queryParams.push(`updateMask=${encodeURIComponent(request.updateMask.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "PATCH", + body, + }, { + service: "FreightService", + method: "UpdateShipment", + }) as Promise; + }, + DeleteShipment(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.name) { + throw new Error("missing required field request.name"); + } + const path = `v1/${request.name}`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "DELETE", + body, + }, { + service: "FreightService", + method: "DeleteShipment", + }) as Promise; + }, + }; +} + +// @@protoc_insertion_point(typescript-http-eof) diff --git a/examples/proto/gen/typescript/einride/example/syntax/v1/index.ts b/examples/proto/gen/typescript/einride/example/syntax/v1/index.ts new file mode 100644 index 0000000..bddca86 --- /dev/null +++ b/examples/proto/gen/typescript/einride/example/syntax/v1/index.ts @@ -0,0 +1,457 @@ +// Code generated by protoc-gen-typescript-http. DO NOT EDIT. +/* eslint-disable camelcase */ +// @ts-nocheck + +// Enum +export type Enum = + // ENUM_UNSPECIFIED + | "ENUM_UNSPECIFIED" + // ENUM_ONE + | "ENUM_ONE" + // ENUM_TWO + | "ENUM_TWO"; +// Message +export type Message = { + // double + double: number | undefined; + // float + float: number | undefined; + // int32 + int32: number | undefined; + // int64 + int64: number | undefined; + // uint32 + uint32: number | undefined; + // uint64 + uint64: number | undefined; + // sint32 + sint32: number | undefined; + // sint64 + sint64: number | undefined; + // fixed32 + fixed32: number | undefined; + // fixed64 + fixed64: number | undefined; + // sfixed32 + sfixed32: number | undefined; + // sfixed64 + sfixed64: number | undefined; + // bool + bool: boolean | undefined; + // string + string: string | undefined; + // bytes + bytes: string | undefined; + // enum + enum: Enum | undefined; + // message + message: Message | undefined; + // optional double + optionalDouble?: number; + // optional float + optionalFloat?: number; + // optional int32 + optionalInt32?: number; + // optional int64 + optionalInt64?: number; + // optional uint32 + optionalUint32?: number; + // optional uint64 + optionalUint64?: number; + // optional sint32 + optionalSint32?: number; + // optional sint64 + optionalSint64?: number; + // optional fixed32 + optionalFixed32?: number; + // optional fixed64 + optionalFixed64?: number; + // optional sfixed32 + optionalSfixed32?: number; + // optional sfixed64 + optionalSfixed64?: number; + // optional bool + optionalBool?: boolean; + // optional string + optionalString?: string; + // optional bytes + optionalBytes?: string; + // optional enum + optionalEnum?: Enum; + // optional message + optionalMessage?: Message; + // repeated_double + repeatedDouble: number[] | undefined; + // repeated_float + repeatedFloat: number[] | undefined; + // repeated_int32 + repeatedInt32: number[] | undefined; + // repeated_int64 + repeatedInt64: number[] | undefined; + // repeated_uint32 + repeatedUint32: number[] | undefined; + // repeated_uint64 + repeatedUint64: number[] | undefined; + // repeated_sint32 + repeatedSint32: number[] | undefined; + // repeated_sint64 + repeatedSint64: 
number[] | undefined; + // repeated_fixed32 + repeatedFixed32: number[] | undefined; + // repeated_fixed64 + repeatedFixed64: number[] | undefined; + // repeated_sfixed32 + repeatedSfixed32: number[] | undefined; + // repeated_sfixed64 + repeatedSfixed64: number[] | undefined; + // repeated_bool + repeatedBool: boolean[] | undefined; + // repeated_string + repeatedString: string[] | undefined; + // repeated_bytes + repeatedBytes: string[] | undefined; + // repeated_enum + repeatedEnum: Enum[] | undefined; + // repeated_message + repeatedMessage: Message[] | undefined; + // map_string_string + mapStringString: { [key: string]: string } | undefined; + // map_string_message + mapStringMessage: { [key: string]: Message } | undefined; + // oneof_string + oneofString?: string; + // oneof_enum + oneofEnum?: Enum; + // oneof_message1 + oneofMessage1?: Message; + // oneof_message2 + oneofMessage2?: Message; + // any + any: wellKnownAny | undefined; + // repeated_any + repeatedAny: wellKnownAny[] | undefined; + // duration + duration: wellKnownDuration | undefined; + // repeated_duration + repeatedDuration: wellKnownDuration[] | undefined; + // empty + empty: wellKnownEmpty | undefined; + // repeated_empty + repeatedEmpty: wellKnownEmpty[] | undefined; + // field_mask + fieldMask: wellKnownFieldMask | undefined; + // repeated_field_mask + repeatedFieldMask: wellKnownFieldMask[] | undefined; + // struct + struct: wellKnownStruct | undefined; + // repeated_struct + repeatedStruct: wellKnownStruct[] | undefined; + // value + value: wellKnownValue | undefined; + // repeated_value + repeatedValue: wellKnownValue[] | undefined; + // null_value + nullValue: wellKnownNullValue | undefined; + // repeated_null_value + repeatedNullValue: wellKnownNullValue[] | undefined; + // list_value + listValue: wellKnownListValue | undefined; + // repeated_list_value + repeatedListValue: wellKnownListValue[] | undefined; + // bool_value + boolValue: wellKnownBoolValue | undefined; + // repeated_bool_value + repeatedBoolValue: wellKnownBoolValue[] | undefined; + // bytes_value + bytesValue: wellKnownBytesValue | undefined; + // repeated_bytes_value + repeatedBytesValue: wellKnownBytesValue[] | undefined; + // double_value + doubleValue: wellKnownDoubleValue | undefined; + // repeated_double_value + repeatedDoubleValue: wellKnownDoubleValue[] | undefined; + // float_value + floatValue: wellKnownFloatValue | undefined; + // repeated_float_value + repeatedFloatValue: wellKnownFloatValue[] | undefined; + // int32_value + int32Value: wellKnownInt32Value | undefined; + // repeated_int32_value + repeatedInt32Value: wellKnownInt32Value[] | undefined; + // int64_value + int64Value: wellKnownInt64Value | undefined; + // repeated_int64_value + repeatedInt64Value: wellKnownInt64Value[] | undefined; + // uint32_value + uint32Value: wellKnownUInt32Value | undefined; + // repeated_uint32_value + repeatedUint32Value: wellKnownUInt32Value[] | undefined; + // uint64_value + uint64Value: wellKnownUInt64Value | undefined; + // repeated_uint64_value + repeatedUint64Value: wellKnownUInt64Value[] | undefined; + // string_value + stringValue: wellKnownUInt64Value | undefined; + // repeated_string_value + repeatedStringValue: wellKnownStringValue[] | undefined; +}; + +// If the Any contains a value that has a special JSON mapping, +// it will be converted as follows: +// {"@type": xxx, "value": yyy}. +// Otherwise, the value will be converted into a JSON object, +// and the "@type" field will be inserted to indicate the actual data type. 
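+// For example, a google.protobuf.Duration packed into an Any is encoded as
+// {"@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s"}.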
+interface wellKnownAny { + "@type": string; + [key: string]: unknown; +} + +// Generated output always contains 0, 3, 6, or 9 fractional digits, +// depending on required precision, followed by the suffix "s". +// Accepted are any fractional digits (also none) as long as they fit +// into nano-seconds precision and the suffix "s" is required. +type wellKnownDuration = string; + +// An empty JSON object +type wellKnownEmpty = Record; + +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. +// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +type wellKnownFieldMask = string; + +// Any JSON value. +type wellKnownStruct = Record; + +type wellKnownValue = unknown; + +type wellKnownNullValue = null; + +type wellKnownListValue = wellKnownValue[]; + +type wellKnownBoolValue = boolean | null; + +type wellKnownBytesValue = string | null; + +type wellKnownDoubleValue = number | null; + +type wellKnownFloatValue = number | null; + +type wellKnownInt32Value = number | null; + +type wellKnownInt64Value = number | null; + +type wellKnownUInt32Value = number | null; + +type wellKnownUInt64Value = number | null; + +type wellKnownStringValue = string | null; + +// NestedMessage +export type Message_NestedMessage = { + // nested_message.string + string: string | undefined; +}; + +// NestedEnum +export type Message_NestedEnum = + // NESTEDENUM_UNSPECIFIED + "NESTEDENUM_UNSPECIFIED"; +export type Request = { + string: string | undefined; + repeatedString: string[] | undefined; + nested: Request_Nested | undefined; +}; + +export type Request_Nested = { + string: string | undefined; +}; + +export interface SyntaxService { + QueryOnly(request: Request): Promise; + EmptyVerb(request: wellKnownEmpty): Promise; + StarBody(request: Request): Promise; + Body(request: Request): Promise; + Path(request: Request): Promise; + PathBody(request: Request): Promise; +} + +type RequestType = { + path: string; + method: string; + body: string | null; +}; + +type RequestHandler = (request: RequestType, meta: { service: string, method: string }) => Promise; + +export function createSyntaxServiceClient( + handler: RequestHandler +): SyntaxService { + return { + QueryOnly(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + const path = `v1`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + if (request.string) { + queryParams.push(`string=${encodeURIComponent(request.string.toString())}`) + } + if (request.repeatedString) { + request.repeatedString.forEach((x) => { + queryParams.push(`repeatedString=${encodeURIComponent(x.toString())}`) + }) + } + if (request.nested?.string) { + queryParams.push(`nested.string=${encodeURIComponent(request.nested.string.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "SyntaxService", + method: "QueryOnly", + }) as Promise; + }, + EmptyVerb(request) { // eslint-disable-line 
@typescript-eslint/no-unused-vars + const path = `v1:emptyVerb`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "GET", + body, + }, { + service: "SyntaxService", + method: "EmptyVerb", + }) as Promise; + }, + StarBody(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + const path = `v1:starBody`; // eslint-disable-line quotes + const body = JSON.stringify(request); + const queryParams: string[] = []; + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "POST", + body, + }, { + service: "SyntaxService", + method: "StarBody", + }) as Promise; + }, + Body(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + const path = `v1:body`; // eslint-disable-line quotes + const body = JSON.stringify(request?.nested ?? {}); + const queryParams: string[] = []; + if (request.string) { + queryParams.push(`string=${encodeURIComponent(request.string.toString())}`) + } + if (request.repeatedString) { + request.repeatedString.forEach((x) => { + queryParams.push(`repeatedString=${encodeURIComponent(x.toString())}`) + }) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "POST", + body, + }, { + service: "SyntaxService", + method: "Body", + }) as Promise; + }, + Path(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.string) { + throw new Error("missing required field request.string"); + } + const path = `v1/${request.string}:path`; // eslint-disable-line quotes + const body = null; + const queryParams: string[] = []; + if (request.repeatedString) { + request.repeatedString.forEach((x) => { + queryParams.push(`repeatedString=${encodeURIComponent(x.toString())}`) + }) + } + if (request.nested?.string) { + queryParams.push(`nested.string=${encodeURIComponent(request.nested.string.toString())}`) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "POST", + body, + }, { + service: "SyntaxService", + method: "Path", + }) as Promise; + }, + PathBody(request) { // eslint-disable-line @typescript-eslint/no-unused-vars + if (!request.string) { + throw new Error("missing required field request.string"); + } + const path = `v1/${request.string}:pathBody`; // eslint-disable-line quotes + const body = JSON.stringify(request?.nested ?? {}); + const queryParams: string[] = []; + if (request.repeatedString) { + request.repeatedString.forEach((x) => { + queryParams.push(`repeatedString=${encodeURIComponent(x.toString())}`) + }) + } + let uri = path; + if (queryParams.length > 0) { + uri += `?${queryParams.join("&")}` + } + return handler({ + path: uri, + method: "POST", + body, + }, { + service: "SyntaxService", + method: "PathBody", + }) as Promise; + }, + }; +} + +// @@protoc_insertion_point(typescript-http-eof) diff --git a/examples/proto/gen/typescript/einride/example/syntax/v2/index.ts b/examples/proto/gen/typescript/einride/example/syntax/v2/index.ts new file mode 100644 index 0000000..8498ae6 --- /dev/null +++ b/examples/proto/gen/typescript/einride/example/syntax/v2/index.ts @@ -0,0 +1,290 @@ +// Code generated by protoc-gen-typescript-http. DO NOT EDIT. 
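The generated clients above are transport-agnostic: createFreightServiceClient and createSyntaxServiceClient only assemble the HTTP method, path, query string and JSON body, then delegate the actual call to a caller-supplied RequestHandler. A minimal fetch-based handler could look like the sketch below; the baseUrl, header choice and error handling are illustrative assumptions, not something the generated code prescribes.

// Sketch of a fetch-based RequestHandler (assumes a fetch-capable runtime such as
// a browser or Node 18+; baseUrl and the error format are illustrative assumptions).
const baseUrl = "https://api.example.com";

async function fetchHandler(
  request: { path: string; method: string; body: string | null },
  meta: { service: string; method: string },
): Promise<unknown> {
  const response = await fetch(`${baseUrl}/${request.path}`, {
    method: request.method,
    body: request.body,
    headers: { "Content-Type": "application/json" },
  });
  if (!response.ok) {
    throw new Error(`${meta.service}.${meta.method} failed with HTTP ${response.status}`);
  }
  return response.json();
}

// Usage (illustrative):
// const freight = createFreightServiceClient(fetchHandler);
// const shipper = await freight.GetShipper({ name: "shippers/example-shipper" });

Because every call also passes the service and method names in meta, concerns such as authentication headers, retries or logging can be handled in this one place rather than per method.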
+/* eslint-disable camelcase */ +// @ts-nocheck + +// Message +export type Message = { + forwardedMessage: einrideexamplesyntaxv1_Message | undefined; + forwardedEnum: einrideexamplesyntaxv1_Enum | undefined; +}; + +// Message +export type einrideexamplesyntaxv1_Message = { + // double + double: number | undefined; + // float + float: number | undefined; + // int32 + int32: number | undefined; + // int64 + int64: number | undefined; + // uint32 + uint32: number | undefined; + // uint64 + uint64: number | undefined; + // sint32 + sint32: number | undefined; + // sint64 + sint64: number | undefined; + // fixed32 + fixed32: number | undefined; + // fixed64 + fixed64: number | undefined; + // sfixed32 + sfixed32: number | undefined; + // sfixed64 + sfixed64: number | undefined; + // bool + bool: boolean | undefined; + // string + string: string | undefined; + // bytes + bytes: string | undefined; + // enum + enum: einrideexamplesyntaxv1_Enum | undefined; + // message + message: einrideexamplesyntaxv1_Message | undefined; + // optional double + optionalDouble?: number; + // optional float + optionalFloat?: number; + // optional int32 + optionalInt32?: number; + // optional int64 + optionalInt64?: number; + // optional uint32 + optionalUint32?: number; + // optional uint64 + optionalUint64?: number; + // optional sint32 + optionalSint32?: number; + // optional sint64 + optionalSint64?: number; + // optional fixed32 + optionalFixed32?: number; + // optional fixed64 + optionalFixed64?: number; + // optional sfixed32 + optionalSfixed32?: number; + // optional sfixed64 + optionalSfixed64?: number; + // optional bool + optionalBool?: boolean; + // optional string + optionalString?: string; + // optional bytes + optionalBytes?: string; + // optional enum + optionalEnum?: einrideexamplesyntaxv1_Enum; + // optional message + optionalMessage?: einrideexamplesyntaxv1_Message; + // repeated_double + repeatedDouble: number[] | undefined; + // repeated_float + repeatedFloat: number[] | undefined; + // repeated_int32 + repeatedInt32: number[] | undefined; + // repeated_int64 + repeatedInt64: number[] | undefined; + // repeated_uint32 + repeatedUint32: number[] | undefined; + // repeated_uint64 + repeatedUint64: number[] | undefined; + // repeated_sint32 + repeatedSint32: number[] | undefined; + // repeated_sint64 + repeatedSint64: number[] | undefined; + // repeated_fixed32 + repeatedFixed32: number[] | undefined; + // repeated_fixed64 + repeatedFixed64: number[] | undefined; + // repeated_sfixed32 + repeatedSfixed32: number[] | undefined; + // repeated_sfixed64 + repeatedSfixed64: number[] | undefined; + // repeated_bool + repeatedBool: boolean[] | undefined; + // repeated_string + repeatedString: string[] | undefined; + // repeated_bytes + repeatedBytes: string[] | undefined; + // repeated_enum + repeatedEnum: einrideexamplesyntaxv1_Enum[] | undefined; + // repeated_message + repeatedMessage: einrideexamplesyntaxv1_Message[] | undefined; + // map_string_string + mapStringString: { [key: string]: string } | undefined; + // map_string_message + mapStringMessage: { [key: string]: einrideexamplesyntaxv1_Message } | undefined; + // oneof_string + oneofString?: string; + // oneof_enum + oneofEnum?: einrideexamplesyntaxv1_Enum; + // oneof_message1 + oneofMessage1?: einrideexamplesyntaxv1_Message; + // oneof_message2 + oneofMessage2?: einrideexamplesyntaxv1_Message; + // any + any: wellKnownAny | undefined; + // repeated_any + repeatedAny: wellKnownAny[] | undefined; + // duration + duration: wellKnownDuration | 
undefined; + // repeated_duration + repeatedDuration: wellKnownDuration[] | undefined; + // empty + empty: wellKnownEmpty | undefined; + // repeated_empty + repeatedEmpty: wellKnownEmpty[] | undefined; + // field_mask + fieldMask: wellKnownFieldMask | undefined; + // repeated_field_mask + repeatedFieldMask: wellKnownFieldMask[] | undefined; + // struct + struct: wellKnownStruct | undefined; + // repeated_struct + repeatedStruct: wellKnownStruct[] | undefined; + // value + value: wellKnownValue | undefined; + // repeated_value + repeatedValue: wellKnownValue[] | undefined; + // null_value + nullValue: wellKnownNullValue | undefined; + // repeated_null_value + repeatedNullValue: wellKnownNullValue[] | undefined; + // list_value + listValue: wellKnownListValue | undefined; + // repeated_list_value + repeatedListValue: wellKnownListValue[] | undefined; + // bool_value + boolValue: wellKnownBoolValue | undefined; + // repeated_bool_value + repeatedBoolValue: wellKnownBoolValue[] | undefined; + // bytes_value + bytesValue: wellKnownBytesValue | undefined; + // repeated_bytes_value + repeatedBytesValue: wellKnownBytesValue[] | undefined; + // double_value + doubleValue: wellKnownDoubleValue | undefined; + // repeated_double_value + repeatedDoubleValue: wellKnownDoubleValue[] | undefined; + // float_value + floatValue: wellKnownFloatValue | undefined; + // repeated_float_value + repeatedFloatValue: wellKnownFloatValue[] | undefined; + // int32_value + int32Value: wellKnownInt32Value | undefined; + // repeated_int32_value + repeatedInt32Value: wellKnownInt32Value[] | undefined; + // int64_value + int64Value: wellKnownInt64Value | undefined; + // repeated_int64_value + repeatedInt64Value: wellKnownInt64Value[] | undefined; + // uint32_value + uint32Value: wellKnownUInt32Value | undefined; + // repeated_uint32_value + repeatedUint32Value: wellKnownUInt32Value[] | undefined; + // uint64_value + uint64Value: wellKnownUInt64Value | undefined; + // repeated_uint64_value + repeatedUint64Value: wellKnownUInt64Value[] | undefined; + // string_value + stringValue: wellKnownUInt64Value | undefined; + // repeated_string_value + repeatedStringValue: wellKnownStringValue[] | undefined; +}; + +// Enum +export type einrideexamplesyntaxv1_Enum = + // ENUM_UNSPECIFIED + | "ENUM_UNSPECIFIED" + // ENUM_ONE + | "ENUM_ONE" + // ENUM_TWO + | "ENUM_TWO"; +// If the Any contains a value that has a special JSON mapping, +// it will be converted as follows: +// {"@type": xxx, "value": yyy}. +// Otherwise, the value will be converted into a JSON object, +// and the "@type" field will be inserted to indicate the actual data type. +interface wellKnownAny { + "@type": string; + [key: string]: unknown; +} + +// Generated output always contains 0, 3, 6, or 9 fractional digits, +// depending on required precision, followed by the suffix "s". +// Accepted are any fractional digits (also none) as long as they fit +// into nano-seconds precision and the suffix "s" is required. +type wellKnownDuration = string; + +// An empty JSON object +type wellKnownEmpty = Record; + +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. 
+// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +type wellKnownFieldMask = string; + +// Any JSON value. +type wellKnownStruct = Record; + +type wellKnownValue = unknown; + +type wellKnownNullValue = null; + +type wellKnownListValue = wellKnownValue[]; + +type wellKnownBoolValue = boolean | null; + +type wellKnownBytesValue = string | null; + +type wellKnownDoubleValue = number | null; + +type wellKnownFloatValue = number | null; + +type wellKnownInt32Value = number | null; + +type wellKnownInt64Value = number | null; + +type wellKnownUInt32Value = number | null; + +type wellKnownUInt64Value = number | null; + +type wellKnownStringValue = string | null; + +// NestedMessage +export type einrideexamplesyntaxv1_Message_NestedMessage = { + // nested_message.string + string: string | undefined; +}; + +// NestedEnum +export type einrideexamplesyntaxv1_Message_NestedEnum = + // NESTEDENUM_UNSPECIFIED + "NESTEDENUM_UNSPECIFIED"; + +// @@protoc_insertion_point(typescript-http-eof) diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..18ee815 --- /dev/null +++ b/go.mod @@ -0,0 +1,11 @@ +module git.apinb.com/bsm-tools/protoc-gen-ts + +go 1.22 + +require ( + google.golang.org/genproto/googleapis/api v0.0.0-20240624140628-dc46fd24d27d + google.golang.org/protobuf v1.34.2 + gotest.tools/v3 v3.5.1 +) + +require github.com/google/go-cmp v0.5.9 // indirect diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..15ac3e7 --- /dev/null +++ b/go.sum @@ -0,0 +1,8 @@ +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +google.golang.org/genproto/googleapis/api v0.0.0-20240624140628-dc46fd24d27d h1:Aqf0fiIdUQEj0Gn9mKFFXoQfTTEaNopWpfVyYADxiSg= +google.golang.org/genproto/googleapis/api v0.0.0-20240624140628-dc46fd24d27d/go.mod h1:Od4k8V1LQSizPRUK4OzZ7TBE/20k+jPczUDAEyvn69Y= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= diff --git a/internal/codegen/file.go b/internal/codegen/file.go new file mode 100644 index 0000000..f28307c --- /dev/null +++ b/internal/codegen/file.go @@ -0,0 +1,21 @@ +package codegen + +import ( + "bytes" + "fmt" +) + +type File struct { + buf bytes.Buffer +} + +func (f *File) P(v ...interface{}) { + for _, x := range v { + fmt.Fprint(&f.buf, x) + } + fmt.Fprintln(&f.buf) +} + +func (f *File) Content() []byte { + return f.buf.Bytes() +} diff --git a/internal/httprule/fieldpath.go b/internal/httprule/fieldpath.go new file mode 100644 index 0000000..27c5170 --- /dev/null +++ b/internal/httprule/fieldpath.go @@ -0,0 +1,11 @@ +package httprule + +import "strings" + +// FieldPath describes the path for a field from a message. +// Individual segments are in snake case (same as in protobuf file). 
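+// For example, the template variable {message.id} parses to FieldPath{"message", "id"},
+// which String() renders as "message.id".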
+type FieldPath []string + +func (f FieldPath) String() string { + return strings.Join(f, ".") +} diff --git a/internal/httprule/rule.go b/internal/httprule/rule.go new file mode 100644 index 0000000..766e501 --- /dev/null +++ b/internal/httprule/rule.go @@ -0,0 +1,94 @@ +package httprule + +import ( + "fmt" + "net/http" + + "google.golang.org/genproto/googleapis/api/annotations" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" +) + +func Get(m protoreflect.MethodDescriptor) (*annotations.HttpRule, bool) { + descriptor, ok := proto.GetExtension(m.Options(), annotations.E_Http).(*annotations.HttpRule) + if !ok || descriptor == nil { + return nil, false + } + return descriptor, true +} + +type Rule struct { + // The HTTP method to use. + Method string + // The template describing the URL to use. + Template Template + Body string + AdditionalRules []Rule +} + +func ParseRule(httpRule *annotations.HttpRule) (Rule, error) { + method, err := httpRuleMethod(httpRule) + if err != nil { + return Rule{}, err + } + url, err := httpRuleURL(httpRule) + if err != nil { + return Rule{}, err + } + template, err := ParseTemplate(url) + if err != nil { + return Rule{}, err + } + additional := make([]Rule, len(httpRule.GetAdditionalBindings())) + for i, r := range httpRule.GetAdditionalBindings() { + a, err := ParseRule(r) + if err != nil { + return Rule{}, fmt.Errorf("parse additional binding %d: %w", i, err) + } + additional[i] = a + } + return Rule{ + Method: method, + Template: template, + Body: httpRule.GetBody(), + AdditionalRules: additional, + }, nil +} + +func httpRuleURL(rule *annotations.HttpRule) (string, error) { + switch v := rule.GetPattern().(type) { + case *annotations.HttpRule_Get: + return v.Get, nil + case *annotations.HttpRule_Post: + return v.Post, nil + case *annotations.HttpRule_Delete: + return v.Delete, nil + case *annotations.HttpRule_Patch: + return v.Patch, nil + case *annotations.HttpRule_Put: + return v.Put, nil + case *annotations.HttpRule_Custom: + return v.Custom.GetPath(), nil + default: + return "", fmt.Errorf("http rule does not have an URL defined") + } +} + +func httpRuleMethod(rule *annotations.HttpRule) (string, error) { + switch v := rule.GetPattern().(type) { + case *annotations.HttpRule_Get: + return http.MethodGet, nil + case *annotations.HttpRule_Post: + return http.MethodPost, nil + case *annotations.HttpRule_Delete: + return http.MethodDelete, nil + case *annotations.HttpRule_Patch: + return http.MethodPatch, nil + case *annotations.HttpRule_Put: + return http.MethodPut, nil + case *annotations.HttpRule_Custom: + return v.Custom.GetKind(), nil + default: + return "", fmt.Errorf("http rule does not have an URL defined") + } +} diff --git a/internal/httprule/template.go b/internal/httprule/template.go new file mode 100644 index 0000000..2b0c884 --- /dev/null +++ b/internal/httprule/template.go @@ -0,0 +1,397 @@ +package httprule + +import "fmt" + +// Template represents a http path template. +// +// Example: `/v1/{name=books/*}:publish`. +type Template struct { + Segments []Segment + Verb string +} + +// Segment represents a single segment of a Template. +type Segment struct { + Kind SegmentKind + Literal string + Variable VariableSegment +} + +type SegmentKind int + +const ( + SegmentKindLiteral SegmentKind = iota + SegmentKindMatchSingle + SegmentKindMatchMultiple + SegmentKindVariable +) + +// VariableSegment represents a variable segment. 
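+// For example, "{id=messages/*}" has FieldPath ["id"] and Segments holding the
+// literal "messages" followed by a single-segment match; a bare "{id}" defaults
+// to one single-segment match.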
+type VariableSegment struct { + FieldPath FieldPath + Segments []Segment +} + +func ParseTemplate(s string) (Template, error) { + p := &parser{ + content: s, + } + template, err := p.parse() + if err != nil { + return Template{}, err + } + if err := validate(template); err != nil { + return Template{}, err + } + return template, nil +} + +type parser struct { + content string + + // The next pos in content to read + pos int + // The currently read rune in content + tok rune +} + +func (p *parser) parse() (Template, error) { + // Grammar. + // Template = "/" Segments [ Verb ] ; + // Segments = Segment { "/" Segment } ; + // Segment = "*" | "**" | LITERAL | Variable ; + // Variable = "{" FieldPath [ "=" Segments ] "}" ; + // FieldPath = IDENT { "." IDENT } ; + // Verb = ":" LITERAL ;. + p.next() + if err := p.expect('/'); err != nil { + return Template{}, err + } + segments, err := p.parseSegments() + if err != nil { + return Template{}, err + } + var verb string + if p.tok == ':' { + v, err := p.parseVerb() + if err != nil { + return Template{}, err + } + verb = v + } + if p.tok != -1 { + return Template{}, fmt.Errorf("expected EOF, got %q", p.tok) + } + return Template{ + Segments: segments, + Verb: verb, + }, nil +} + +func (p *parser) parseSegments() ([]Segment, error) { + seg, err := p.parseSegment() + if err != nil { + return nil, err + } + if p.tok == '/' { + p.next() + rest, err := p.parseSegments() + if err != nil { + return nil, err + } + return append([]Segment{seg}, rest...), nil + } + return []Segment{seg}, nil +} + +func (p *parser) parseSegment() (Segment, error) { + switch { + case p.tok == '*' && p.peek() == '*': + return p.parseMatchMultipleSegment(), nil + case p.tok == '*': + return p.parseMatchSingleSegment(), nil + case p.tok == '{': + return p.parseVariableSegment() + default: + return p.parseLiteralSegment() + } +} + +func (p *parser) parseMatchMultipleSegment() Segment { + p.next() + p.next() + return Segment{ + Kind: SegmentKindMatchMultiple, + } +} + +func (p *parser) parseMatchSingleSegment() Segment { + p.next() + return Segment{ + Kind: SegmentKindMatchSingle, + } +} + +func (p *parser) parseLiteralSegment() (Segment, error) { + lit, err := p.parseLiteral() + if err != nil { + return Segment{}, err + } + return Segment{ + Kind: SegmentKindLiteral, + Literal: lit, + }, nil +} + +func (p *parser) parseVariableSegment() (Segment, error) { + if err := p.expect('{'); err != nil { + return Segment{}, err + } + fieldPath, err := p.parseFieldPath() + if err != nil { + return Segment{}, err + } + segments := []Segment{ + {Kind: SegmentKindMatchSingle}, + } + if p.tok == '=' { + p.next() + s, err := p.parseSegments() + if err != nil { + return Segment{}, err + } + segments = s + } + if err := p.expect('}'); err != nil { + return Segment{}, err + } + return Segment{ + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: fieldPath, + Segments: segments, + }, + }, nil +} + +func (p *parser) parseVerb() (string, error) { + if err := p.expect(':'); err != nil { + return "", err + } + return p.parseLiteral() +} + +func (p *parser) parseFieldPath() ([]string, error) { + fp, err := p.parseIdent() + if err != nil { + return nil, err + } + if p.tok == '.' { + p.next() + rest, err := p.parseFieldPath() + if err != nil { + return nil, err + } + return append([]string{fp}, rest...), nil + } + return []string{fp}, nil +} + +// parseLiteral consumes input as long as next token(s) belongs to pchars, as defined in RFC3986. +// Returns an error if not literal is found. 
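+// Note that '/' and ':' are not treated as pchars here: '/' separates segments and
+// ':' introduces the verb, so parsing "v1:publish" as a literal yields "v1".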
+// +// https://www.ietf.org/rfc/rfc3986.txt, P.49 +// +// pchar = unreserved / pct-encoded / sub-delims / ":" / "@" +// unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" +// sub-delims = "!" / "$" / "&" / "'" / "(" / ")" +// / "*" / "+" / "," / ";" / "=" +// pct-encoded = "%" HEXDIG HEXDIG +func (p *parser) parseLiteral() (string, error) { + var literal []rune + startPos := p.pos + for { + if isSingleCharPChar(p.tok) { + literal = append(literal, p.tok) + p.next() + continue + } + if p.tok == '%' && isHexDigit(p.peekN(1)) && isHexDigit(p.peekN(2)) { + literal = append(literal, p.tok) + p.next() + literal = append(literal, p.tok) + p.next() + literal = append(literal, p.tok) + p.next() + continue + } + break + } + if len(literal) == 0 { + return "", fmt.Errorf("expected literal at position %d, found %s", startPos-1, p.tokenString()) + } + return string(literal), nil +} + +func (p *parser) parseIdent() (string, error) { + var ident []rune + startPos := p.pos + for { + if isAlpha(p.tok) || isDigit(p.tok) || p.tok == '_' { + ident = append(ident, p.tok) + p.next() + continue + } + break + } + if len(ident) == 0 { + return "", fmt.Errorf("expected identifier at position %d, found %s", startPos-1, p.tokenString()) + } + return string(ident), nil +} + +func (p *parser) next() { + if p.pos < len(p.content) { + p.tok = rune(p.content[p.pos]) + p.pos++ + } else { + p.tok = -1 + p.pos = len(p.content) + } +} + +func (p parser) tokenString() string { + if p.tok == -1 { + return "EOF" + } + return fmt.Sprintf("%q", p.tok) +} + +func (p *parser) peek() rune { + return p.peekN(1) +} + +func (p *parser) peekN(n int) rune { + if offset := p.pos + n - 1; offset < len(p.content) { + return rune(p.content[offset]) + } + return -1 +} + +func (p *parser) expect(r rune) error { + if p.tok != r { + return fmt.Errorf("expected token %q at position %d, found %s", r, p.pos, p.tokenString()) + } + p.next() + return nil +} + +// https://www.ietf.org/rfc/rfc3986.txt, P.49 +// +// pchar = unreserved / pct-encoded / sub-delims / ":" / "@" +// unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" +// sub-delims = "!" / "$" / "&" / "'" / "(" / ")" +// / "*" / "+" / "," / ";" / "=" +// pct-encoded = "%" HEXDIG HEXDIG +func isSingleCharPChar(r rune) bool { + if isAlpha(r) || isDigit(r) { + return true + } + switch r { + case '@', '-', '.', '_', '~', '!', + '$', '&', '\'', '(', ')', '*', '+', + ',', ';', '=': // ':' + return true + } + return false +} + +func isAlpha(r rune) bool { + return ('A' <= r && r <= 'Z') || ('a' <= r && r <= 'z') +} + +func isDigit(r rune) bool { + return '0' <= r && r <= '9' +} + +func isHexDigit(r rune) bool { + switch { + case '0' <= r && r <= '9': + return true + case 'A' <= r && r <= 'F': + return true + case 'a' <= r && r <= 'f': + return true + } + return false +} + +// validate validates parts of the template that are +// allowed by the grammar, but disallowed in practice. +// +// - nested variable segments +// - '**' for segments other than the last. 
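+//
+// For example, "/v1/{name=books/*}:publish" and "/v1/messages/{message}/threads/{thread}"
+// are accepted, while "/{name=messages/{id}}" (nested variable), "/**/*" ('**' not last),
+// "/v1/messages/*" (bare '*' outside a variable) and "/v1/{id}/{id}" (duplicate binding)
+// are rejected.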
+func validate(t Template) error { + // check for nested variable segments + for _, s1 := range t.Segments { + if s1.Kind != SegmentKindVariable { + continue + } + for _, s2 := range s1.Variable.Segments { + if s2.Kind == SegmentKindVariable { + return fmt.Errorf("nested variable segment is not allowed") + } + } + } + // check for '**' that are not the last part of the template + for i, s := range t.Segments { + if i == len(t.Segments)-1 { + continue + } + if s.Kind == SegmentKindMatchMultiple { + return fmt.Errorf("'**' only allowed as last part of template") + } + if s.Kind == SegmentKindVariable { + for _, s2 := range s.Variable.Segments { + if s2.Kind == SegmentKindMatchMultiple { + return fmt.Errorf("'**' only allowed as last part of template") + } + } + } + } + // check for variable where '**' is not last part + for _, s := range t.Segments { + if s.Kind != SegmentKindVariable { + continue + } + for i, s2 := range s.Variable.Segments { + if i == len(s.Variable.Segments)-1 { + continue + } + if s2.Kind == SegmentKindMatchMultiple { + return fmt.Errorf("'**' only allowed as the last part of the template") + } + } + } + // check for top level expansions + for _, s := range t.Segments { + if s.Kind == SegmentKindMatchSingle { + return fmt.Errorf("'*' must only be used in variables") + } + if s.Kind == SegmentKindMatchMultiple { + return fmt.Errorf("'**' must only be used in variables") + } + } + // check for duplicate variable bindings + seen := make(map[string]struct{}) + for _, s := range t.Segments { + if s.Kind == SegmentKindVariable { + field := s.Variable.FieldPath.String() + if _, ok := seen[s.Variable.FieldPath.String()]; ok { + return fmt.Errorf("variable '%s' bound multiple times", field) + } + seen[field] = struct{}{} + } + } + return nil +} diff --git a/internal/httprule/template_test.go b/internal/httprule/template_test.go new file mode 100644 index 0000000..14929a2 --- /dev/null +++ b/internal/httprule/template_test.go @@ -0,0 +1,178 @@ +package httprule + +import ( + "testing" + + "gotest.tools/v3/assert" +) + +func Test_ParseTemplate(t *testing.T) { + t.Parallel() + for _, tt := range []struct { + input string + path Template + }{ + { + input: "/v1/messages", + path: Template{ + Segments: []Segment{ + {Kind: SegmentKindLiteral, Literal: "v1"}, + {Kind: SegmentKindLiteral, Literal: "messages"}, + }, + }, + }, + { + input: "/v1/messages:peek", + path: Template{ + Segments: []Segment{ + {Kind: SegmentKindLiteral, Literal: "v1"}, + {Kind: SegmentKindLiteral, Literal: "messages"}, + }, + Verb: "peek", + }, + }, + { + input: "/{id}", + path: Template{ + Segments: []Segment{ + { + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: []string{"id"}, + Segments: []Segment{ + {Kind: SegmentKindMatchSingle}, + }, + }, + }, + }, + }, + }, + { + input: "/{message.id}", + path: Template{ + Segments: []Segment{ + { + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: []string{"message", "id"}, + Segments: []Segment{ + {Kind: SegmentKindMatchSingle}, + }, + }, + }, + }, + }, + }, + { + input: "/{id=messages/*}", + path: Template{ + Segments: []Segment{ + { + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: []string{"id"}, + Segments: []Segment{ + {Kind: SegmentKindLiteral, Literal: "messages"}, + {Kind: SegmentKindMatchSingle}, + }, + }, + }, + }, + }, + }, + { + input: "/{id=messages/*/threads/*}", + path: Template{ + Segments: []Segment{ + { + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: []string{"id"}, + 
Segments: []Segment{ + {Kind: SegmentKindLiteral, Literal: "messages"}, + {Kind: SegmentKindMatchSingle}, + {Kind: SegmentKindLiteral, Literal: "threads"}, + {Kind: SegmentKindMatchSingle}, + }, + }, + }, + }, + }, + }, + { + input: "/{id=**}", + path: Template{ + Segments: []Segment{ + { + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: []string{"id"}, + Segments: []Segment{ + {Kind: SegmentKindMatchMultiple}, + }, + }, + }, + }, + }, + }, + { + input: "/v1/messages/{message}/threads/{thread}", + path: Template{ + Segments: []Segment{ + {Kind: SegmentKindLiteral, Literal: "v1"}, + {Kind: SegmentKindLiteral, Literal: "messages"}, + { + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: []string{"message"}, + Segments: []Segment{ + {Kind: SegmentKindMatchSingle}, + }, + }, + }, + {Kind: SegmentKindLiteral, Literal: "threads"}, + { + Kind: SegmentKindVariable, + Variable: VariableSegment{ + FieldPath: []string{"thread"}, + Segments: []Segment{ + {Kind: SegmentKindMatchSingle}, + }, + }, + }, + }, + }, + }, + } { + t.Run(tt.input, func(t *testing.T) { + t.Parallel() + got, err := ParseTemplate(tt.input) + assert.NilError(t, err) + assert.DeepEqual(t, tt.path, got) + }) + } +} + +func Test_ParseTemplate_Invalid(t *testing.T) { + t.Parallel() + for _, tt := range []struct { + template string + expected string + }{ + {template: "", expected: "expected token '/' at position 0, found EOF"}, + {template: "//", expected: "expected literal at position 1, found '/'"}, + {template: "/v1:", expected: "expected literal at position 3, found EOF"}, + {template: "/v1/:", expected: "expected literal at position 4, found ':'"}, + {template: "/{name=messages/{id}}", expected: "nested variable segment is not allowed"}, + {template: "/**/*", expected: "'**' only allowed as last part of template"}, + {template: "/v1/messages/*", expected: "'*' must only be used in variables"}, + {template: "/v1/{id}/{id}", expected: "variable 'id' bound multiple times"}, + } { + t.Run(tt.template, func(t *testing.T) { + t.Parallel() + _, err := ParseTemplate(tt.template) + assert.Check(t, err != nil) + assert.ErrorContains(t, err, tt.expected) + }) + } +} diff --git a/internal/plugin/commentgen.go b/internal/plugin/commentgen.go new file mode 100644 index 0000000..3a1c12a --- /dev/null +++ b/internal/plugin/commentgen.go @@ -0,0 +1,59 @@ +package plugin + +import ( + "strings" + + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/codegen" + "google.golang.org/genproto/googleapis/api/annotations" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" +) + +type commentGenerator struct { + descriptor protoreflect.Descriptor +} + +func (c commentGenerator) generateLeading(f *codegen.File, indent int) { + loc := c.descriptor.ParentFile().SourceLocations().ByDescriptor(c.descriptor) + var comments string + if loc.TrailingComments != "" { + comments = comments + loc.TrailingComments + } + if loc.LeadingComments != "" { + comments = comments + loc.LeadingComments + } + lines := strings.Split(comments, "\n") + for _, line := range lines { + if line == "" { + continue + } + f.P(t(indent), "/** "+strings.TrimSpace(line)+" */ ") + } + if field, ok := c.descriptor.(protoreflect.FieldDescriptor); ok { + if behaviorComment := fieldBehaviorComment(field); len(behaviorComment) > 0 { + f.P(t(indent), "/** "+behaviorComment+" */") + } + } +} + +func fieldBehaviorComment(field protoreflect.FieldDescriptor) string { + behaviors := getFieldBehaviors(field) + if len(behaviors) == 0 
{ + return "" + } + + behaviorStrings := make([]string, 0, len(behaviors)) + for _, b := range behaviors { + behaviorStrings = append(behaviorStrings, b.String()) + } + return "Behaviors: " + strings.Join(behaviorStrings, ", ") +} + +func getFieldBehaviors(field protoreflect.FieldDescriptor) []annotations.FieldBehavior { + if behaviors, ok := proto.GetExtension( + field.Options(), annotations.E_FieldBehavior, + ).([]annotations.FieldBehavior); ok { + return behaviors + } + return nil +} diff --git a/internal/plugin/enumgen.go b/internal/plugin/enumgen.go new file mode 100644 index 0000000..73f395d --- /dev/null +++ b/internal/plugin/enumgen.go @@ -0,0 +1,31 @@ +package plugin + +import ( + "strconv" + + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/codegen" + "google.golang.org/protobuf/reflect/protoreflect" +) + +type enumGenerator struct { + pkg protoreflect.FullName + enum protoreflect.EnumDescriptor +} + +func (e enumGenerator) Generate(f *codegen.File) { + commentGenerator{descriptor: e.enum}.generateLeading(f, 0) + f.P("export type ", scopedDescriptorTypeName(e.pkg, e.enum), " =") + if e.enum.Values().Len() == 1 { + commentGenerator{descriptor: e.enum.Values().Get(0)}.generateLeading(f, 1) + f.P(t(1), strconv.Quote(string(e.enum.Values().Get(0).Name())), ";") + return + } + rangeEnumValues(e.enum, func(value protoreflect.EnumValueDescriptor, last bool) { + commentGenerator{descriptor: value}.generateLeading(f, 1) + if last { + f.P(t(1), "| ", strconv.Quote(string(value.Name())), ";") + } else { + f.P(t(1), "| ", strconv.Quote(string(value.Name()))) + } + }) +} diff --git a/internal/plugin/generate.go b/internal/plugin/generate.go new file mode 100644 index 0000000..31fb7fc --- /dev/null +++ b/internal/plugin/generate.go @@ -0,0 +1,52 @@ +package plugin + +import ( + "fmt" + "path" + "strings" + + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/codegen" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protodesc" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/types/descriptorpb" + "google.golang.org/protobuf/types/pluginpb" +) + +func Generate(request *pluginpb.CodeGeneratorRequest) (*pluginpb.CodeGeneratorResponse, error) { + generate := make(map[string]struct{}) + registry, err := protodesc.NewFiles(&descriptorpb.FileDescriptorSet{ + File: request.GetProtoFile(), + }) + if err != nil { + return nil, fmt.Errorf("create proto registry: %w", err) + } + for _, f := range request.GetFileToGenerate() { + generate[f] = struct{}{} + } + packaged := make(map[protoreflect.FullName][]protoreflect.FileDescriptor) + for _, f := range request.GetFileToGenerate() { + file, err := registry.FindFileByPath(f) + if err != nil { + return nil, fmt.Errorf("find file %s: %w", f, err) + } + packaged[file.Package()] = append(packaged[file.Package()], file) + } + + var res pluginpb.CodeGeneratorResponse + for pkg, files := range packaged { + var index codegen.File + indexPathElems := append(strings.Split(string(pkg), "."), "index.ts") + if err := (packageGenerator{pkg: pkg, files: files}).Generate(&index); err != nil { + return nil, fmt.Errorf("generate package '%s': %w", pkg, err) + } + index.P() + index.P("// @@protoc_insertion_point(typescript-http-eof)") + res.File = append(res.File, &pluginpb.CodeGeneratorResponse_File{ + Name: proto.String(path.Join(indexPathElems...)), + Content: proto.String(string(index.Content())), + }) + } + res.SupportedFeatures = proto.Uint64(uint64(pluginpb.CodeGeneratorResponse_FEATURE_PROTO3_OPTIONAL)) + return 
&res, nil +} diff --git a/internal/plugin/helpers.go b/internal/plugin/helpers.go new file mode 100644 index 0000000..40cb040 --- /dev/null +++ b/internal/plugin/helpers.go @@ -0,0 +1,58 @@ +package plugin + +import ( + "strings" + + "google.golang.org/protobuf/reflect/protoreflect" +) + +func scopedDescriptorTypeName(pkg protoreflect.FullName, desc protoreflect.Descriptor) string { + name := string(desc.Name()) + var prefix string + if desc.Parent() != desc.ParentFile() { + prefix = descriptorTypeName(desc.Parent()) + "_" + } + if desc.ParentFile().Package() != pkg { + prefix = packagePrefix(desc.ParentFile().Package()) + prefix + } + return prefix + name +} + +func descriptorTypeName(desc protoreflect.Descriptor) string { + name := string(desc.Name()) + var prefix string + if desc.Parent() != desc.ParentFile() { + prefix = descriptorTypeName(desc.Parent()) + "_" + } + return prefix + name +} + +func packagePrefix(pkg protoreflect.FullName) string { + return strings.Join(strings.Split(string(pkg), "."), "") + "_" +} + +func rangeFields(message protoreflect.MessageDescriptor, f func(field protoreflect.FieldDescriptor)) { + for i := 0; i < message.Fields().Len(); i++ { + f(message.Fields().Get(i)) + } +} + +func rangeMethods(methods protoreflect.MethodDescriptors, f func(method protoreflect.MethodDescriptor)) { + for i := 0; i < methods.Len(); i++ { + f(methods.Get(i)) + } +} + +func rangeEnumValues(enum protoreflect.EnumDescriptor, f func(value protoreflect.EnumValueDescriptor, last bool)) { + for i := 0; i < enum.Values().Len(); i++ { + if i == enum.Values().Len()-1 { + f(enum.Values().Get(i), true) + } else { + f(enum.Values().Get(i), false) + } + } +} + +func t(n int) string { + return strings.Repeat(" ", n) +} diff --git a/internal/plugin/jsonleafwalk.go b/internal/plugin/jsonleafwalk.go new file mode 100644 index 0000000..4d356ca --- /dev/null +++ b/internal/plugin/jsonleafwalk.go @@ -0,0 +1,48 @@ +package plugin + +import ( + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/httprule" + "google.golang.org/protobuf/reflect/protoreflect" +) + +type jsonLeafWalkFunc func(path httprule.FieldPath, field protoreflect.FieldDescriptor) + +func walkJSONLeafFields(message protoreflect.MessageDescriptor, f jsonLeafWalkFunc) { + var w jsonWalker + w.walkMessage(nil, message, f) +} + +type jsonWalker struct { + seen map[protoreflect.FullName]struct{} +} + +func (w *jsonWalker) enter(name protoreflect.FullName) bool { + if _, ok := w.seen[name]; ok { + return false + } + if w.seen == nil { + w.seen = make(map[protoreflect.FullName]struct{}) + } + w.seen[name] = struct{}{} + return true +} + +func (w *jsonWalker) walkMessage(path httprule.FieldPath, message protoreflect.MessageDescriptor, f jsonLeafWalkFunc) { + if w.enter(message.FullName()) { + for i := 0; i < message.Fields().Len(); i++ { + field := message.Fields().Get(i) + p := append(httprule.FieldPath{}, path...) 
+ p = append(p, string(field.Name())) + switch { + case !field.IsMap() && field.Kind() == protoreflect.MessageKind: + if IsWellKnownType(field.Message()) { + f(p, field) + } else { + w.walkMessage(p, field.Message(), f) + } + default: + f(p, field) + } + } + } +} diff --git a/internal/plugin/messagegen.go b/internal/plugin/messagegen.go new file mode 100644 index 0000000..7cd3bc4 --- /dev/null +++ b/internal/plugin/messagegen.go @@ -0,0 +1,24 @@ +package plugin + +import ( + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/codegen" + "google.golang.org/protobuf/reflect/protoreflect" +) + +type messageGenerator struct { + pkg protoreflect.FullName + message protoreflect.MessageDescriptor +} + +func (m messageGenerator) Generate(f *codegen.File) { + commentGenerator{descriptor: m.message}.generateLeading(f, 0) + f.P("export type ", scopedDescriptorTypeName(m.pkg, m.message), " = {") + rangeFields(m.message, func(field protoreflect.FieldDescriptor) { + commentGenerator{descriptor: field}.generateLeading(f, 1) + fieldType := typeFromField(m.pkg, field) + f.P(t(1), field.JSONName(), "?: ", fieldType.Reference(), ";") + }) + + f.P("};") + f.P() +} diff --git a/internal/plugin/packagegen.go b/internal/plugin/packagegen.go new file mode 100644 index 0000000..535a108 --- /dev/null +++ b/internal/plugin/packagegen.go @@ -0,0 +1,51 @@ +package plugin + +import ( + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/codegen" + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/protowalk" + "google.golang.org/protobuf/reflect/protoreflect" +) + +type packageGenerator struct { + pkg protoreflect.FullName + files []protoreflect.FileDescriptor +} + +func (p packageGenerator) Generate(f *codegen.File) error { + p.generateHeader(f) + var seenService bool + var walkErr error + protowalk.WalkFiles(p.files, func(desc protoreflect.Descriptor) bool { + if wkt, ok := WellKnownType(desc); ok { + f.P(wkt.TypeDeclaration()) + return false + } + switch v := desc.(type) { + case protoreflect.MessageDescriptor: + if v.IsMapEntry() { + return false + } + messageGenerator{pkg: p.pkg, message: v}.Generate(f) + case protoreflect.EnumDescriptor: + enumGenerator{pkg: p.pkg, enum: v}.Generate(f) + case protoreflect.ServiceDescriptor: + if err := (serviceGenerator{pkg: p.pkg, service: v, genHandler: !seenService}).Generate(f); err != nil { + walkErr = err + return false + } + seenService = true + } + return true + }) + if walkErr != nil { + return walkErr + } + return nil +} + +func (p packageGenerator) generateHeader(f *codegen.File) { + f.P("// Code generated by protoc-gen-typescript-http. 
DO NOT EDIT.") + f.P("/* eslint-disable camelcase */") + f.P("// @ts-nocheck") + f.P() +} diff --git a/internal/plugin/servicegen.go b/internal/plugin/servicegen.go new file mode 100644 index 0000000..13f1c2c --- /dev/null +++ b/internal/plugin/servicegen.go @@ -0,0 +1,237 @@ +package plugin + +import ( + "fmt" + "strconv" + "strings" + + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/codegen" + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/httprule" + "google.golang.org/protobuf/reflect/protoreflect" +) + +type serviceGenerator struct { + pkg protoreflect.FullName + genHandler bool + service protoreflect.ServiceDescriptor +} + +func (s serviceGenerator) Generate(f *codegen.File) error { + s.generateInterface(f) + if s.genHandler { + s.generateHandler(f) + } + return s.generateClient(f) +} + +func (s serviceGenerator) generateInterface(f *codegen.File) { + commentGenerator{descriptor: s.service}.generateLeading(f, 0) + f.P("export interface ", descriptorTypeName(s.service), " {") + rangeMethods(s.service.Methods(), func(method protoreflect.MethodDescriptor) { + if !supportedMethod(method) { + return + } + commentGenerator{descriptor: method}.generateLeading(f, 1) + input := typeFromMessage(s.pkg, method.Input()) + output := typeFromMessage(s.pkg, method.Output()) + f.P(t(1), method.Name(), "(request: ", input.Reference(), "): Promise<", output.Reference(), ">;") + }) + f.P("}") + f.P() +} + +func (s serviceGenerator) generateHandler(f *codegen.File) { + f.P("type RequestType = {") + f.P(t(1), "path: string;") + f.P(t(1), "method: string;") + f.P(t(1), "body: string | null;") + f.P("};") + f.P() + f.P("type RequestHandler = (request: RequestType, meta: { service: string, method: string }) => Promise;") + f.P() +} + +func (s serviceGenerator) generateClient(f *codegen.File) error { + f.P( + "export function create", + descriptorTypeName(s.service), + "Client(", + "\n", + t(1), + "handler: RequestHandler", + "\n", + "): ", + descriptorTypeName(s.service), + " {", + ) + f.P(t(1), "return {") + var methodErr error + rangeMethods(s.service.Methods(), func(method protoreflect.MethodDescriptor) { + if err := s.generateMethod(f, method); err != nil { + methodErr = fmt.Errorf("generate method %s: %w", method.Name(), err) + } + }) + if methodErr != nil { + return methodErr + } + f.P(t(1), "};") + f.P("}") + return nil +} + +func (s serviceGenerator) generateMethod(f *codegen.File, method protoreflect.MethodDescriptor) error { + outputType := typeFromMessage(s.pkg, method.Output()) + r, ok := httprule.Get(method) + if !ok { + return nil + } + rule, err := httprule.ParseRule(r) + if err != nil { + return fmt.Errorf("parse http rule: %w", err) + } + f.P(t(2), method.Name(), "(request) { // eslint-disable-line @typescript-eslint/no-unused-vars") + s.generateMethodPathValidation(f, method.Input(), rule) + s.generateMethodPath(f, method.Input(), rule) + s.generateMethodBody(f, method.Input(), rule) + s.generateMethodQuery(f, method.Input(), rule) + f.P(t(3), "let uri = path;") + f.P(t(3), "if (queryParams.length > 0) {") + f.P(t(4), "uri += `?${queryParams.join(\"&\")}`") + f.P(t(3), "}") + f.P(t(3), "return handler({") + f.P(t(4), "path: uri,") + f.P(t(4), "method: ", strconv.Quote(rule.Method), ",") + f.P(t(4), "body,") + f.P(t(3), "}, {") + f.P(t(4), "service: \"", method.Parent().Name(), "\",") + f.P(t(4), "method: \"", method.Name(), "\",") + f.P(t(3), "}) as Promise<", outputType.Reference(), ">;") + f.P(t(2), "},") + return nil +} + +func (s serviceGenerator) generateMethodPathValidation( + f 
*codegen.File, + input protoreflect.MessageDescriptor, + rule httprule.Rule, +) { + for _, seg := range rule.Template.Segments { + if seg.Kind != httprule.SegmentKindVariable { + continue + } + fp := seg.Variable.FieldPath + nullPath := nullPropagationPath(fp, input) + protoPath := strings.Join(fp, ".") + errMsg := "missing required field request." + protoPath + f.P(t(3), "if (!request.", nullPath, ") {") + f.P(t(4), "throw new Error(", strconv.Quote(errMsg), ");") + f.P(t(3), "}") + } +} + +func (s serviceGenerator) generateMethodPath( + f *codegen.File, + input protoreflect.MessageDescriptor, + rule httprule.Rule, +) { + pathParts := make([]string, 0, len(rule.Template.Segments)) + for _, seg := range rule.Template.Segments { + switch seg.Kind { + case httprule.SegmentKindVariable: + fieldPath := jsonPath(seg.Variable.FieldPath, input) + pathParts = append(pathParts, "${request."+fieldPath+"}") + case httprule.SegmentKindLiteral: + pathParts = append(pathParts, seg.Literal) + case httprule.SegmentKindMatchSingle: // TODO: Double check this and following case + pathParts = append(pathParts, "*") + case httprule.SegmentKindMatchMultiple: + pathParts = append(pathParts, "**") + } + } + path := strings.Join(pathParts, "/") + if rule.Template.Verb != "" { + path += ":" + rule.Template.Verb + } + f.P(t(3), "const path = `", path, "`; // eslint-disable-line quotes") +} + +func (s serviceGenerator) generateMethodBody( + f *codegen.File, + input protoreflect.MessageDescriptor, + rule httprule.Rule, +) { + switch { + case rule.Body == "": + f.P(t(3), "const body = null;") + case rule.Body == "*": + f.P(t(3), "const body = JSON.stringify(request);") + default: + nullPath := nullPropagationPath(httprule.FieldPath{rule.Body}, input) + f.P(t(3), "const body = JSON.stringify(request?.", nullPath, " ?? 
{});") + } +} + +func (s serviceGenerator) generateMethodQuery( + f *codegen.File, + input protoreflect.MessageDescriptor, + rule httprule.Rule, +) { + f.P(t(3), "const queryParams: string[] = [];") + // nothing in query + if rule.Body == "*" { + return + } + // fields covered by path + pathCovered := make(map[string]struct{}) + for _, segment := range rule.Template.Segments { + if segment.Kind != httprule.SegmentKindVariable { + continue + } + pathCovered[segment.Variable.FieldPath.String()] = struct{}{} + } + walkJSONLeafFields(input, func(path httprule.FieldPath, field protoreflect.FieldDescriptor) { + if _, ok := pathCovered[path.String()]; ok { + return + } + if rule.Body != "" && path[0] == rule.Body { + return + } + nullPath := nullPropagationPath(path, input) + jp := jsonPath(path, input) + f.P(t(3), "if (request.", nullPath, ") {") + switch { + case field.IsList(): + f.P(t(4), "request.", jp, ".forEach((x) => {") + f.P(t(5), "queryParams.push(`", jp, "=${encodeURIComponent(x.toString())}`)") + f.P(t(4), "})") + default: + f.P(t(4), "queryParams.push(`", jp, "=${encodeURIComponent(request.", jp, ".toString())}`)") + } + f.P(t(3), "}") + }) +} + +func supportedMethod(method protoreflect.MethodDescriptor) bool { + _, ok := httprule.Get(method) + return ok && !method.IsStreamingClient() && !method.IsStreamingServer() +} + +func jsonPath(path httprule.FieldPath, message protoreflect.MessageDescriptor) string { + return strings.Join(jsonPathSegments(path, message), ".") +} + +func nullPropagationPath(path httprule.FieldPath, message protoreflect.MessageDescriptor) string { + return strings.Join(jsonPathSegments(path, message), "?.") +} + +func jsonPathSegments(path httprule.FieldPath, message protoreflect.MessageDescriptor) []string { + segs := make([]string, len(path)) + for i, p := range path { + field := message.Fields().ByName(protoreflect.Name(p)) + segs[i] = field.JSONName() + if i < len(path) { + message = field.Message() + } + } + return segs +} diff --git a/internal/plugin/type.go b/internal/plugin/type.go new file mode 100644 index 0000000..94ef602 --- /dev/null +++ b/internal/plugin/type.go @@ -0,0 +1,82 @@ +package plugin + +import "google.golang.org/protobuf/reflect/protoreflect" + +type Type struct { + IsNamed bool + Name string + + IsList bool + IsMap bool + Underlying *Type +} + +func (t Type) Reference() string { + switch { + case t.IsMap: + return "{ [key: string]: " + t.Underlying.Reference() + " }" + case t.IsList: + return t.Underlying.Reference() + "[]" + default: + return t.Name + } +} + +func typeFromField(pkg protoreflect.FullName, field protoreflect.FieldDescriptor) Type { + switch { + case field.IsMap(): + underlying := namedTypeFromField(pkg, field.MapValue()) + return Type{ + IsMap: true, + Underlying: &underlying, + } + case field.IsList(): + underlying := namedTypeFromField(pkg, field) + return Type{ + IsList: true, + Underlying: &underlying, + } + default: + return namedTypeFromField(pkg, field) + } +} + +func namedTypeFromField(pkg protoreflect.FullName, field protoreflect.FieldDescriptor) Type { + switch field.Kind() { + case protoreflect.StringKind, protoreflect.BytesKind: + return Type{IsNamed: true, Name: "string"} + case protoreflect.BoolKind: + return Type{IsNamed: true, Name: "boolean"} + case + protoreflect.Int32Kind, + protoreflect.Int64Kind, + protoreflect.Uint32Kind, + protoreflect.Uint64Kind, + protoreflect.DoubleKind, + protoreflect.Fixed32Kind, + protoreflect.Fixed64Kind, + protoreflect.Sfixed32Kind, + protoreflect.Sfixed64Kind, + 
protoreflect.Sint32Kind, + protoreflect.Sint64Kind, + protoreflect.FloatKind: + return Type{IsNamed: true, Name: "number"} + case protoreflect.MessageKind: + return typeFromMessage(pkg, field.Message()) + case protoreflect.EnumKind: + desc := field.Enum() + if wkt, ok := WellKnownType(field.Enum()); ok { + return Type{IsNamed: true, Name: wkt.Name()} + } + return Type{IsNamed: true, Name: scopedDescriptorTypeName(pkg, desc)} + default: + return Type{IsNamed: true, Name: "unknown"} + } +} + +func typeFromMessage(pkg protoreflect.FullName, message protoreflect.MessageDescriptor) Type { + if wkt, ok := WellKnownType(message); ok { + return Type{IsNamed: true, Name: wkt.Name()} + } + return Type{IsNamed: true, Name: scopedDescriptorTypeName(pkg, message)} +} diff --git a/internal/plugin/wellknown.go b/internal/plugin/wellknown.go new file mode 100644 index 0000000..6a8cf00 --- /dev/null +++ b/internal/plugin/wellknown.go @@ -0,0 +1,157 @@ +package plugin + +import ( + "strings" + + "google.golang.org/protobuf/reflect/protoreflect" +) + +const ( + wellKnownPrefix = "google.protobuf." +) + +type WellKnown string + +// https://developers.google.com/protocol-buffers/docs/reference/google.protobuf +const ( + WellKnownAny WellKnown = "google.protobuf.Any" + WellKnownDuration WellKnown = "google.protobuf.Duration" + WellKnownEmpty WellKnown = "google.protobuf.Empty" + WellKnownFieldMask WellKnown = "google.protobuf.FieldMask" + WellKnownStruct WellKnown = "google.protobuf.Struct" + WellKnownTimestamp WellKnown = "google.protobuf.Timestamp" + + // Wrapper types. + WellKnownFloatValue WellKnown = "google.protobuf.FloatValue" + WellKnownInt64Value WellKnown = "google.protobuf.Int64Value" + WellKnownInt32Value WellKnown = "google.protobuf.Int32Value" + WellKnownUInt64Value WellKnown = "google.protobuf.UInt64Value" + WellKnownUInt32Value WellKnown = "google.protobuf.UInt32Value" + WellKnownBytesValue WellKnown = "google.protobuf.BytesValue" + WellKnownDoubleValue WellKnown = "google.protobuf.DoubleValue" + WellKnownBoolValue WellKnown = "google.protobuf.BoolValue" + WellKnownStringValue WellKnown = "google.protobuf.StringValue" + + // Descriptor types. 
+ WellKnownValue WellKnown = "google.protobuf.Value" + WellKnownNullValue WellKnown = "google.protobuf.NullValue" + WellKnownListValue WellKnown = "google.protobuf.ListValue" +) + +func IsWellKnownType(desc protoreflect.Descriptor) bool { + switch desc.(type) { + case protoreflect.MessageDescriptor, protoreflect.EnumDescriptor: + return strings.HasPrefix(string(desc.FullName()), wellKnownPrefix) + default: + return false + } +} + +func WellKnownType(desc protoreflect.Descriptor) (WellKnown, bool) { + if !IsWellKnownType(desc) { + return "", false + } + return WellKnown(desc.FullName()), true +} + +func (wkt WellKnown) Name() string { + return "wellKnown" + strings.TrimPrefix(string(wkt), wellKnownPrefix) +} + +func (wkt WellKnown) TypeDeclaration() string { + var w writer + switch wkt { + case WellKnownAny: + w.P("// If the Any contains a value that has a special JSON mapping,") + w.P("// it will be converted as follows:") + w.P("// {\"@type\": xxx, \"value\": yyy}.") + w.P("// Otherwise, the value will be converted into a JSON object,") + w.P("// and the \"@type\" field will be inserted to indicate the actual data type.") + w.P("interface ", wkt.Name(), " {") + w.P(" ", "\"@type\": string;") + w.P(" [key: string]: unknown;") + w.P("}") + case WellKnownDuration: + w.P("// Generated output always contains 0, 3, 6, or 9 fractional digits,") + w.P("// depending on required precision, followed by the suffix \"s\".") + w.P("// Accepted are any fractional digits (also none) as long as they fit") + w.P("// into nano-seconds precision and the suffix \"s\" is required.") + w.P("type ", wkt.Name(), " = string;") + case WellKnownEmpty: + w.P("// An empty JSON object") + w.P("type ", wkt.Name(), " = Record<never, never>;") + case WellKnownTimestamp: + w.P("// Encoded using RFC 3339, where generated output will always be Z-normalized") + w.P("// and uses 0, 3, 6 or 9 fractional digits.") + w.P("// Offsets other than \"Z\" are also accepted.") + w.P("type ", wkt.Name(), " = string;") + case WellKnownFieldMask: + w.P("// In JSON, a field mask is encoded as a single string where paths are") + w.P("// separated by a comma. 
Fields name in each path are converted") + w.P("// to/from lower-camel naming conventions.") + w.P("// As an example, consider the following message declarations:") + w.P("//") + w.P("// message Profile {") + w.P("// User user = 1;") + w.P("// Photo photo = 2;") + w.P("// }") + w.P("// message User {") + w.P("// string display_name = 1;") + w.P("// string address = 2;") + w.P("// }") + w.P("//") + w.P("// In proto a field mask for `Profile` may look as such:") + w.P("//") + w.P("// mask {") + w.P("// paths: \"user.display_name\"") + w.P("// paths: \"photo\"") + w.P("// }") + w.P("//") + w.P("// In JSON, the same mask is represented as below:") + w.P("//") + w.P("// {") + w.P("// mask: \"user.displayName,photo\"") + w.P("// }") + w.P("type ", wkt.Name(), " = string;") + case WellKnownFloatValue, + WellKnownDoubleValue, + WellKnownInt64Value, + WellKnownInt32Value, + WellKnownUInt64Value, + WellKnownUInt32Value: + w.P("type ", wkt.Name(), " = number | null;") + case WellKnownBytesValue, WellKnownStringValue: + w.P("type ", wkt.Name(), " = string | null;") + case WellKnownBoolValue: + w.P("type ", wkt.Name(), " = boolean | null;") + case WellKnownStruct: + w.P("// Any JSON value.") + w.P("type ", wkt.Name(), " = Record<string, unknown>;") + case WellKnownValue: + w.P("type ", wkt.Name(), " = unknown;") + case WellKnownNullValue: + w.P("type ", wkt.Name(), " = null;") + case WellKnownListValue: + w.P("type ", wkt.Name(), " = ", WellKnownValue.Name(), "[];") + default: + w.P("// No mapping for this well known type is generated, yet.") + w.P("type ", wkt.Name(), " = unknown;") + } + return w.String() +} + +type writer struct { + b strings.Builder +} + +func (w *writer) P(ss ...string) { + for _, s := range ss { + // strings.Builder never returns an error, so safe to ignore + _, _ = w.b.WriteString(s) + } + _, _ = w.b.WriteString("\n") +} + +func (w *writer) String() string { + return w.b.String() +} diff --git a/internal/protowalk/walk.go b/internal/protowalk/walk.go new file mode 100644 index 0000000..75c21df --- /dev/null +++ b/internal/protowalk/walk.go @@ -0,0 +1,128 @@ +package protowalk + +import ( + "google.golang.org/protobuf/reflect/protoreflect" +) + +type WalkFunc func(desc protoreflect.Descriptor) bool + +func WalkFiles(files []protoreflect.FileDescriptor, f WalkFunc) { + var w walker + w.walkFiles(files, f) +} + +type walker struct { + seen map[string]struct{} +} + +func (w *walker) enter(name string) bool { + if _, ok := w.seen[name]; ok { + return false + } + if w.seen == nil { + w.seen = make(map[string]struct{}) + } + w.seen[name] = struct{}{} + return true +} + +func (w *walker) walkFiles(files []protoreflect.FileDescriptor, f WalkFunc) { + for _, file := range files { + w.walkFile(file, f) + } +} + +func (w *walker) walkFile(file protoreflect.FileDescriptor, f WalkFunc) { + if w.enter(file.Path()) { + if !f(file) { + return + } + w.walkEnums(file.Enums(), f) + w.walkMessages(file.Messages(), f) + w.walkServices(file.Services(), f) + } +} + +func (w *walker) walkEnums(enums protoreflect.EnumDescriptors, f WalkFunc) { + for i := 0; i < enums.Len(); i++ { + w.walkEnum(enums.Get(i), f) + } +} + +func (w *walker) walkEnum(enum protoreflect.EnumDescriptor, f WalkFunc) { + if w.enter(string(enum.FullName())) { + f(enum) + } +} + +func (w *walker) walkMessages(messages protoreflect.MessageDescriptors, f WalkFunc) { + for i := 0; i < messages.Len(); i++ { + w.walkMessage(messages.Get(i), f) + } +} + +func (w *walker) walkMessage(message protoreflect.MessageDescriptor, f WalkFunc) { + if 
w.enter(string(message.FullName())) { + if !f(message) { + return + } + w.walkFields(message.Fields(), f) + w.walkMessages(message.Messages(), f) + w.walkEnums(message.Enums(), f) + } +} + +func (w *walker) walkFields(fields protoreflect.FieldDescriptors, f WalkFunc) { + for i := 0; i < fields.Len(); i++ { + w.walkField(fields.Get(i), f) + } +} + +func (w *walker) walkField(field protoreflect.FieldDescriptor, f WalkFunc) { + if w.enter(string(field.FullName())) { + if !f(field) { + return + } + if field.IsMap() { + w.walkField(field.MapKey(), f) + w.walkField(field.MapValue(), f) + } + if field.Message() != nil { + w.walkMessage(field.Message(), f) + } + if field.Enum() != nil { + w.walkEnum(field.Enum(), f) + } + } +} + +func (w *walker) walkServices(services protoreflect.ServiceDescriptors, f WalkFunc) { + for i := 0; i < services.Len(); i++ { + w.walkService(services.Get(i), f) + } +} + +func (w *walker) walkService(service protoreflect.ServiceDescriptor, f WalkFunc) { + if w.enter(string(service.FullName())) { + if !f(service) { + return + } + w.walkMethods(service.Methods(), f) + } +} + +func (w *walker) walkMethods(methods protoreflect.MethodDescriptors, f WalkFunc) { + for i := 0; i < methods.Len(); i++ { + w.walkMethod(methods.Get(i), f) + } +} + +func (w *walker) walkMethod(method protoreflect.MethodDescriptor, f WalkFunc) { + if w.enter(string(method.FullName())) { + if !f(method) { + return + } + w.walkMessage(method.Input(), f) + w.walkMessage(method.Output(), f) + } +} diff --git a/main.go b/main.go new file mode 100644 index 0000000..22ae01d --- /dev/null +++ b/main.go @@ -0,0 +1,42 @@ +package main + +import ( + "fmt" + "io" + "os" + "path/filepath" + + "git.apinb.com/bsm-tools/protoc-gen-ts/internal/plugin" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/pluginpb" +) + +func main() { + if err := run(); err != nil { + fmt.Fprintf(os.Stderr, "%s: %v\n", filepath.Base(os.Args[0]), err) + os.Exit(1) + } +} + +func run() error { + in, err := io.ReadAll(os.Stdin) + if err != nil { + return err + } + req := &pluginpb.CodeGeneratorRequest{} + if err := proto.Unmarshal(in, req); err != nil { + return err + } + resp, err := plugin.Generate(req) + if err != nil { + return err + } + out, err := proto.Marshal(resp) + if err != nil { + return err + } + if _, err := os.Stdout.Write(out); err != nil { + return err + } + return nil +}
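
For context on how the generated output above is meant to be consumed: the client factory emitted by servicegen.go takes a single RequestHandler, so callers supply the transport themselves. The sketch below is illustrative only; the import path, service name (ExampleService), method (GetMessage), and endpoint are assumptions and do not come from this diff.

// Hypothetical import: the real path follows the proto package, e.g. example/v1/index.ts.
import { ExampleService, createExampleServiceClient } from "./gen/example/v1";

// A fetch-backed handler matching the generated RequestType/RequestHandler shape:
// it receives { path, method, body } plus { service, method } metadata and returns parsed JSON.
const client: ExampleService = createExampleServiceClient(async (request, meta) => {
  const response = await fetch(`https://api.example.com${request.path}`, {
    method: request.method,
    body: request.body,
    headers: { "content-type": "application/json" },
  });
  if (!response.ok) {
    throw new Error(`${meta.service}/${meta.method} failed: ${response.status}`);
  }
  return response.json();
});

// Generated methods mirror the service definition, e.g.:
// const message = await client.GetMessage({ name: "messages/1" });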