build(deps): bump the golang group with 5 updates

Bumps the golang group with 5 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/Microsoft/hcsshim](https://github.com/Microsoft/hcsshim) | `0.11.4` | `0.12.0` |
| [github.com/alexflint/go-filemutex](https://github.com/alexflint/go-filemutex) | `1.2.0` | `1.3.0` |
| [github.com/onsi/ginkgo/v2](https://github.com/onsi/ginkgo) | `2.13.2` | `2.16.0` |
| [github.com/onsi/gomega](https://github.com/onsi/gomega) | `1.30.0` | `1.31.1` |
| [golang.org/x/sys](https://github.com/golang/sys) | `0.15.0` | `0.17.0` |


Updates `github.com/Microsoft/hcsshim` from 0.11.4 to 0.12.0
- [Release notes](https://github.com/Microsoft/hcsshim/releases)
- [Commits](https://github.com/Microsoft/hcsshim/compare/v0.11.4...v0.12.0)

Updates `github.com/alexflint/go-filemutex` from 1.2.0 to 1.3.0
- [Release notes](https://github.com/alexflint/go-filemutex/releases)
- [Commits](https://github.com/alexflint/go-filemutex/compare/v1.2.0...v1.3.0)

Updates `github.com/onsi/ginkgo/v2` from 2.13.2 to 2.16.0
- [Release notes](https://github.com/onsi/ginkgo/releases)
- [Changelog](https://github.com/onsi/ginkgo/blob/master/CHANGELOG.md)
- [Commits](https://github.com/onsi/ginkgo/compare/v2.13.2...v2.16.0)

Updates `github.com/onsi/gomega` from 1.30.0 to 1.31.1
- [Release notes](https://github.com/onsi/gomega/releases)
- [Changelog](https://github.com/onsi/gomega/blob/master/CHANGELOG.md)
- [Commits](https://github.com/onsi/gomega/compare/v1.30.0...v1.31.1)

Updates `golang.org/x/sys` from 0.15.0 to 0.17.0
- [Commits](https://github.com/golang/sys/compare/v0.15.0...v0.17.0)

---
updated-dependencies:
- dependency-name: github.com/Microsoft/hcsshim
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: golang
- dependency-name: github.com/alexflint/go-filemutex
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: golang
- dependency-name: github.com/onsi/ginkgo/v2
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: golang
- dependency-name: github.com/onsi/gomega
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: golang
- dependency-name: golang.org/x/sys
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: golang
...

Signed-off-by: dependabot[bot] <support@github.com>
Authored by dependabot[bot] on 2024-03-11 15:54:46 +00:00, committed by GitHub
parent 0144de0fcf · commit 394ab0d149
376 changed files with 12902 additions and 33268 deletions

View File

@ -83,7 +83,13 @@ func formatListOpt(vs list, isRoot, allowMulti bool) string {
case protoreflect.FileImports:
for i := 0; i < vs.Len(); i++ {
var rs records
rs.Append(reflect.ValueOf(vs.Get(i)), "Path", "Package", "IsPublic", "IsWeak")
rv := reflect.ValueOf(vs.Get(i))
rs.Append(rv, []methodAndName{
{rv.MethodByName("Path"), "Path"},
{rv.MethodByName("Package"), "Package"},
{rv.MethodByName("IsPublic"), "IsPublic"},
{rv.MethodByName("IsWeak"), "IsWeak"},
}...)
ss = append(ss, "{"+rs.Join()+"}")
}
return start + joinStrings(ss, allowMulti) + end
@ -92,34 +98,26 @@ func formatListOpt(vs list, isRoot, allowMulti bool) string {
for i := 0; i < vs.Len(); i++ {
m := reflect.ValueOf(vs).MethodByName("Get")
v := m.Call([]reflect.Value{reflect.ValueOf(i)})[0].Interface()
ss = append(ss, formatDescOpt(v.(protoreflect.Descriptor), false, allowMulti && !isEnumValue))
ss = append(ss, formatDescOpt(v.(protoreflect.Descriptor), false, allowMulti && !isEnumValue, nil))
}
return start + joinStrings(ss, allowMulti && isEnumValue) + end
}
}
// descriptorAccessors is a list of accessors to print for each descriptor.
//
// Do not print all accessors since some contain redundant information,
// while others are pointers that we do not want to follow since the descriptor
// is actually a cyclic graph.
//
// Using a list allows us to print the accessors in a sensible order.
var descriptorAccessors = map[reflect.Type][]string{
reflect.TypeOf((*protoreflect.FileDescriptor)(nil)).Elem(): {"Path", "Package", "Imports", "Messages", "Enums", "Extensions", "Services"},
reflect.TypeOf((*protoreflect.MessageDescriptor)(nil)).Elem(): {"IsMapEntry", "Fields", "Oneofs", "ReservedNames", "ReservedRanges", "RequiredNumbers", "ExtensionRanges", "Messages", "Enums", "Extensions"},
reflect.TypeOf((*protoreflect.FieldDescriptor)(nil)).Elem(): {"Number", "Cardinality", "Kind", "HasJSONName", "JSONName", "HasPresence", "IsExtension", "IsPacked", "IsWeak", "IsList", "IsMap", "MapKey", "MapValue", "HasDefault", "Default", "ContainingOneof", "ContainingMessage", "Message", "Enum"},
reflect.TypeOf((*protoreflect.OneofDescriptor)(nil)).Elem(): {"Fields"}, // not directly used; must keep in sync with formatDescOpt
reflect.TypeOf((*protoreflect.EnumDescriptor)(nil)).Elem(): {"Values", "ReservedNames", "ReservedRanges"},
reflect.TypeOf((*protoreflect.EnumValueDescriptor)(nil)).Elem(): {"Number"},
reflect.TypeOf((*protoreflect.ServiceDescriptor)(nil)).Elem(): {"Methods"},
reflect.TypeOf((*protoreflect.MethodDescriptor)(nil)).Elem(): {"Input", "Output", "IsStreamingClient", "IsStreamingServer"},
type methodAndName struct {
method reflect.Value
name string
}
func FormatDesc(s fmt.State, r rune, t protoreflect.Descriptor) {
io.WriteString(s, formatDescOpt(t, true, r == 'v' && (s.Flag('+') || s.Flag('#'))))
io.WriteString(s, formatDescOpt(t, true, r == 'v' && (s.Flag('+') || s.Flag('#')), nil))
}
func formatDescOpt(t protoreflect.Descriptor, isRoot, allowMulti bool) string {
func InternalFormatDescOptForTesting(t protoreflect.Descriptor, isRoot, allowMulti bool, record func(string)) string {
return formatDescOpt(t, isRoot, allowMulti, record)
}
func formatDescOpt(t protoreflect.Descriptor, isRoot, allowMulti bool, record func(string)) string {
rv := reflect.ValueOf(t)
rt := rv.MethodByName("ProtoType").Type().In(0)
@ -129,26 +127,60 @@ func formatDescOpt(t protoreflect.Descriptor, isRoot, allowMulti bool) string {
}
_, isFile := t.(protoreflect.FileDescriptor)
rs := records{allowMulti: allowMulti}
rs := records{
allowMulti: allowMulti,
record: record,
}
if t.IsPlaceholder() {
if isFile {
rs.Append(rv, "Path", "Package", "IsPlaceholder")
rs.Append(rv, []methodAndName{
{rv.MethodByName("Path"), "Path"},
{rv.MethodByName("Package"), "Package"},
{rv.MethodByName("IsPlaceholder"), "IsPlaceholder"},
}...)
} else {
rs.Append(rv, "FullName", "IsPlaceholder")
rs.Append(rv, []methodAndName{
{rv.MethodByName("FullName"), "FullName"},
{rv.MethodByName("IsPlaceholder"), "IsPlaceholder"},
}...)
}
} else {
switch {
case isFile:
rs.Append(rv, "Syntax")
rs.Append(rv, methodAndName{rv.MethodByName("Syntax"), "Syntax"})
case isRoot:
rs.Append(rv, "Syntax", "FullName")
rs.Append(rv, []methodAndName{
{rv.MethodByName("Syntax"), "Syntax"},
{rv.MethodByName("FullName"), "FullName"},
}...)
default:
rs.Append(rv, "Name")
rs.Append(rv, methodAndName{rv.MethodByName("Name"), "Name"})
}
switch t := t.(type) {
case protoreflect.FieldDescriptor:
for _, s := range descriptorAccessors[rt] {
switch s {
accessors := []methodAndName{
{rv.MethodByName("Number"), "Number"},
{rv.MethodByName("Cardinality"), "Cardinality"},
{rv.MethodByName("Kind"), "Kind"},
{rv.MethodByName("HasJSONName"), "HasJSONName"},
{rv.MethodByName("JSONName"), "JSONName"},
{rv.MethodByName("HasPresence"), "HasPresence"},
{rv.MethodByName("IsExtension"), "IsExtension"},
{rv.MethodByName("IsPacked"), "IsPacked"},
{rv.MethodByName("IsWeak"), "IsWeak"},
{rv.MethodByName("IsList"), "IsList"},
{rv.MethodByName("IsMap"), "IsMap"},
{rv.MethodByName("MapKey"), "MapKey"},
{rv.MethodByName("MapValue"), "MapValue"},
{rv.MethodByName("HasDefault"), "HasDefault"},
{rv.MethodByName("Default"), "Default"},
{rv.MethodByName("ContainingOneof"), "ContainingOneof"},
{rv.MethodByName("ContainingMessage"), "ContainingMessage"},
{rv.MethodByName("Message"), "Message"},
{rv.MethodByName("Enum"), "Enum"},
}
for _, s := range accessors {
switch s.name {
case "MapKey":
if k := t.MapKey(); k != nil {
rs.recs = append(rs.recs, [2]string{"MapKey", k.Kind().String()})
@ -157,20 +189,20 @@ func formatDescOpt(t protoreflect.Descriptor, isRoot, allowMulti bool) string {
if v := t.MapValue(); v != nil {
switch v.Kind() {
case protoreflect.EnumKind:
rs.recs = append(rs.recs, [2]string{"MapValue", string(v.Enum().FullName())})
rs.AppendRecs("MapValue", [2]string{"MapValue", string(v.Enum().FullName())})
case protoreflect.MessageKind, protoreflect.GroupKind:
rs.recs = append(rs.recs, [2]string{"MapValue", string(v.Message().FullName())})
rs.AppendRecs("MapValue", [2]string{"MapValue", string(v.Message().FullName())})
default:
rs.recs = append(rs.recs, [2]string{"MapValue", v.Kind().String()})
rs.AppendRecs("MapValue", [2]string{"MapValue", v.Kind().String()})
}
}
case "ContainingOneof":
if od := t.ContainingOneof(); od != nil {
rs.recs = append(rs.recs, [2]string{"Oneof", string(od.Name())})
rs.AppendRecs("ContainingOneof", [2]string{"Oneof", string(od.Name())})
}
case "ContainingMessage":
if t.IsExtension() {
rs.recs = append(rs.recs, [2]string{"Extendee", string(t.ContainingMessage().FullName())})
rs.AppendRecs("ContainingMessage", [2]string{"Extendee", string(t.ContainingMessage().FullName())})
}
case "Message":
if !t.IsMap() {
@ -187,13 +219,61 @@ func formatDescOpt(t protoreflect.Descriptor, isRoot, allowMulti bool) string {
ss = append(ss, string(fs.Get(i).Name()))
}
if len(ss) > 0 {
rs.recs = append(rs.recs, [2]string{"Fields", "[" + joinStrings(ss, false) + "]"})
rs.AppendRecs("Fields", [2]string{"Fields", "[" + joinStrings(ss, false) + "]"})
}
default:
rs.Append(rv, descriptorAccessors[rt]...)
case protoreflect.FileDescriptor:
rs.Append(rv, []methodAndName{
{rv.MethodByName("Path"), "Path"},
{rv.MethodByName("Package"), "Package"},
{rv.MethodByName("Imports"), "Imports"},
{rv.MethodByName("Messages"), "Messages"},
{rv.MethodByName("Enums"), "Enums"},
{rv.MethodByName("Extensions"), "Extensions"},
{rv.MethodByName("Services"), "Services"},
}...)
case protoreflect.MessageDescriptor:
rs.Append(rv, []methodAndName{
{rv.MethodByName("IsMapEntry"), "IsMapEntry"},
{rv.MethodByName("Fields"), "Fields"},
{rv.MethodByName("Oneofs"), "Oneofs"},
{rv.MethodByName("ReservedNames"), "ReservedNames"},
{rv.MethodByName("ReservedRanges"), "ReservedRanges"},
{rv.MethodByName("RequiredNumbers"), "RequiredNumbers"},
{rv.MethodByName("ExtensionRanges"), "ExtensionRanges"},
{rv.MethodByName("Messages"), "Messages"},
{rv.MethodByName("Enums"), "Enums"},
{rv.MethodByName("Extensions"), "Extensions"},
}...)
case protoreflect.EnumDescriptor:
rs.Append(rv, []methodAndName{
{rv.MethodByName("Values"), "Values"},
{rv.MethodByName("ReservedNames"), "ReservedNames"},
{rv.MethodByName("ReservedRanges"), "ReservedRanges"},
}...)
case protoreflect.EnumValueDescriptor:
rs.Append(rv, []methodAndName{
{rv.MethodByName("Number"), "Number"},
}...)
case protoreflect.ServiceDescriptor:
rs.Append(rv, []methodAndName{
{rv.MethodByName("Methods"), "Methods"},
}...)
case protoreflect.MethodDescriptor:
rs.Append(rv, []methodAndName{
{rv.MethodByName("Input"), "Input"},
{rv.MethodByName("Output"), "Output"},
{rv.MethodByName("IsStreamingClient"), "IsStreamingClient"},
{rv.MethodByName("IsStreamingServer"), "IsStreamingServer"},
}...)
}
if rv.MethodByName("GoType").IsValid() {
rs.Append(rv, "GoType")
if m := rv.MethodByName("GoType"); m.IsValid() {
rs.Append(rv, methodAndName{m, "GoType"})
}
}
return start + rs.Join() + end
@ -202,19 +282,34 @@ func formatDescOpt(t protoreflect.Descriptor, isRoot, allowMulti bool) string {
type records struct {
recs [][2]string
allowMulti bool
// record is a function that will be called for every Append() or
// AppendRecs() call, to be used for testing with the
// InternalFormatDescOptForTesting function.
record func(string)
}
func (rs *records) Append(v reflect.Value, accessors ...string) {
func (rs *records) AppendRecs(fieldName string, newRecs [2]string) {
if rs.record != nil {
rs.record(fieldName)
}
rs.recs = append(rs.recs, newRecs)
}
func (rs *records) Append(v reflect.Value, accessors ...methodAndName) {
for _, a := range accessors {
if rs.record != nil {
rs.record(a.name)
}
var rv reflect.Value
if m := v.MethodByName(a); m.IsValid() {
rv = m.Call(nil)[0]
if a.method.IsValid() {
rv = a.method.Call(nil)[0]
}
if v.Kind() == reflect.Struct && !rv.IsValid() {
rv = v.FieldByName(a)
rv = v.FieldByName(a.name)
}
if !rv.IsValid() {
panic(fmt.Sprintf("unknown accessor: %v.%s", v.Type(), a))
panic(fmt.Sprintf("unknown accessor: %v.%s", v.Type(), a.name))
}
if _, ok := rv.Interface().(protoreflect.Value); ok {
rv = rv.MethodByName("Interface").Call(nil)[0]
@ -261,7 +356,7 @@ func (rs *records) Append(v reflect.Value, accessors ...string) {
default:
s = fmt.Sprint(v)
}
rs.recs = append(rs.recs, [2]string{a, s})
rs.recs = append(rs.recs, [2]string{a.name, s})
}
}
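
The record hook threaded through records.Append and AppendRecs lets a test observe exactly which accessor names the formatter visits, so the hand-maintained accessor lists can be checked against the descriptor interfaces. A minimal sketch of how a test might drive the new InternalFormatDescOptForTesting entry point, assuming it lives inside the protobuf module (descfmt is an internal package) and that md is any protoreflect.MessageDescriptor; the helper name is hypothetical:

package descfmt_test

import (
	"testing"

	"google.golang.org/protobuf/internal/descfmt"
	"google.golang.org/protobuf/reflect/protoreflect"
)

// checkAccessors collects every accessor name the formatter reports for md.
func checkAccessors(t *testing.T, md protoreflect.MessageDescriptor) {
	var seen []string
	descfmt.InternalFormatDescOptForTesting(md, true, true, func(name string) {
		seen = append(seen, name)
	})
	if len(seen) == 0 {
		t.Errorf("no accessors recorded for %v", md.FullName())
	}
}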

View File

@ -0,0 +1,340 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package json
import (
"bytes"
"fmt"
"io"
"regexp"
"unicode/utf8"
"google.golang.org/protobuf/internal/errors"
)
// call specifies which Decoder method was invoked.
type call uint8
const (
readCall call = iota
peekCall
)
const unexpectedFmt = "unexpected token %s"
// ErrUnexpectedEOF means that EOF was encountered in the middle of the input.
var ErrUnexpectedEOF = errors.New("%v", io.ErrUnexpectedEOF)
// Decoder is a token-based JSON decoder.
type Decoder struct {
// lastCall is last method called, either readCall or peekCall.
// Initial value is readCall.
lastCall call
// lastToken contains the last read token.
lastToken Token
// lastErr contains the last read error.
lastErr error
// openStack is a stack containing ObjectOpen and ArrayOpen values. The
// top of stack represents the object or the array the current value is
// directly located in.
openStack []Kind
// orig is used in reporting line and column.
orig []byte
// in contains the unconsumed input.
in []byte
}
// NewDecoder returns a Decoder to read the given []byte.
func NewDecoder(b []byte) *Decoder {
return &Decoder{orig: b, in: b}
}
// Peek looks ahead and returns the next token without advancing the read position.
func (d *Decoder) Peek() (Token, error) {
defer func() { d.lastCall = peekCall }()
if d.lastCall == readCall {
d.lastToken, d.lastErr = d.Read()
}
return d.lastToken, d.lastErr
}
// Read returns the next JSON token.
// It will return an error if there is no valid token.
func (d *Decoder) Read() (Token, error) {
const scalar = Null | Bool | Number | String
defer func() { d.lastCall = readCall }()
if d.lastCall == peekCall {
return d.lastToken, d.lastErr
}
tok, err := d.parseNext()
if err != nil {
return Token{}, err
}
switch tok.kind {
case EOF:
if len(d.openStack) != 0 ||
d.lastToken.kind&scalar|ObjectClose|ArrayClose == 0 {
return Token{}, ErrUnexpectedEOF
}
case Null:
if !d.isValueNext() {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}
case Bool, Number:
if !d.isValueNext() {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}
case String:
if d.isValueNext() {
break
}
// This string token should only be for a field name.
if d.lastToken.kind&(ObjectOpen|comma) == 0 {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}
if len(d.in) == 0 {
return Token{}, ErrUnexpectedEOF
}
if c := d.in[0]; c != ':' {
return Token{}, d.newSyntaxError(d.currPos(), `unexpected character %s, missing ":" after field name`, string(c))
}
tok.kind = Name
d.consume(1)
case ObjectOpen, ArrayOpen:
if !d.isValueNext() {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}
d.openStack = append(d.openStack, tok.kind)
case ObjectClose:
if len(d.openStack) == 0 ||
d.lastToken.kind == comma ||
d.openStack[len(d.openStack)-1] != ObjectOpen {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}
d.openStack = d.openStack[:len(d.openStack)-1]
case ArrayClose:
if len(d.openStack) == 0 ||
d.lastToken.kind == comma ||
d.openStack[len(d.openStack)-1] != ArrayOpen {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}
d.openStack = d.openStack[:len(d.openStack)-1]
case comma:
if len(d.openStack) == 0 ||
d.lastToken.kind&(scalar|ObjectClose|ArrayClose) == 0 {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}
}
// Update d.lastToken only after validating token to be in the right sequence.
d.lastToken = tok
if d.lastToken.kind == comma {
return d.Read()
}
return tok, nil
}
// Any sequence that looks like a non-delimiter (for error reporting).
var errRegexp = regexp.MustCompile(`^([-+._a-zA-Z0-9]{1,32}|.)`)
// parseNext parses for the next JSON token. It returns a Token object for
// different types, except for Name. It does not handle whether the next token
// is in a valid sequence or not.
func (d *Decoder) parseNext() (Token, error) {
// Trim leading spaces.
d.consume(0)
in := d.in
if len(in) == 0 {
return d.consumeToken(EOF, 0), nil
}
switch in[0] {
case 'n':
if n := matchWithDelim("null", in); n != 0 {
return d.consumeToken(Null, n), nil
}
case 't':
if n := matchWithDelim("true", in); n != 0 {
return d.consumeBoolToken(true, n), nil
}
case 'f':
if n := matchWithDelim("false", in); n != 0 {
return d.consumeBoolToken(false, n), nil
}
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
if n, ok := parseNumber(in); ok {
return d.consumeToken(Number, n), nil
}
case '"':
s, n, err := d.parseString(in)
if err != nil {
return Token{}, err
}
return d.consumeStringToken(s, n), nil
case '{':
return d.consumeToken(ObjectOpen, 1), nil
case '}':
return d.consumeToken(ObjectClose, 1), nil
case '[':
return d.consumeToken(ArrayOpen, 1), nil
case ']':
return d.consumeToken(ArrayClose, 1), nil
case ',':
return d.consumeToken(comma, 1), nil
}
return Token{}, d.newSyntaxError(d.currPos(), "invalid value %s", errRegexp.Find(in))
}
// newSyntaxError returns an error with line and column information useful for
// syntax errors.
func (d *Decoder) newSyntaxError(pos int, f string, x ...interface{}) error {
e := errors.New(f, x...)
line, column := d.Position(pos)
return errors.New("syntax error (line %d:%d): %v", line, column, e)
}
// Position returns line and column number of given index of the original input.
// It will panic if index is out of range.
func (d *Decoder) Position(idx int) (line int, column int) {
b := d.orig[:idx]
line = bytes.Count(b, []byte("\n")) + 1
if i := bytes.LastIndexByte(b, '\n'); i >= 0 {
b = b[i+1:]
}
column = utf8.RuneCount(b) + 1 // ignore multi-rune characters
return line, column
}
// currPos returns the current index position of d.in from d.orig.
func (d *Decoder) currPos() int {
return len(d.orig) - len(d.in)
}
// matchWithDelim matches s with the input b and verifies that the match
// terminates with a delimiter of some form (e.g., r"[^-+_.a-zA-Z0-9]").
// As a special case, EOF is considered a delimiter. It returns the length of s
// if there is a match, else 0.
func matchWithDelim(s string, b []byte) int {
if !bytes.HasPrefix(b, []byte(s)) {
return 0
}
n := len(s)
if n < len(b) && isNotDelim(b[n]) {
return 0
}
return n
}
// isNotDelim returns true if the given byte is not a delimiter character.
func isNotDelim(c byte) bool {
return (c == '-' || c == '+' || c == '.' || c == '_' ||
('a' <= c && c <= 'z') ||
('A' <= c && c <= 'Z') ||
('0' <= c && c <= '9'))
}
// consume consumes n bytes of input and any subsequent whitespace.
func (d *Decoder) consume(n int) {
d.in = d.in[n:]
for len(d.in) > 0 {
switch d.in[0] {
case ' ', '\n', '\r', '\t':
d.in = d.in[1:]
default:
return
}
}
}
// isValueNext returns true if next type should be a JSON value: Null,
// Number, String or Bool.
func (d *Decoder) isValueNext() bool {
if len(d.openStack) == 0 {
return d.lastToken.kind == 0
}
start := d.openStack[len(d.openStack)-1]
switch start {
case ObjectOpen:
return d.lastToken.kind&Name != 0
case ArrayOpen:
return d.lastToken.kind&(ArrayOpen|comma) != 0
}
panic(fmt.Sprintf(
"unreachable logic in Decoder.isValueNext, lastToken.kind: %v, openStack: %v",
d.lastToken.kind, start))
}
// consumeToken constructs a Token for given Kind with raw value derived from
// current d.in and given size, and consumes the given size-length of it.
func (d *Decoder) consumeToken(kind Kind, size int) Token {
tok := Token{
kind: kind,
raw: d.in[:size],
pos: len(d.orig) - len(d.in),
}
d.consume(size)
return tok
}
// consumeBoolToken constructs a Token for a Bool kind with raw value derived from
// current d.in and given size.
func (d *Decoder) consumeBoolToken(b bool, size int) Token {
tok := Token{
kind: Bool,
raw: d.in[:size],
pos: len(d.orig) - len(d.in),
boo: b,
}
d.consume(size)
return tok
}
// consumeStringToken constructs a Token for a String kind with raw value derived
// from current d.in and given size.
func (d *Decoder) consumeStringToken(s string, size int) Token {
tok := Token{
kind: String,
raw: d.in[:size],
pos: len(d.orig) - len(d.in),
str: s,
}
d.consume(size)
return tok
}
// Clone returns a copy of the Decoder for use in reading ahead the next JSON
// object, array or other values without affecting current Decoder.
func (d *Decoder) Clone() *Decoder {
ret := *d
ret.openStack = append([]Kind(nil), ret.openStack...)
return &ret
}
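
The decoder is purely token-based: Read validates each token against the enclosing object or array (openStack) and the previous token, silently skips commas, and reclassifies a string that introduces a field as a Name token. A minimal sketch of the read loop, written as if it lived in this (internal) package; the function name is hypothetical:

func demoReadLoop() error {
	d := NewDecoder([]byte(`{"name": "proto", "ok": true}`))
	for {
		tok, err := d.Read()
		if err != nil {
			return err // syntax errors carry line and column information
		}
		if tok.Kind() == EOF {
			return nil
		}
		// Tokens seen here, in order: {  name "name"  string "proto"  name "ok"  bool true  }
		_ = tok.RawString()
	}
}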

View File

@ -0,0 +1,254 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package json
import (
"bytes"
"strconv"
)
// parseNumber reads the given []byte for a valid JSON number. If it is valid,
// it returns the number of bytes. Parsing logic follows the definition in
// https://tools.ietf.org/html/rfc7159#section-6, and is based on the
// encoding/json.isValidNumber function.
func parseNumber(input []byte) (int, bool) {
var n int
s := input
if len(s) == 0 {
return 0, false
}
// Optional -
if s[0] == '-' {
s = s[1:]
n++
if len(s) == 0 {
return 0, false
}
}
// Digits
switch {
case s[0] == '0':
s = s[1:]
n++
case '1' <= s[0] && s[0] <= '9':
s = s[1:]
n++
for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
s = s[1:]
n++
}
default:
return 0, false
}
// . followed by 1 or more digits.
if len(s) >= 2 && s[0] == '.' && '0' <= s[1] && s[1] <= '9' {
s = s[2:]
n += 2
for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
s = s[1:]
n++
}
}
// e or E followed by an optional - or + and
// 1 or more digits.
if len(s) >= 2 && (s[0] == 'e' || s[0] == 'E') {
s = s[1:]
n++
if s[0] == '+' || s[0] == '-' {
s = s[1:]
n++
if len(s) == 0 {
return 0, false
}
}
for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
s = s[1:]
n++
}
}
// Check that next byte is a delimiter or it is at the end.
if n < len(input) && isNotDelim(input[n]) {
return 0, false
}
return n, true
}
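// A few hypothetical spot checks of the grammar above (sketch only, same package):
func parseNumberExamples() {
	_, _ = parseNumber([]byte("-12.5e3,")) // returns (7, true): the trailing ',' is a delimiter, not part of the number
	_, _ = parseNumber([]byte("01"))       // returns (0, false): a leading zero must stand alone
	_, _ = parseNumber([]byte("1.e5"))     // returns (0, false): '.' must be followed by a digit
}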
// numberParts is the result of parsing out a valid JSON number. It contains
// the parts of a number. The parts are used for integer conversion.
type numberParts struct {
neg bool
intp []byte
frac []byte
exp []byte
}
// parseNumberParts constructs numberParts from the given []byte. The logic here is
// similar to parseNumber above, with the difference of having to construct
// numberParts. The slice fields in numberParts are subslices of the input.
func parseNumberParts(input []byte) (numberParts, bool) {
var neg bool
var intp []byte
var frac []byte
var exp []byte
s := input
if len(s) == 0 {
return numberParts{}, false
}
// Optional -
if s[0] == '-' {
neg = true
s = s[1:]
if len(s) == 0 {
return numberParts{}, false
}
}
// Digits
switch {
case s[0] == '0':
// Skip first 0 and no need to store.
s = s[1:]
case '1' <= s[0] && s[0] <= '9':
intp = s
n := 1
s = s[1:]
for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
s = s[1:]
n++
}
intp = intp[:n]
default:
return numberParts{}, false
}
// . followed by 1 or more digits.
if len(s) >= 2 && s[0] == '.' && '0' <= s[1] && s[1] <= '9' {
frac = s[1:]
n := 1
s = s[2:]
for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
s = s[1:]
n++
}
frac = frac[:n]
}
// e or E followed by an optional - or + and
// 1 or more digits.
if len(s) >= 2 && (s[0] == 'e' || s[0] == 'E') {
s = s[1:]
exp = s
n := 0
if s[0] == '+' || s[0] == '-' {
s = s[1:]
n++
if len(s) == 0 {
return numberParts{}, false
}
}
for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
s = s[1:]
n++
}
exp = exp[:n]
}
return numberParts{
neg: neg,
intp: intp,
frac: bytes.TrimRight(frac, "0"), // Remove unnecessary 0s to the right.
exp: exp,
}, true
}
// normalizeToIntString returns an integer string in normal form without the
// E-notation for given numberParts. It will return false if it is not an
// integer or if the exponent exceeds the max/min int value.
func normalizeToIntString(n numberParts) (string, bool) {
intpSize := len(n.intp)
fracSize := len(n.frac)
if intpSize == 0 && fracSize == 0 {
return "0", true
}
var exp int
if len(n.exp) > 0 {
i, err := strconv.ParseInt(string(n.exp), 10, 32)
if err != nil {
return "", false
}
exp = int(i)
}
var num []byte
if exp >= 0 {
// For positive E, shift fraction digits into integer part and also pad
// with zeroes as needed.
// If there are more digits in fraction than the E value, then the
// number is not an integer.
if fracSize > exp {
return "", false
}
// Make sure resulting digits are within max value limit to avoid
// unnecessarily constructing a large byte slice that may simply fail
// later on.
const maxDigits = 20 // Max uint64 value has 20 decimal digits.
if intpSize+exp > maxDigits {
return "", false
}
// Set cap to make a copy of integer part when appended.
num = n.intp[:len(n.intp):len(n.intp)]
num = append(num, n.frac...)
for i := 0; i < exp-fracSize; i++ {
num = append(num, '0')
}
} else {
// For negative E, shift digits in integer part out.
// If there are fractions, then the number is not an integer.
if fracSize > 0 {
return "", false
}
// index is where the decimal point will be after adjusting for negative
// exponent.
index := intpSize + exp
if index < 0 {
return "", false
}
num = n.intp
// If any of the digits being shifted to the right of the decimal point
// is non-zero, then the number is not an integer.
for i := index; i < intpSize; i++ {
if num[i] != '0' {
return "", false
}
}
num = num[:index]
}
if n.neg {
return "-" + string(num), true
}
return string(num), true
}
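
Token.Int and Token.Uint build on these two helpers: parseNumberParts splits the raw digits, and normalizeToIntString folds the exponent back in, rejecting anything with a real fractional part. A small worked sketch (hypothetical, same package):

func intNormalizationExamples() (string, bool, bool) {
	p1, _ := parseNumberParts([]byte("1.5e3"))  // neg=false, intp="1", frac="5", exp="3"
	s, ok1 := normalizeToIntString(p1)          // "1500", true: the fraction digit shifts into the integer part
	p2, _ := parseNumberParts([]byte("1.25e1")) // frac="25", exp="1"
	_, ok2 := normalizeToIntString(p2)          // "", false: 12.5 is not an integer
	return s, ok1, ok2
}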

View File

@ -0,0 +1,91 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package json
import (
"strconv"
"unicode"
"unicode/utf16"
"unicode/utf8"
"google.golang.org/protobuf/internal/strs"
)
func (d *Decoder) parseString(in []byte) (string, int, error) {
in0 := in
if len(in) == 0 {
return "", 0, ErrUnexpectedEOF
}
if in[0] != '"' {
return "", 0, d.newSyntaxError(d.currPos(), "invalid character %q at start of string", in[0])
}
in = in[1:]
i := indexNeedEscapeInBytes(in)
in, out := in[i:], in[:i:i] // set cap to prevent mutations
for len(in) > 0 {
switch r, n := utf8.DecodeRune(in); {
case r == utf8.RuneError && n == 1:
return "", 0, d.newSyntaxError(d.currPos(), "invalid UTF-8 in string")
case r < ' ':
return "", 0, d.newSyntaxError(d.currPos(), "invalid character %q in string", r)
case r == '"':
in = in[1:]
n := len(in0) - len(in)
return string(out), n, nil
case r == '\\':
if len(in) < 2 {
return "", 0, ErrUnexpectedEOF
}
switch r := in[1]; r {
case '"', '\\', '/':
in, out = in[2:], append(out, r)
case 'b':
in, out = in[2:], append(out, '\b')
case 'f':
in, out = in[2:], append(out, '\f')
case 'n':
in, out = in[2:], append(out, '\n')
case 'r':
in, out = in[2:], append(out, '\r')
case 't':
in, out = in[2:], append(out, '\t')
case 'u':
if len(in) < 6 {
return "", 0, ErrUnexpectedEOF
}
v, err := strconv.ParseUint(string(in[2:6]), 16, 16)
if err != nil {
return "", 0, d.newSyntaxError(d.currPos(), "invalid escape code %q in string", in[:6])
}
in = in[6:]
r := rune(v)
if utf16.IsSurrogate(r) {
if len(in) < 6 {
return "", 0, ErrUnexpectedEOF
}
v, err := strconv.ParseUint(string(in[2:6]), 16, 16)
r = utf16.DecodeRune(r, rune(v))
if in[0] != '\\' || in[1] != 'u' ||
r == unicode.ReplacementChar || err != nil {
return "", 0, d.newSyntaxError(d.currPos(), "invalid escape code %q in string", in[:6])
}
in = in[6:]
}
out = append(out, string(r)...)
default:
return "", 0, d.newSyntaxError(d.currPos(), "invalid escape code %q in string", in[:2])
}
default:
i := indexNeedEscapeInBytes(in[n:])
in, out = in[n+i:], append(out, in[:n+i]...)
}
}
return "", 0, ErrUnexpectedEOF
}
// indexNeedEscapeInBytes returns the index of the character that needs
// escaping. If no characters need escaping, this returns the input length.
func indexNeedEscapeInBytes(b []byte) int { return indexNeedEscapeInString(strs.UnsafeString(b)) }
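
parseString decodes the usual backslash escapes, \uXXXX code units, and UTF-16 surrogate pairs into a Go string, while the token keeps the raw, still-escaped bytes. A small sketch via the exported Read/ParsedString path (hypothetical, same package):

func parseStringExample() (string, error) {
	d := NewDecoder([]byte(`"a\n\u00e9 \ud83d\ude00"`))
	tok, err := d.Read()
	if err != nil {
		return "", err
	}
	// tok.RawString() still shows the escapes; ParsedString yields "a\né 😀".
	return tok.ParsedString(), nil
}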

View File

@ -0,0 +1,192 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package json
import (
"bytes"
"fmt"
"strconv"
)
// Kind represents a token kind expressible in the JSON format.
type Kind uint16
const (
Invalid Kind = (1 << iota) / 2
EOF
Null
Bool
Number
String
Name
ObjectOpen
ObjectClose
ArrayOpen
ArrayClose
// comma is only for parsing in between tokens and
// does not need to be exported.
comma
)
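// The (1 << iota) / 2 trick above makes Invalid 0 and every later kind a
// distinct power of two (EOF=1, Null=2, Bool=4, Number=8, String=16, Name=32,
// ObjectOpen=64, ...), so kinds can be combined and tested as bit sets, which
// is how Decoder.Read validates token ordering. A hypothetical helper showing
// the idiom (sketch only):
func canStartFieldName(last Kind) bool {
	return last&(ObjectOpen|comma) != 0 // a field name may only follow '{' or ','
}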
func (k Kind) String() string {
switch k {
case EOF:
return "eof"
case Null:
return "null"
case Bool:
return "bool"
case Number:
return "number"
case String:
return "string"
case ObjectOpen:
return "{"
case ObjectClose:
return "}"
case Name:
return "name"
case ArrayOpen:
return "["
case ArrayClose:
return "]"
case comma:
return ","
}
return "<invalid>"
}
// Token provides a parsed token kind and value.
//
// Values are provided by the different accessor methods. The accessor methods
// Name, Bool, and ParsedString will panic if called on the wrong kind. There
// are different accessor methods for the Number kind for converting to the
// appropriate Go numeric type and those methods have the ok return value.
type Token struct {
// Token kind.
kind Kind
// pos provides the position of the token in the original input.
pos int
// raw bytes of the serialized token.
// This is a subslice into the original input.
raw []byte
// boo is parsed boolean value.
boo bool
// str is parsed string value.
str string
}
// Kind returns the token kind.
func (t Token) Kind() Kind {
return t.kind
}
// RawString returns the read value in string.
func (t Token) RawString() string {
return string(t.raw)
}
// Pos returns the token position from the input.
func (t Token) Pos() int {
return t.pos
}
// Name returns the object name if token is Name, else it panics.
func (t Token) Name() string {
if t.kind == Name {
return t.str
}
panic(fmt.Sprintf("Token is not a Name: %v", t.RawString()))
}
// Bool returns the bool value if token kind is Bool, else it panics.
func (t Token) Bool() bool {
if t.kind == Bool {
return t.boo
}
panic(fmt.Sprintf("Token is not a Bool: %v", t.RawString()))
}
// ParsedString returns the parsed string value if token kind is String, else it
// panics.
func (t Token) ParsedString() string {
if t.kind == String {
return t.str
}
panic(fmt.Sprintf("Token is not a String: %v", t.RawString()))
}
// Float returns the floating-point number if token kind is Number.
//
// The floating-point precision is specified by the bitSize parameter: 32 for
// float32 or 64 for float64. If bitSize=32, the result still has type float64,
// but it will be convertible to float32 without changing its value. It will
// return false if the number exceeds the floating point limits for given
// bitSize.
func (t Token) Float(bitSize int) (float64, bool) {
if t.kind != Number {
return 0, false
}
f, err := strconv.ParseFloat(t.RawString(), bitSize)
if err != nil {
return 0, false
}
return f, true
}
// Int returns the signed integer number if token is Number.
//
// The given bitSize specifies the integer type that the result must fit into.
// It returns false if the number is not an integer value or if the result
// exceeds the limits for given bitSize.
func (t Token) Int(bitSize int) (int64, bool) {
s, ok := t.getIntStr()
if !ok {
return 0, false
}
n, err := strconv.ParseInt(s, 10, bitSize)
if err != nil {
return 0, false
}
return n, true
}
// Uint returns the unsigned integer number if token is Number.
//
// The given bitSize specifies the unsigned integer type that the result must
// fit into. It returns false if the number is not an unsigned integer value
// or if the result exceeds the limits for given bitSize.
func (t Token) Uint(bitSize int) (uint64, bool) {
s, ok := t.getIntStr()
if !ok {
return 0, false
}
n, err := strconv.ParseUint(s, 10, bitSize)
if err != nil {
return 0, false
}
return n, true
}
func (t Token) getIntStr() (string, bool) {
if t.kind != Number {
return "", false
}
parts, ok := parseNumberParts(t.raw)
if !ok {
return "", false
}
return normalizeToIntString(parts)
}
// TokenEquals returns true if given Tokens are equal, else false.
func TokenEquals(x, y Token) bool {
return x.kind == y.kind &&
x.pos == y.pos &&
bytes.Equal(x.raw, y.raw) &&
x.boo == y.boo &&
x.str == y.str
}
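
The numeric accessors parse lazily from the raw token bytes, so a single Number token can be read at whatever width the target field needs; integer-valued E-notation is accepted, true fractions are not. A small sketch (hypothetical, same package):

func numericAccessorExamples() {
	d := NewDecoder([]byte("1e2"))
	tok, _ := d.Read()
	_, _ = tok.Float(32) // 100, true
	_, _ = tok.Int(32)   // 100, true: "1e2" normalizes to the integer 100
	_, _ = tok.Uint(8)   // 100, true: fits in 8 bits
	d = NewDecoder([]byte("1.5"))
	tok, _ = d.Read()
	_, _ = tok.Int(64) // 0, false: not an integer value
}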

View File

@ -0,0 +1,278 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package json
import (
"math"
"math/bits"
"strconv"
"strings"
"unicode/utf8"
"google.golang.org/protobuf/internal/detrand"
"google.golang.org/protobuf/internal/errors"
)
// kind represents an encoding type.
type kind uint8
const (
_ kind = (1 << iota) / 2
name
scalar
objectOpen
objectClose
arrayOpen
arrayClose
)
// Encoder provides methods to write out JSON constructs and values. The user is
// responsible for producing valid sequences of JSON constructs and values.
type Encoder struct {
indent string
lastKind kind
indents []byte
out []byte
}
// NewEncoder returns an Encoder.
//
// If indent is a non-empty string, it causes every entry for an Array or Object
// to be preceded by the indent and trailed by a newline.
func NewEncoder(buf []byte, indent string) (*Encoder, error) {
e := &Encoder{
out: buf,
}
if len(indent) > 0 {
if strings.Trim(indent, " \t") != "" {
return nil, errors.New("indent may only be composed of space or tab characters")
}
e.indent = indent
}
return e, nil
}
// Bytes returns the content of the written bytes.
func (e *Encoder) Bytes() []byte {
return e.out
}
// WriteNull writes out the null value.
func (e *Encoder) WriteNull() {
e.prepareNext(scalar)
e.out = append(e.out, "null"...)
}
// WriteBool writes out the given boolean value.
func (e *Encoder) WriteBool(b bool) {
e.prepareNext(scalar)
if b {
e.out = append(e.out, "true"...)
} else {
e.out = append(e.out, "false"...)
}
}
// WriteString writes out the given string in JSON string value. Returns error
// if input string contains invalid UTF-8.
func (e *Encoder) WriteString(s string) error {
e.prepareNext(scalar)
var err error
if e.out, err = appendString(e.out, s); err != nil {
return err
}
return nil
}
// Sentinel error used for indicating invalid UTF-8.
var errInvalidUTF8 = errors.New("invalid UTF-8")
func appendString(out []byte, in string) ([]byte, error) {
out = append(out, '"')
i := indexNeedEscapeInString(in)
in, out = in[i:], append(out, in[:i]...)
for len(in) > 0 {
switch r, n := utf8.DecodeRuneInString(in); {
case r == utf8.RuneError && n == 1:
return out, errInvalidUTF8
case r < ' ' || r == '"' || r == '\\':
out = append(out, '\\')
switch r {
case '"', '\\':
out = append(out, byte(r))
case '\b':
out = append(out, 'b')
case '\f':
out = append(out, 'f')
case '\n':
out = append(out, 'n')
case '\r':
out = append(out, 'r')
case '\t':
out = append(out, 't')
default:
out = append(out, 'u')
out = append(out, "0000"[1+(bits.Len32(uint32(r))-1)/4:]...)
out = strconv.AppendUint(out, uint64(r), 16)
}
in = in[n:]
default:
i := indexNeedEscapeInString(in[n:])
in, out = in[n+i:], append(out, in[:n+i]...)
}
}
out = append(out, '"')
return out, nil
}
// indexNeedEscapeInString returns the index of the character that needs
// escaping. If no characters need escaping, this returns the input length.
func indexNeedEscapeInString(s string) int {
for i, r := range s {
if r < ' ' || r == '\\' || r == '"' || r == utf8.RuneError {
return i
}
}
return len(s)
}
// WriteFloat writes out the given float and bitSize in JSON number value.
func (e *Encoder) WriteFloat(n float64, bitSize int) {
e.prepareNext(scalar)
e.out = appendFloat(e.out, n, bitSize)
}
// appendFloat formats given float in bitSize, and appends to the given []byte.
func appendFloat(out []byte, n float64, bitSize int) []byte {
switch {
case math.IsNaN(n):
return append(out, `"NaN"`...)
case math.IsInf(n, +1):
return append(out, `"Infinity"`...)
case math.IsInf(n, -1):
return append(out, `"-Infinity"`...)
}
// JSON number formatting logic based on encoding/json.
// See floatEncoder.encode for reference.
fmt := byte('f')
if abs := math.Abs(n); abs != 0 {
if bitSize == 64 && (abs < 1e-6 || abs >= 1e21) ||
bitSize == 32 && (float32(abs) < 1e-6 || float32(abs) >= 1e21) {
fmt = 'e'
}
}
out = strconv.AppendFloat(out, n, fmt, -1, bitSize)
if fmt == 'e' {
n := len(out)
if n >= 4 && out[n-4] == 'e' && out[n-3] == '-' && out[n-2] == '0' {
out[n-2] = out[n-1]
out = out[:n-1]
}
}
return out
}
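// Hypothetical spot checks of the special cases above (sketch only, same package;
// math is already imported by this file):
func appendFloatExamples() []string {
	return []string{
		string(appendFloat(nil, math.NaN(), 64)),   // `"NaN"`: written as a quoted JSON string
		string(appendFloat(nil, math.Inf(-1), 64)), // `"-Infinity"`
		string(appendFloat(nil, 1e-7, 64)),         // "1e-7": the zero-padded exponent is trimmed
		string(appendFloat(nil, 0.001, 64)),        // "0.001": plain decimal formatting
	}
}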
// WriteInt writes out the given signed integer in JSON number value.
func (e *Encoder) WriteInt(n int64) {
e.prepareNext(scalar)
e.out = strconv.AppendInt(e.out, n, 10)
}
// WriteUint writes out the given unsigned integer in JSON number value.
func (e *Encoder) WriteUint(n uint64) {
e.prepareNext(scalar)
e.out = strconv.AppendUint(e.out, n, 10)
}
// StartObject writes out the '{' symbol.
func (e *Encoder) StartObject() {
e.prepareNext(objectOpen)
e.out = append(e.out, '{')
}
// EndObject writes out the '}' symbol.
func (e *Encoder) EndObject() {
e.prepareNext(objectClose)
e.out = append(e.out, '}')
}
// WriteName writes out the given string in JSON string value and the name
// separator ':'. Returns error if input string contains invalid UTF-8, which
// should not be likely as protobuf field names should be valid.
func (e *Encoder) WriteName(s string) error {
e.prepareNext(name)
var err error
// Append to output regardless of error.
e.out, err = appendString(e.out, s)
e.out = append(e.out, ':')
return err
}
// StartArray writes out the '[' symbol.
func (e *Encoder) StartArray() {
e.prepareNext(arrayOpen)
e.out = append(e.out, '[')
}
// EndArray writes out the ']' symbol.
func (e *Encoder) EndArray() {
e.prepareNext(arrayClose)
e.out = append(e.out, ']')
}
// prepareNext adds possible comma and indentation for the next value based
// on last type and indent option. It also updates lastKind to next.
func (e *Encoder) prepareNext(next kind) {
defer func() {
// Set lastKind to next.
e.lastKind = next
}()
if len(e.indent) == 0 {
// Need to add comma on the following condition.
if e.lastKind&(scalar|objectClose|arrayClose) != 0 &&
next&(name|scalar|objectOpen|arrayOpen) != 0 {
e.out = append(e.out, ',')
// For single-line output, add a random extra space after each
// comma to make output unstable.
if detrand.Bool() {
e.out = append(e.out, ' ')
}
}
return
}
switch {
case e.lastKind&(objectOpen|arrayOpen) != 0:
// If next type is NOT closing, add indent and newline.
if next&(objectClose|arrayClose) == 0 {
e.indents = append(e.indents, e.indent...)
e.out = append(e.out, '\n')
e.out = append(e.out, e.indents...)
}
case e.lastKind&(scalar|objectClose|arrayClose) != 0:
switch {
// If next type is either a value or name, add comma and newline.
case next&(name|scalar|objectOpen|arrayOpen) != 0:
e.out = append(e.out, ',', '\n')
// If next type is a closing object or array, adjust indentation.
case next&(objectClose|arrayClose) != 0:
e.indents = e.indents[:len(e.indents)-len(e.indent)]
e.out = append(e.out, '\n')
}
e.out = append(e.out, e.indents...)
case e.lastKind&name != 0:
e.out = append(e.out, ' ')
// For multi-line output, add a random extra space after key: to make
// output unstable.
if detrand.Bool() {
e.out = append(e.out, ' ')
}
}
}
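
The encoder now writes into a caller-supplied buffer and deliberately destabilizes its whitespace via detrand so callers cannot rely on byte-exact output. A minimal sketch of the construct-by-construct API (hypothetical, same package):

func encodeExample() ([]byte, error) {
	e, err := NewEncoder(nil, "  ") // two-space indent; pass "" for single-line output
	if err != nil {
		return nil, err
	}
	e.StartObject()
	if err := e.WriteName("name"); err != nil {
		return nil, err
	}
	if err := e.WriteString("proto"); err != nil {
		return nil, err
	}
	if err := e.WriteName("count"); err != nil {
		return nil, err
	}
	e.WriteInt(3)
	e.EndObject()
	// Produces roughly:
	//   {
	//     "name": "proto",
	//     "count": 3
	//   }
	// (detrand may add an extra space after ':', so the bytes are not stable.)
	return e.Bytes(), nil
}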

View File

@ -53,8 +53,10 @@ type encoderState struct {
// If outputASCII is true, strings will be serialized in such a way that
// multi-byte UTF-8 sequences are escaped. This property ensures that the
// overall output is ASCII (as opposed to UTF-8).
func NewEncoder(indent string, delims [2]byte, outputASCII bool) (*Encoder, error) {
e := &Encoder{}
func NewEncoder(buf []byte, indent string, delims [2]byte, outputASCII bool) (*Encoder, error) {
e := &Encoder{
encoderState: encoderState{out: buf},
}
if len(indent) > 0 {
if strings.Trim(indent, " \t") != "" {
return nil, errors.New("indent may only be composed of space and tab characters")
@ -195,13 +197,13 @@ func appendFloat(out []byte, n float64, bitSize int) []byte {
// WriteInt writes out the given signed integer value.
func (e *Encoder) WriteInt(n int64) {
e.prepareNext(scalar)
e.out = append(e.out, strconv.FormatInt(n, 10)...)
e.out = strconv.AppendInt(e.out, n, 10)
}
// WriteUint writes out the given unsigned integer value.
func (e *Encoder) WriteUint(n uint64) {
e.prepareNext(scalar)
e.out = append(e.out, strconv.FormatUint(n, 10)...)
e.out = strconv.AppendUint(e.out, n, 10)
}
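// The switch from strconv.FormatInt/FormatUint to strconv.AppendInt/AppendUint
// is behavior-preserving; it only skips the intermediate string allocation.
// A hypothetical side-by-side sketch:
func appendIntBothWays(buf []byte, n int64) (before, after []byte) {
	before = append(append([]byte(nil), buf...), strconv.FormatInt(n, 10)...) // formats to a string, then copies it
	after = strconv.AppendInt(append([]byte(nil), buf...), n, 10)             // appends the digits in place
	return before, after // identical contents
}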
// WriteLiteral writes out the given string as a literal value without quotes.

View File

@ -21,11 +21,26 @@ import (
"google.golang.org/protobuf/reflect/protoregistry"
)
// Edition is an Enum for proto2.Edition
type Edition int32
// These values align with the values of the Edition enum in descriptor.proto, which allows
// direct conversion between the proto enum and this enum.
const (
EditionUnknown Edition = 0
EditionProto2 Edition = 998
EditionProto3 Edition = 999
Edition2023 Edition = 1000
EditionUnsupported Edition = 100000
)
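// Because the constants above mirror the numeric values of the
// google.protobuf.Edition enum, converting from the generated descriptorpb
// type is a plain cast. A hypothetical helper (sketch only; it assumes the
// usual "google.golang.org/protobuf/types/descriptorpb" import, which this
// file does not have):
//
//	func editionFromProto(e descriptorpb.Edition) Edition {
//		return Edition(e) // e.g. EDITION_PROTO3 (999) maps to EditionProto3
//	}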
// The types in this file may have a suffix:
// • L0: Contains fields common to all descriptors (except File) and
// must be initialized up front.
// • L1: Contains fields specific to a descriptor and
// must be initialized up front.
// must be initialized up front. If the associated proto uses Editions, the
// Editions features must always be resolved. If not explicitly set, the
// appropriate default must be resolved and set.
// • L2: Contains fields that are lazily initialized when constructing
// from the raw file descriptor. When constructing as a literal, the L2
// fields must be initialized up front.
@ -44,6 +59,7 @@ type (
}
FileL1 struct {
Syntax protoreflect.Syntax
Edition Edition // Only used if Syntax == Editions
Path string
Package protoreflect.FullName
@ -51,12 +67,35 @@ type (
Messages Messages
Extensions Extensions
Services Services
EditionFeatures FileEditionFeatures
}
FileL2 struct {
Options func() protoreflect.ProtoMessage
Imports FileImports
Locations SourceLocations
}
FileEditionFeatures struct {
// IsFieldPresence is true if field_presence is EXPLICIT
// https://protobuf.dev/editions/features/#field_presence
IsFieldPresence bool
// IsOpenEnum is true if enum_type is OPEN
// https://protobuf.dev/editions/features/#enum_type
IsOpenEnum bool
// IsPacked is true if repeated_field_encoding is PACKED
// https://protobuf.dev/editions/features/#repeated_field_encoding
IsPacked bool
// IsUTF8Validated is true if utf_validation is VERIFY
// https://protobuf.dev/editions/features/#utf8_validation
IsUTF8Validated bool
// IsDelimitedEncoded is true if message_encoding is DELIMITED
// https://protobuf.dev/editions/features/#message_encoding
IsDelimitedEncoded bool
// IsJSONCompliant is true if json_format is ALLOW
// https://protobuf.dev/editions/features/#json_format
IsJSONCompliant bool
}
)
func (fd *File) ParentFile() protoreflect.FileDescriptor { return fd }
@ -210,6 +249,9 @@ type (
ContainingOneof protoreflect.OneofDescriptor // must be consistent with Message.Oneofs.Fields
Enum protoreflect.EnumDescriptor
Message protoreflect.MessageDescriptor
// Edition features.
Presence bool
}
Oneof struct {
@ -273,6 +315,9 @@ func (fd *Field) HasJSONName() bool { return fd.L1.StringNam
func (fd *Field) JSONName() string { return fd.L1.StringName.getJSON(fd) }
func (fd *Field) TextName() string { return fd.L1.StringName.getText(fd) }
func (fd *Field) HasPresence() bool {
if fd.L0.ParentFile.L1.Syntax == protoreflect.Editions {
return fd.L1.Presence || fd.L1.Message != nil || fd.L1.ContainingOneof != nil
}
return fd.L1.Cardinality != protoreflect.Repeated && (fd.L0.ParentFile.L1.Syntax == protoreflect.Proto2 || fd.L1.Message != nil || fd.L1.ContainingOneof != nil)
}
func (fd *Field) HasOptionalKeyword() bool {

View File

@ -12,6 +12,12 @@ import (
const File_google_protobuf_descriptor_proto = "google/protobuf/descriptor.proto"
// Full and short names for google.protobuf.Edition.
const (
Edition_enum_fullname = "google.protobuf.Edition"
Edition_enum_name = "Edition"
)
// Names for google.protobuf.FileDescriptorSet.
const (
FileDescriptorSet_message_name protoreflect.Name = "FileDescriptorSet"
@ -81,7 +87,7 @@ const (
FileDescriptorProto_Options_field_number protoreflect.FieldNumber = 8
FileDescriptorProto_SourceCodeInfo_field_number protoreflect.FieldNumber = 9
FileDescriptorProto_Syntax_field_number protoreflect.FieldNumber = 12
FileDescriptorProto_Edition_field_number protoreflect.FieldNumber = 13
FileDescriptorProto_Edition_field_number protoreflect.FieldNumber = 14
)
// Names for google.protobuf.DescriptorProto.
@ -183,13 +189,58 @@ const (
// Field names for google.protobuf.ExtensionRangeOptions.
const (
ExtensionRangeOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
ExtensionRangeOptions_Declaration_field_name protoreflect.Name = "declaration"
ExtensionRangeOptions_Features_field_name protoreflect.Name = "features"
ExtensionRangeOptions_Verification_field_name protoreflect.Name = "verification"
ExtensionRangeOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.uninterpreted_option"
ExtensionRangeOptions_Declaration_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.declaration"
ExtensionRangeOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.features"
ExtensionRangeOptions_Verification_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.verification"
)
// Field numbers for google.protobuf.ExtensionRangeOptions.
const (
ExtensionRangeOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
ExtensionRangeOptions_Declaration_field_number protoreflect.FieldNumber = 2
ExtensionRangeOptions_Features_field_number protoreflect.FieldNumber = 50
ExtensionRangeOptions_Verification_field_number protoreflect.FieldNumber = 3
)
// Full and short names for google.protobuf.ExtensionRangeOptions.VerificationState.
const (
ExtensionRangeOptions_VerificationState_enum_fullname = "google.protobuf.ExtensionRangeOptions.VerificationState"
ExtensionRangeOptions_VerificationState_enum_name = "VerificationState"
)
// Names for google.protobuf.ExtensionRangeOptions.Declaration.
const (
ExtensionRangeOptions_Declaration_message_name protoreflect.Name = "Declaration"
ExtensionRangeOptions_Declaration_message_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.Declaration"
)
// Field names for google.protobuf.ExtensionRangeOptions.Declaration.
const (
ExtensionRangeOptions_Declaration_Number_field_name protoreflect.Name = "number"
ExtensionRangeOptions_Declaration_FullName_field_name protoreflect.Name = "full_name"
ExtensionRangeOptions_Declaration_Type_field_name protoreflect.Name = "type"
ExtensionRangeOptions_Declaration_Reserved_field_name protoreflect.Name = "reserved"
ExtensionRangeOptions_Declaration_Repeated_field_name protoreflect.Name = "repeated"
ExtensionRangeOptions_Declaration_Number_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.Declaration.number"
ExtensionRangeOptions_Declaration_FullName_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.Declaration.full_name"
ExtensionRangeOptions_Declaration_Type_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.Declaration.type"
ExtensionRangeOptions_Declaration_Reserved_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.Declaration.reserved"
ExtensionRangeOptions_Declaration_Repeated_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.Declaration.repeated"
)
// Field numbers for google.protobuf.ExtensionRangeOptions.Declaration.
const (
ExtensionRangeOptions_Declaration_Number_field_number protoreflect.FieldNumber = 1
ExtensionRangeOptions_Declaration_FullName_field_number protoreflect.FieldNumber = 2
ExtensionRangeOptions_Declaration_Type_field_number protoreflect.FieldNumber = 3
ExtensionRangeOptions_Declaration_Reserved_field_number protoreflect.FieldNumber = 5
ExtensionRangeOptions_Declaration_Repeated_field_number protoreflect.FieldNumber = 6
)
// Names for google.protobuf.FieldDescriptorProto.
@ -433,6 +484,7 @@ const (
FileOptions_PhpNamespace_field_name protoreflect.Name = "php_namespace"
FileOptions_PhpMetadataNamespace_field_name protoreflect.Name = "php_metadata_namespace"
FileOptions_RubyPackage_field_name protoreflect.Name = "ruby_package"
FileOptions_Features_field_name protoreflect.Name = "features"
FileOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
FileOptions_JavaPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_package"
@ -455,6 +507,7 @@ const (
FileOptions_PhpNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_namespace"
FileOptions_PhpMetadataNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_metadata_namespace"
FileOptions_RubyPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.ruby_package"
FileOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.features"
FileOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.uninterpreted_option"
)
@ -480,6 +533,7 @@ const (
FileOptions_PhpNamespace_field_number protoreflect.FieldNumber = 41
FileOptions_PhpMetadataNamespace_field_number protoreflect.FieldNumber = 44
FileOptions_RubyPackage_field_number protoreflect.FieldNumber = 45
FileOptions_Features_field_number protoreflect.FieldNumber = 50
FileOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -502,6 +556,7 @@ const (
MessageOptions_Deprecated_field_name protoreflect.Name = "deprecated"
MessageOptions_MapEntry_field_name protoreflect.Name = "map_entry"
MessageOptions_DeprecatedLegacyJsonFieldConflicts_field_name protoreflect.Name = "deprecated_legacy_json_field_conflicts"
MessageOptions_Features_field_name protoreflect.Name = "features"
MessageOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
MessageOptions_MessageSetWireFormat_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.message_set_wire_format"
@ -509,6 +564,7 @@ const (
MessageOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.deprecated"
MessageOptions_MapEntry_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.map_entry"
MessageOptions_DeprecatedLegacyJsonFieldConflicts_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.deprecated_legacy_json_field_conflicts"
MessageOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.features"
MessageOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.uninterpreted_option"
)
@ -519,6 +575,7 @@ const (
MessageOptions_Deprecated_field_number protoreflect.FieldNumber = 3
MessageOptions_MapEntry_field_number protoreflect.FieldNumber = 7
MessageOptions_DeprecatedLegacyJsonFieldConflicts_field_number protoreflect.FieldNumber = 11
MessageOptions_Features_field_number protoreflect.FieldNumber = 12
MessageOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -539,7 +596,9 @@ const (
FieldOptions_Weak_field_name protoreflect.Name = "weak"
FieldOptions_DebugRedact_field_name protoreflect.Name = "debug_redact"
FieldOptions_Retention_field_name protoreflect.Name = "retention"
FieldOptions_Target_field_name protoreflect.Name = "target"
FieldOptions_Targets_field_name protoreflect.Name = "targets"
FieldOptions_EditionDefaults_field_name protoreflect.Name = "edition_defaults"
FieldOptions_Features_field_name protoreflect.Name = "features"
FieldOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
FieldOptions_Ctype_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.ctype"
@ -551,7 +610,9 @@ const (
FieldOptions_Weak_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.weak"
FieldOptions_DebugRedact_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.debug_redact"
FieldOptions_Retention_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.retention"
FieldOptions_Target_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.target"
FieldOptions_Targets_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.targets"
FieldOptions_EditionDefaults_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.edition_defaults"
FieldOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.features"
FieldOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.uninterpreted_option"
)
@ -566,7 +627,9 @@ const (
FieldOptions_Weak_field_number protoreflect.FieldNumber = 10
FieldOptions_DebugRedact_field_number protoreflect.FieldNumber = 16
FieldOptions_Retention_field_number protoreflect.FieldNumber = 17
FieldOptions_Target_field_number protoreflect.FieldNumber = 18
FieldOptions_Targets_field_number protoreflect.FieldNumber = 19
FieldOptions_EditionDefaults_field_number protoreflect.FieldNumber = 20
FieldOptions_Features_field_number protoreflect.FieldNumber = 21
FieldOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -594,6 +657,27 @@ const (
FieldOptions_OptionTargetType_enum_name = "OptionTargetType"
)
// Names for google.protobuf.FieldOptions.EditionDefault.
const (
FieldOptions_EditionDefault_message_name protoreflect.Name = "EditionDefault"
FieldOptions_EditionDefault_message_fullname protoreflect.FullName = "google.protobuf.FieldOptions.EditionDefault"
)
// Field names for google.protobuf.FieldOptions.EditionDefault.
const (
FieldOptions_EditionDefault_Edition_field_name protoreflect.Name = "edition"
FieldOptions_EditionDefault_Value_field_name protoreflect.Name = "value"
FieldOptions_EditionDefault_Edition_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.EditionDefault.edition"
FieldOptions_EditionDefault_Value_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.EditionDefault.value"
)
// Field numbers for google.protobuf.FieldOptions.EditionDefault.
const (
FieldOptions_EditionDefault_Edition_field_number protoreflect.FieldNumber = 3
FieldOptions_EditionDefault_Value_field_number protoreflect.FieldNumber = 2
)
// Names for google.protobuf.OneofOptions.
const (
OneofOptions_message_name protoreflect.Name = "OneofOptions"
@ -602,13 +686,16 @@ const (
// Field names for google.protobuf.OneofOptions.
const (
OneofOptions_Features_field_name protoreflect.Name = "features"
OneofOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
OneofOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.OneofOptions.features"
OneofOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.OneofOptions.uninterpreted_option"
)
// Field numbers for google.protobuf.OneofOptions.
const (
OneofOptions_Features_field_number protoreflect.FieldNumber = 1
OneofOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -623,11 +710,13 @@ const (
EnumOptions_AllowAlias_field_name protoreflect.Name = "allow_alias"
EnumOptions_Deprecated_field_name protoreflect.Name = "deprecated"
EnumOptions_DeprecatedLegacyJsonFieldConflicts_field_name protoreflect.Name = "deprecated_legacy_json_field_conflicts"
EnumOptions_Features_field_name protoreflect.Name = "features"
EnumOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
EnumOptions_AllowAlias_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.allow_alias"
EnumOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.deprecated"
EnumOptions_DeprecatedLegacyJsonFieldConflicts_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.deprecated_legacy_json_field_conflicts"
EnumOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.features"
EnumOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.uninterpreted_option"
)
@ -636,6 +725,7 @@ const (
EnumOptions_AllowAlias_field_number protoreflect.FieldNumber = 2
EnumOptions_Deprecated_field_number protoreflect.FieldNumber = 3
EnumOptions_DeprecatedLegacyJsonFieldConflicts_field_number protoreflect.FieldNumber = 6
EnumOptions_Features_field_number protoreflect.FieldNumber = 7
EnumOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -648,15 +738,21 @@ const (
// Field names for google.protobuf.EnumValueOptions.
const (
EnumValueOptions_Deprecated_field_name protoreflect.Name = "deprecated"
EnumValueOptions_Features_field_name protoreflect.Name = "features"
EnumValueOptions_DebugRedact_field_name protoreflect.Name = "debug_redact"
EnumValueOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
EnumValueOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.deprecated"
EnumValueOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.features"
EnumValueOptions_DebugRedact_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.debug_redact"
EnumValueOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.uninterpreted_option"
)
// Field numbers for google.protobuf.EnumValueOptions.
const (
EnumValueOptions_Deprecated_field_number protoreflect.FieldNumber = 1
EnumValueOptions_Features_field_number protoreflect.FieldNumber = 2
EnumValueOptions_DebugRedact_field_number protoreflect.FieldNumber = 3
EnumValueOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -668,15 +764,18 @@ const (
// Field names for google.protobuf.ServiceOptions.
const (
ServiceOptions_Features_field_name protoreflect.Name = "features"
ServiceOptions_Deprecated_field_name protoreflect.Name = "deprecated"
ServiceOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
ServiceOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.ServiceOptions.features"
ServiceOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.ServiceOptions.deprecated"
ServiceOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.ServiceOptions.uninterpreted_option"
)
// Field numbers for google.protobuf.ServiceOptions.
const (
ServiceOptions_Features_field_number protoreflect.FieldNumber = 34
ServiceOptions_Deprecated_field_number protoreflect.FieldNumber = 33
ServiceOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -691,10 +790,12 @@ const (
const (
MethodOptions_Deprecated_field_name protoreflect.Name = "deprecated"
MethodOptions_IdempotencyLevel_field_name protoreflect.Name = "idempotency_level"
MethodOptions_Features_field_name protoreflect.Name = "features"
MethodOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option"
MethodOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.deprecated"
MethodOptions_IdempotencyLevel_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.idempotency_level"
MethodOptions_Features_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.features"
MethodOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.uninterpreted_option"
)
@ -702,6 +803,7 @@ const (
const (
MethodOptions_Deprecated_field_number protoreflect.FieldNumber = 33
MethodOptions_IdempotencyLevel_field_number protoreflect.FieldNumber = 34
MethodOptions_Features_field_number protoreflect.FieldNumber = 35
MethodOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999
)
@ -768,6 +870,120 @@ const (
UninterpretedOption_NamePart_IsExtension_field_number protoreflect.FieldNumber = 2
)
// Names for google.protobuf.FeatureSet.
const (
FeatureSet_message_name protoreflect.Name = "FeatureSet"
FeatureSet_message_fullname protoreflect.FullName = "google.protobuf.FeatureSet"
)
// Field names for google.protobuf.FeatureSet.
const (
FeatureSet_FieldPresence_field_name protoreflect.Name = "field_presence"
FeatureSet_EnumType_field_name protoreflect.Name = "enum_type"
FeatureSet_RepeatedFieldEncoding_field_name protoreflect.Name = "repeated_field_encoding"
FeatureSet_Utf8Validation_field_name protoreflect.Name = "utf8_validation"
FeatureSet_MessageEncoding_field_name protoreflect.Name = "message_encoding"
FeatureSet_JsonFormat_field_name protoreflect.Name = "json_format"
FeatureSet_FieldPresence_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.field_presence"
FeatureSet_EnumType_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.enum_type"
FeatureSet_RepeatedFieldEncoding_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.repeated_field_encoding"
FeatureSet_Utf8Validation_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.utf8_validation"
FeatureSet_MessageEncoding_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.message_encoding"
FeatureSet_JsonFormat_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.json_format"
)
// Field numbers for google.protobuf.FeatureSet.
const (
FeatureSet_FieldPresence_field_number protoreflect.FieldNumber = 1
FeatureSet_EnumType_field_number protoreflect.FieldNumber = 2
FeatureSet_RepeatedFieldEncoding_field_number protoreflect.FieldNumber = 3
FeatureSet_Utf8Validation_field_number protoreflect.FieldNumber = 4
FeatureSet_MessageEncoding_field_number protoreflect.FieldNumber = 5
FeatureSet_JsonFormat_field_number protoreflect.FieldNumber = 6
)
// Full and short names for google.protobuf.FeatureSet.FieldPresence.
const (
FeatureSet_FieldPresence_enum_fullname = "google.protobuf.FeatureSet.FieldPresence"
FeatureSet_FieldPresence_enum_name = "FieldPresence"
)
// Full and short names for google.protobuf.FeatureSet.EnumType.
const (
FeatureSet_EnumType_enum_fullname = "google.protobuf.FeatureSet.EnumType"
FeatureSet_EnumType_enum_name = "EnumType"
)
// Full and short names for google.protobuf.FeatureSet.RepeatedFieldEncoding.
const (
FeatureSet_RepeatedFieldEncoding_enum_fullname = "google.protobuf.FeatureSet.RepeatedFieldEncoding"
FeatureSet_RepeatedFieldEncoding_enum_name = "RepeatedFieldEncoding"
)
// Full and short names for google.protobuf.FeatureSet.Utf8Validation.
const (
FeatureSet_Utf8Validation_enum_fullname = "google.protobuf.FeatureSet.Utf8Validation"
FeatureSet_Utf8Validation_enum_name = "Utf8Validation"
)
// Full and short names for google.protobuf.FeatureSet.MessageEncoding.
const (
FeatureSet_MessageEncoding_enum_fullname = "google.protobuf.FeatureSet.MessageEncoding"
FeatureSet_MessageEncoding_enum_name = "MessageEncoding"
)
// Full and short names for google.protobuf.FeatureSet.JsonFormat.
const (
FeatureSet_JsonFormat_enum_fullname = "google.protobuf.FeatureSet.JsonFormat"
FeatureSet_JsonFormat_enum_name = "JsonFormat"
)
// Names for google.protobuf.FeatureSetDefaults.
const (
FeatureSetDefaults_message_name protoreflect.Name = "FeatureSetDefaults"
FeatureSetDefaults_message_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults"
)
// Field names for google.protobuf.FeatureSetDefaults.
const (
FeatureSetDefaults_Defaults_field_name protoreflect.Name = "defaults"
FeatureSetDefaults_MinimumEdition_field_name protoreflect.Name = "minimum_edition"
FeatureSetDefaults_MaximumEdition_field_name protoreflect.Name = "maximum_edition"
FeatureSetDefaults_Defaults_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.defaults"
FeatureSetDefaults_MinimumEdition_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.minimum_edition"
FeatureSetDefaults_MaximumEdition_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.maximum_edition"
)
// Field numbers for google.protobuf.FeatureSetDefaults.
const (
FeatureSetDefaults_Defaults_field_number protoreflect.FieldNumber = 1
FeatureSetDefaults_MinimumEdition_field_number protoreflect.FieldNumber = 4
FeatureSetDefaults_MaximumEdition_field_number protoreflect.FieldNumber = 5
)
// Names for google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.
const (
FeatureSetDefaults_FeatureSetEditionDefault_message_name protoreflect.Name = "FeatureSetEditionDefault"
FeatureSetDefaults_FeatureSetEditionDefault_message_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault"
)
// Field names for google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.
const (
FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_name protoreflect.Name = "edition"
FeatureSetDefaults_FeatureSetEditionDefault_Features_field_name protoreflect.Name = "features"
FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.edition"
FeatureSetDefaults_FeatureSetEditionDefault_Features_field_fullname protoreflect.FullName = "google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.features"
)
// Field numbers for google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.
const (
FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_number protoreflect.FieldNumber = 3
FeatureSetDefaults_FeatureSetEditionDefault_Features_field_number protoreflect.FieldNumber = 2
)
// Names for google.protobuf.SourceCodeInfo.
const (
SourceCodeInfo_message_name protoreflect.Name = "SourceCodeInfo"
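The identifiers added above are plain protoreflect.Name/FullName string constants for the editions-related descriptor messages (FieldOptions.EditionDefault, FeatureSet, FeatureSetDefaults, ...). As a hedged illustration that is not part of this diff, the same full-name strings can be resolved against the global registry once the regenerated descriptorpb is linked in; FindDescriptorByName and the import paths are standard protobuf-go APIs, the rest is example scaffolding.

package main

import (
    "fmt"

    "google.golang.org/protobuf/reflect/protoregistry"
    _ "google.golang.org/protobuf/types/descriptorpb" // registers descriptor.proto, including FeatureSet
)

func main() {
    // The same string that genid exposes as FeatureSet_message_fullname.
    d, err := protoregistry.GlobalFiles.FindDescriptorByName("google.protobuf.FeatureSet")
    if err != nil {
        fmt.Println("lookup failed:", err)
        return
    }
    fmt.Println(d.FullName()) // google.protobuf.FeatureSet
}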

View File

@ -32,6 +32,7 @@ const (
Type_Options_field_name protoreflect.Name = "options"
Type_SourceContext_field_name protoreflect.Name = "source_context"
Type_Syntax_field_name protoreflect.Name = "syntax"
Type_Edition_field_name protoreflect.Name = "edition"
Type_Name_field_fullname protoreflect.FullName = "google.protobuf.Type.name"
Type_Fields_field_fullname protoreflect.FullName = "google.protobuf.Type.fields"
@ -39,6 +40,7 @@ const (
Type_Options_field_fullname protoreflect.FullName = "google.protobuf.Type.options"
Type_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Type.source_context"
Type_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Type.syntax"
Type_Edition_field_fullname protoreflect.FullName = "google.protobuf.Type.edition"
)
// Field numbers for google.protobuf.Type.
@ -49,6 +51,7 @@ const (
Type_Options_field_number protoreflect.FieldNumber = 4
Type_SourceContext_field_number protoreflect.FieldNumber = 5
Type_Syntax_field_number protoreflect.FieldNumber = 6
Type_Edition_field_number protoreflect.FieldNumber = 7
)
// Names for google.protobuf.Field.
@ -121,12 +124,14 @@ const (
Enum_Options_field_name protoreflect.Name = "options"
Enum_SourceContext_field_name protoreflect.Name = "source_context"
Enum_Syntax_field_name protoreflect.Name = "syntax"
Enum_Edition_field_name protoreflect.Name = "edition"
Enum_Name_field_fullname protoreflect.FullName = "google.protobuf.Enum.name"
Enum_Enumvalue_field_fullname protoreflect.FullName = "google.protobuf.Enum.enumvalue"
Enum_Options_field_fullname protoreflect.FullName = "google.protobuf.Enum.options"
Enum_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Enum.source_context"
Enum_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Enum.syntax"
Enum_Edition_field_fullname protoreflect.FullName = "google.protobuf.Enum.edition"
)
// Field numbers for google.protobuf.Enum.
@ -136,6 +141,7 @@ const (
Enum_Options_field_number protoreflect.FieldNumber = 3
Enum_SourceContext_field_number protoreflect.FieldNumber = 4
Enum_Syntax_field_number protoreflect.FieldNumber = 5
Enum_Edition_field_number protoreflect.FieldNumber = 6
)
// Names for google.protobuf.EnumValue.

View File

@ -162,11 +162,20 @@ func appendBoolSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions
func consumeBoolSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.BoolSlice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := 0
for _, v := range b {
if v < 0x80 {
count++
}
}
if count > 0 {
p.growBoolSlice(count)
}
s := *sp
for len(b) > 0 {
var v uint64
var n int
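This hunk, and the near-identical ones below for the other packed varint kinds, pre-sizes the destination slice before decoding: every byte with the continuation bit (0x80) clear terminates exactly one varint, so counting such bytes yields the element count of the packed payload, and the grow*Slice call reserves capacity once instead of reallocating on every append. A self-contained sketch of the same idea, assuming only the public protowire package (decodePackedVarints is an illustrative name):

package main

import (
    "fmt"

    "google.golang.org/protobuf/encoding/protowire"
)

// decodePackedVarints decodes a packed varint payload into a pre-sized slice.
func decodePackedVarints(b []byte) ([]uint64, error) {
    // Count terminator bytes (high bit clear) to learn the element count.
    count := 0
    for _, v := range b {
        if v < 0x80 {
            count++
        }
    }
    out := make([]uint64, 0, count) // single allocation up front
    for len(b) > 0 {
        v, n := protowire.ConsumeVarint(b)
        if n < 0 {
            return nil, protowire.ParseError(n)
        }
        out = append(out, v)
        b = b[n:]
    }
    return out, nil
}

func main() {
    payload := protowire.AppendVarint(nil, 1)
    payload = protowire.AppendVarint(payload, 300)
    vals, _ := decodePackedVarints(payload)
    fmt.Println(vals) // [1 300]
}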
@ -732,11 +741,20 @@ func appendInt32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOption
func consumeInt32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Int32Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := 0
for _, v := range b {
if v < 0x80 {
count++
}
}
if count > 0 {
p.growInt32Slice(count)
}
s := *sp
for len(b) > 0 {
var v uint64
var n int
@ -1138,11 +1156,20 @@ func appendSint32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptio
func consumeSint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Int32Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := 0
for _, v := range b {
if v < 0x80 {
count++
}
}
if count > 0 {
p.growInt32Slice(count)
}
s := *sp
for len(b) > 0 {
var v uint64
var n int
@ -1544,11 +1571,20 @@ func appendUint32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptio
func consumeUint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Uint32Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := 0
for _, v := range b {
if v < 0x80 {
count++
}
}
if count > 0 {
p.growUint32Slice(count)
}
s := *sp
for len(b) > 0 {
var v uint64
var n int
@ -1950,11 +1986,20 @@ func appendInt64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOption
func consumeInt64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Int64Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := 0
for _, v := range b {
if v < 0x80 {
count++
}
}
if count > 0 {
p.growInt64Slice(count)
}
s := *sp
for len(b) > 0 {
var v uint64
var n int
@ -2356,11 +2401,20 @@ func appendSint64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptio
func consumeSint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Int64Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := 0
for _, v := range b {
if v < 0x80 {
count++
}
}
if count > 0 {
p.growInt64Slice(count)
}
s := *sp
for len(b) > 0 {
var v uint64
var n int
@ -2762,11 +2816,20 @@ func appendUint64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptio
func consumeUint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Uint64Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := 0
for _, v := range b {
if v < 0x80 {
count++
}
}
if count > 0 {
p.growUint64Slice(count)
}
s := *sp
for len(b) > 0 {
var v uint64
var n int
@ -3145,11 +3208,15 @@ func appendSfixed32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOpt
func consumeSfixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Int32Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := len(b) / protowire.SizeFixed32()
if count > 0 {
p.growInt32Slice(count)
}
s := *sp
for len(b) > 0 {
v, n := protowire.ConsumeFixed32(b)
if n < 0 {
@ -3461,11 +3528,15 @@ func appendFixed32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOpti
func consumeFixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Uint32Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := len(b) / protowire.SizeFixed32()
if count > 0 {
p.growUint32Slice(count)
}
s := *sp
for len(b) > 0 {
v, n := protowire.ConsumeFixed32(b)
if n < 0 {
@ -3777,11 +3848,15 @@ func appendFloatSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOption
func consumeFloatSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Float32Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := len(b) / protowire.SizeFixed32()
if count > 0 {
p.growFloat32Slice(count)
}
s := *sp
for len(b) > 0 {
v, n := protowire.ConsumeFixed32(b)
if n < 0 {
@ -4093,11 +4168,15 @@ func appendSfixed64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOpt
func consumeSfixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Int64Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := len(b) / protowire.SizeFixed64()
if count > 0 {
p.growInt64Slice(count)
}
s := *sp
for len(b) > 0 {
v, n := protowire.ConsumeFixed64(b)
if n < 0 {
@ -4409,11 +4488,15 @@ func appendFixed64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOpti
func consumeFixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Uint64Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := len(b) / protowire.SizeFixed64()
if count > 0 {
p.growUint64Slice(count)
}
s := *sp
for len(b) > 0 {
v, n := protowire.ConsumeFixed64(b)
if n < 0 {
@ -4725,11 +4808,15 @@ func appendDoubleSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptio
func consumeDoubleSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {
sp := p.Float64Slice()
if wtyp == protowire.BytesType {
s := *sp
b, n := protowire.ConsumeBytes(b)
if n < 0 {
return out, errDecode
}
count := len(b) / protowire.SizeFixed64()
if count > 0 {
p.growFloat64Slice(count)
}
s := *sp
for len(b) > 0 {
v, n := protowire.ConsumeFixed64(b)
if n < 0 {
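The fixed-width hunks above apply the same pre-sizing idea with an exact byte count instead of a scan: a packed fixed32/float payload holds len(b)/protowire.SizeFixed32() elements and a fixed64/double payload len(b)/protowire.SizeFixed64(). A minimal continuation of the earlier sketch, reusing the same protowire import (decodePackedFixed32 is an illustrative name):

// decodePackedFixed32 decodes a packed fixed32 payload into a pre-sized slice.
func decodePackedFixed32(b []byte) ([]uint32, error) {
    out := make([]uint32, 0, len(b)/protowire.SizeFixed32()) // exact element count
    for len(b) > 0 {
        v, n := protowire.ConsumeFixed32(b)
        if n < 0 {
            return nil, protowire.ParseError(n)
        }
        out = append(out, v)
        b = b[n:]
    }
    return out, nil
}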

View File

@ -206,13 +206,18 @@ func aberrantLoadMessageDescReentrant(t reflect.Type, name protoreflect.FullName
// Obtain a list of oneof wrapper types.
var oneofWrappers []reflect.Type
for _, method := range []string{"XXX_OneofFuncs", "XXX_OneofWrappers"} {
if fn, ok := t.MethodByName(method); ok {
for _, v := range fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))}) {
if vs, ok := v.Interface().([]interface{}); ok {
for _, v := range vs {
oneofWrappers = append(oneofWrappers, reflect.TypeOf(v))
}
methods := make([]reflect.Method, 0, 2)
if m, ok := t.MethodByName("XXX_OneofFuncs"); ok {
methods = append(methods, m)
}
if m, ok := t.MethodByName("XXX_OneofWrappers"); ok {
methods = append(methods, m)
}
for _, fn := range methods {
for _, v := range fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))}) {
if vs, ok := v.Interface().([]interface{}); ok {
for _, v := range vs {
oneofWrappers = append(oneofWrappers, reflect.TypeOf(v))
}
}
}

View File

@ -192,12 +192,17 @@ fieldLoop:
// Derive a mapping of oneof wrappers to fields.
oneofWrappers := mi.OneofWrappers
for _, method := range []string{"XXX_OneofFuncs", "XXX_OneofWrappers"} {
if fn, ok := reflect.PtrTo(t).MethodByName(method); ok {
for _, v := range fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))}) {
if vs, ok := v.Interface().([]interface{}); ok {
oneofWrappers = vs
}
methods := make([]reflect.Method, 0, 2)
if m, ok := reflect.PtrTo(t).MethodByName("XXX_OneofFuncs"); ok {
methods = append(methods, m)
}
if m, ok := reflect.PtrTo(t).MethodByName("XXX_OneofWrappers"); ok {
methods = append(methods, m)
}
for _, fn := range methods {
for _, v := range fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))}) {
if vs, ok := v.Interface().([]interface{}); ok {
oneofWrappers = vs
}
}
}
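Both of the preceding hunks unroll the loop over {"XXX_OneofFuncs", "XXX_OneofWrappers"} into two explicit MethodByName lookups with constant string arguments. The behavior is unchanged; the rewrite is presumably aimed at the Go linker, whose dead-code elimination handles MethodByName calls with literal names far better than calls whose name is a loop variable. A reduced sketch of the resulting shape (oneofWrapperMethods is an illustrative helper, assuming the standard reflect import):

// oneofWrapperMethods gathers the legacy oneof hook methods, if present,
// using only constant method names so the lookup set stays statically known.
func oneofWrapperMethods(t reflect.Type) []reflect.Method {
    methods := make([]reflect.Method, 0, 2)
    if m, ok := t.MethodByName("XXX_OneofFuncs"); ok {
        methods = append(methods, m)
    }
    if m, ok := t.MethodByName("XXX_OneofWrappers"); ok {
        methods = append(methods, m)
    }
    return methods
}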

View File

@ -159,6 +159,42 @@ func (p pointer) SetPointer(v pointer) {
p.v.Elem().Set(v.v)
}
func growSlice(p pointer, addCap int) {
// TODO: Once we only support Go 1.20 and newer, use reflect.Grow.
in := p.v.Elem()
out := reflect.MakeSlice(in.Type(), in.Len(), in.Len()+addCap)
reflect.Copy(out, in)
p.v.Elem().Set(out)
}
func (p pointer) growBoolSlice(addCap int) {
growSlice(p, addCap)
}
func (p pointer) growInt32Slice(addCap int) {
growSlice(p, addCap)
}
func (p pointer) growUint32Slice(addCap int) {
growSlice(p, addCap)
}
func (p pointer) growInt64Slice(addCap int) {
growSlice(p, addCap)
}
func (p pointer) growUint64Slice(addCap int) {
growSlice(p, addCap)
}
func (p pointer) growFloat64Slice(addCap int) {
growSlice(p, addCap)
}
func (p pointer) growFloat32Slice(addCap int) {
growSlice(p, addCap)
}
func (Export) MessageStateOf(p Pointer) *messageState { panic("not supported") }
func (ms *messageState) pointer() pointer { panic("not supported") }
func (ms *messageState) messageInfo() *MessageInfo { panic("not supported") }
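growSlice above is the purego/appengine fallback: it allocates a larger backing array with reflect.MakeSlice, copies the existing elements, and stores the result back through the slice pointer, leaving the length untouched. The TODO refers to reflect.Value.Grow, available since Go 1.20, which reserves capacity in place; a hedged sketch of what that variant might look like once older Go versions are dropped (growSliceWithGrow is hypothetical):

// Hypothetical replacement: Grow reserves capacity on the addressable slice
// behind p.v without changing its length, matching growSlice's behavior.
func growSliceWithGrow(p pointer, addCap int) {
    p.v.Elem().Grow(addCap)
}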

View File

@ -138,6 +138,46 @@ func (p pointer) SetPointer(v pointer) {
*(*unsafe.Pointer)(p.p) = (unsafe.Pointer)(v.p)
}
func (p pointer) growBoolSlice(addCap int) {
sp := p.BoolSlice()
s := make([]bool, 0, addCap+len(*sp))
s = s[:len(*sp)]
copy(s, *sp)
*sp = s
}
func (p pointer) growInt32Slice(addCap int) {
sp := p.Int32Slice()
s := make([]int32, 0, addCap+len(*sp))
s = s[:len(*sp)]
copy(s, *sp)
*sp = s
}
func (p pointer) growUint32Slice(addCap int) {
p.growInt32Slice(addCap)
}
func (p pointer) growFloat32Slice(addCap int) {
p.growInt32Slice(addCap)
}
func (p pointer) growInt64Slice(addCap int) {
sp := p.Int64Slice()
s := make([]int64, 0, addCap+len(*sp))
s = s[:len(*sp)]
copy(s, *sp)
*sp = s
}
func (p pointer) growUint64Slice(addCap int) {
p.growInt64Slice(addCap)
}
func (p pointer) growFloat64Slice(addCap int) {
p.growInt64Slice(addCap)
}
// Static check that MessageState does not exceed the size of a pointer.
const _ = uint(unsafe.Sizeof(unsafe.Pointer(nil)) - unsafe.Sizeof(MessageState{}))

View File

@ -33,7 +33,7 @@ var (
return !inOneof(ox) && inOneof(oy)
}
// Fields in disjoint oneof sets are sorted by declaration index.
if ox != nil && oy != nil && ox != oy {
if inOneof(ox) && inOneof(oy) && ox != oy {
return ox.Index() < oy.Index()
}
// Fields sorted by field number.
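The one-line fix tightens the disjoint-oneof case: ox and oy are the fields' containing oneofs, and the old nil checks also matched the synthetic oneofs that back proto3 optional fields, so two such fields could be ordered by synthetic-oneof index instead of falling through to the field-number comparison. inOneof presumably admits only real oneofs, along the lines of this illustrative helper:

// inOneof reports whether a field belongs to a real (non-synthetic) oneof.
func inOneof(od protoreflect.OneofDescriptor) bool {
    return od != nil && !od.IsSynthetic()
}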

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !purego && !appengine
// +build !purego,!appengine
//go:build !purego && !appengine && !go1.21
// +build !purego,!appengine,!go1.21
package strs

View File

@ -0,0 +1,74 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !purego && !appengine && go1.21
// +build !purego,!appengine,go1.21
package strs
import (
"unsafe"
"google.golang.org/protobuf/reflect/protoreflect"
)
// UnsafeString returns an unsafe string reference of b.
// The caller must treat the input slice as immutable.
//
// WARNING: Use carefully. The returned result must not leak to the end user
// unless the input slice is provably immutable.
func UnsafeString(b []byte) string {
return unsafe.String(unsafe.SliceData(b), len(b))
}
// UnsafeBytes returns an unsafe byte slice reference of s.
// The caller must treat the returned slice as immutable.
//
// WARNING: Use carefully. The returned result must not leak to the end user.
func UnsafeBytes(s string) []byte {
return unsafe.Slice(unsafe.StringData(s), len(s))
}
// Builder builds a set of strings with shared lifetime.
// This differs from strings.Builder, which is for building a single string.
type Builder struct {
buf []byte
}
// AppendFullName is equivalent to protoreflect.FullName.Append,
// but optimized for large batches where each name has a shared lifetime.
func (sb *Builder) AppendFullName(prefix protoreflect.FullName, name protoreflect.Name) protoreflect.FullName {
n := len(prefix) + len(".") + len(name)
if len(prefix) == 0 {
n -= len(".")
}
sb.grow(n)
sb.buf = append(sb.buf, prefix...)
sb.buf = append(sb.buf, '.')
sb.buf = append(sb.buf, name...)
return protoreflect.FullName(sb.last(n))
}
// MakeString is equivalent to string(b), but optimized for large batches
// with a shared lifetime.
func (sb *Builder) MakeString(b []byte) string {
sb.grow(len(b))
sb.buf = append(sb.buf, b...)
return sb.last(len(b))
}
func (sb *Builder) grow(n int) {
if cap(sb.buf)-len(sb.buf) >= n {
return
}
// Unlike strings.Builder, we do not need to copy over the contents
// of the old buffer since our builder provides no API for
// retrieving previously created strings.
sb.buf = make([]byte, 0, 2*(cap(sb.buf)+n))
}
func (sb *Builder) last(n int) string {
return UnsafeString(sb.buf[len(sb.buf)-n:])
}
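This new file is the go1.21 build of the strs package and leans on the unsafe.String/unsafe.SliceData and unsafe.Slice/unsafe.StringData pairs instead of hand-rolled header conversions. The warnings in the comments are the essential part: the conversions alias the original memory, so mutating the byte slice afterwards is visible through the returned string. A small standalone illustration, not part of the library:

package main

import (
    "fmt"
    "unsafe"
)

func main() {
    b := []byte("hello")
    s := unsafe.String(unsafe.SliceData(b), len(b)) // zero-copy view of b

    fmt.Println(s) // hello
    b[0] = 'H'     // the mutation is visible through s; callers must not do this
    fmt.Println(s) // Hello
}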

View File

@ -51,7 +51,7 @@ import (
// 10. Send out the CL for review and submit it.
const (
Major = 1
Minor = 30
Minor = 32
Patch = 0
PreRelease = ""
)