rename SensitiveByteSlice to Sensitive (#8553)
mangalaman93 authored Jan 3, 2023
1 parent 32d5c9f commit 4c1b575
Showing 30 changed files with 77 additions and 70 deletions.
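The new x.Sensitive type itself is not part of this diff. As a point of reference, here is a minimal sketch of what the definition presumably looks like after the rename — assuming, as the old name SensitiveByteSlice and the x.Sensitive(valueString) conversion further down suggest, that the type stays a plain byte slice whose String method redacts its contents (the redaction is an assumption about why the wrapper exists, not something shown here):

package x

// Sensitive holds secret material such as encryption or ACL keys.
type Sensitive []byte

// String redacts the underlying bytes so that accidental logging or %v/%s
// formatting does not leak the key. (Assumed behaviour, not taken from the diff.)
func (Sensitive) String() string {
	return "****"
}
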
4 changes: 2 additions & 2 deletions chunker/chunk.go
@@ -352,7 +352,7 @@ func slurpQuoted(r *bufio.Reader, out *bytes.Buffer) error {
// and decompressed automatically even without the gz extension. The key, if non-nil,
// is used to decrypt the file. The caller is responsible for calling the returned cleanup
// function when done with the reader.
-func FileReader(file string, key x.SensitiveByteSlice) (*bufio.Reader, func()) {
+func FileReader(file string, key x.Sensitive) (*bufio.Reader, func()) {
var f *os.File
var err error
if file == "-" {
@@ -367,7 +367,7 @@ func FileReader(file string, key x.SensitiveByteSlice) (*bufio.Reader, func()) {
}

// StreamReader returns a bufio given a ReadCloser. The file is passed just to check for .gz files
-func StreamReader(file string, key x.SensitiveByteSlice, f io.ReadCloser) (
+func StreamReader(file string, key x.Sensitive, f io.ReadCloser) (
rd *bufio.Reader, cleanup func()) {
cleanup = func() { _ = f.Close() }

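A hypothetical caller of the renamed FileReader above, sketched from its doc comment: gzip is handled transparently (even without a .gz extension), a non-nil key enables decryption, and the returned cleanup function must be called when the reader is no longer needed. The import paths and the input file name are assumptions for illustration.

package main

import (
	"fmt"
	"io"

	"github.com/dgraph-io/dgraph/chunker"
	"github.com/dgraph-io/dgraph/x"
)

func main() {
	var key x.Sensitive // nil key: no decryption, gzip detection still applies
	rd, cleanup := chunker.FileReader("data.rdf.gz", key) // hypothetical input file
	defer cleanup()

	// Stream the file line by line through the buffered reader.
	for {
		line, err := rd.ReadString('\n')
		fmt.Print(line)
		if err == io.EOF {
			break
		}
		x.Check(err) // x.Check aborts on unexpected errors, as used elsewhere in this diff
	}
}
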
2 changes: 1 addition & 1 deletion dgraph/cmd/bulk/loader.go
@@ -83,7 +83,7 @@ type options struct {

// ........... Badger options ..........
// EncryptionKey is the key used for encryption. Enterprise only feature.
-EncryptionKey x.SensitiveByteSlice
+EncryptionKey x.Sensitive
// Badger options.
Badger badger.Options
}
2 changes: 1 addition & 1 deletion dgraph/cmd/debug/run.go
@@ -69,7 +69,7 @@ type flagOptions struct {
readTs uint64
sizeHistogram bool
noKeys bool
-key x.SensitiveByteSlice
+key x.Sensitive

// Options related to the WAL.
wdir string
2 changes: 1 addition & 1 deletion dgraph/cmd/decrypt/decrypt.go
@@ -31,7 +31,7 @@ import (

type options struct {
// keyfile comes from the encryption or Vault flags
-keyfile x.SensitiveByteSlice
+keyfile x.Sensitive
file string
output string
}
6 changes: 3 additions & 3 deletions dgraph/cmd/live/run.go
@@ -79,7 +79,7 @@ type options struct {
ludicrousMode bool
upsertPredicate string
tmpDir string
-key x.SensitiveByteSlice
+key x.Sensitive
namespaceToLoad uint64
preserveNs bool
}
@@ -234,7 +234,7 @@ func validateSchema(sch string, namespaces map[uint64]struct{}) error {
}

// processSchemaFile process schema for a given gz file.
-func (l *loader) processSchemaFile(ctx context.Context, file string, key x.SensitiveByteSlice,
+func (l *loader) processSchemaFile(ctx context.Context, file string, key x.Sensitive,
dgraphClient *dgo.Dgraph) error {
fmt.Printf("\nProcessing schema file %q\n", file)
if len(opt.authToken) > 0 {
@@ -461,7 +461,7 @@ func (l *loader) allocateUids(nqs []*api.NQuad) {

// processFile forwards a file to the RDF or JSON processor as appropriate
func (l *loader) processFile(ctx context.Context, fs filestore.FileStore, filename string,
-key x.SensitiveByteSlice) error {
+key x.Sensitive) error {

fmt.Printf("Processing data file %q\n", filename)

3 changes: 2 additions & 1 deletion ee/backup/run.go
@@ -1,3 +1,4 @@
+//go:build !oss
// +build !oss

/*
@@ -55,7 +56,7 @@ var opt struct {
location string
pdir string
zero string
-key x.SensitiveByteSlice
+key x.Sensitive
forceZero bool
destination string
format string
5 changes: 3 additions & 2 deletions ee/enc/util_ee.go
@@ -1,3 +1,4 @@
+//go:build !oss
// +build !oss

/*
@@ -26,7 +27,7 @@ import (
var EeBuild = true

// GetWriter wraps a crypto StreamWriter using the input key on the input Writer.
-func GetWriter(key x.SensitiveByteSlice, w io.Writer) (io.Writer, error) {
+func GetWriter(key x.Sensitive, w io.Writer) (io.Writer, error) {
// No encryption, return the input writer as is.
if key == nil {
return w, nil
@@ -49,7 +50,7 @@ func GetWriter(key x.SensitiveByteSlice, w io.Writer) (io.Writer, error) {
}

// GetReader wraps a crypto StreamReader using the input key on the input Reader.
-func GetReader(key x.SensitiveByteSlice, r io.Reader) (io.Reader, error) {
+func GetReader(key x.Sensitive, r io.Reader) (io.Reader, error) {
// No encryption, return input reader as is.
if key == nil {
return r, nil
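A hypothetical round trip through the two renamed helpers above, leaning on the nil-key fast path visible in these hunks: with key == nil both GetWriter and GetReader hand back their arguments untouched, so the pair below degenerates to plain buffer I/O, while a non-nil key would switch on the enterprise encryption path instead. Import paths are assumed to follow the repository layout.

package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/dgraph-io/dgraph/ee/enc"
	"github.com/dgraph-io/dgraph/x"
)

func main() {
	var key x.Sensitive // nil: both helpers are pass-throughs
	var buf bytes.Buffer

	w, err := enc.GetWriter(key, &buf) // returns &buf unchanged when key == nil
	x.Check(err)
	_, err = w.Write([]byte("hello"))
	x.Check(err)

	r, err := enc.GetReader(key, &buf) // likewise a pass-through for a nil key
	x.Check(err)
	out, err := io.ReadAll(r)
	x.Check(err)
	fmt.Println(string(out)) // "hello"
}
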
4 changes: 2 additions & 2 deletions ee/flags.go
@@ -28,10 +28,10 @@ import (

// Keys holds the configuration for ACL and encryption.
type Keys struct {
-AclKey x.SensitiveByteSlice
+AclKey x.Sensitive
AclAccessTtl time.Duration
AclRefreshTtl time.Duration
-EncKey x.SensitiveByteSlice
+EncKey x.Sensitive
}

const (
9 changes: 5 additions & 4 deletions ee/vault_ee.go
@@ -1,3 +1,4 @@
+//go:build !oss
// +build !oss

/*
@@ -25,7 +26,7 @@ import (
"github.com/spf13/viper"
)

-func vaultGetKeys(config *viper.Viper) (aclKey, encKey x.SensitiveByteSlice) {
+func vaultGetKeys(config *viper.Viper) (aclKey, encKey x.Sensitive) {
// Avoid querying Vault unless the flag has been explicitly set.
if !config.IsSet(flagVault) {
return
@@ -92,7 +93,7 @@ func vaultGetKvStore(client *api.Client, path string) (vaultKvStore, error) {
}

// getSensitiveBytes retrieves a value from a kvStore, decoding it if necessary.
-func (kv vaultKvStore) getSensitiveBytes(field, format string) (x.SensitiveByteSlice, error) {
+func (kv vaultKvStore) getSensitiveBytes(field, format string) (x.Sensitive, error) {
value, ok := kv[field]
if !ok {
return nil, fmt.Errorf("vault: key '%s' not found", field)
@@ -104,7 +105,7 @@ func (kv vaultKvStore) getSensitiveBytes(field, format string) (x.SensitiveByteS
}

// Decode value if necessary.
-var valueBytes x.SensitiveByteSlice
+var valueBytes x.Sensitive
var err error
if format == "base64" {
valueBytes, err = base64.StdEncoding.DecodeString(valueString)
@@ -113,7 +114,7 @@ func (kv vaultKvStore) getSensitiveBytes(field, format string) (x.SensitiveByteS
"vault: key '%s' could not be decoded as a base64 string: %s", field, err)
}
} else {
-valueBytes = x.SensitiveByteSlice(valueString)
+valueBytes = x.Sensitive(valueString)
}

return valueBytes, nil
2 changes: 1 addition & 1 deletion filestore/filestore.go
@@ -31,7 +31,7 @@ type FileStore interface {
Open(path string) (io.ReadCloser, error)
Exists(path string) bool
FindDataFiles(str string, ext []string) []string
-ChunkReader(file string, key x.SensitiveByteSlice) (*bufio.Reader, func())
+ChunkReader(file string, key x.Sensitive) (*bufio.Reader, func())
}

// NewFileStore returns a new file storage. If remote, it's backed by an x.MinioClient
2 changes: 1 addition & 1 deletion filestore/local_files.go
@@ -43,7 +43,7 @@ func (*localFiles) FindDataFiles(str string, ext []string) []string {
return x.FindDataFiles(str, ext)
}

-func (*localFiles) ChunkReader(file string, key x.SensitiveByteSlice) (*bufio.Reader, func()) {
+func (*localFiles) ChunkReader(file string, key x.Sensitive) (*bufio.Reader, func()) {
return chunker.FileReader(file, key)
}

2 changes: 1 addition & 1 deletion filestore/remote_files.go
@@ -77,7 +77,7 @@ func (rf *remoteFiles) FindDataFiles(str string, ext []string) (paths []string)
return
}

-func (rf *remoteFiles) ChunkReader(file string, key x.SensitiveByteSlice) (*bufio.Reader, func()) {
+func (rf *remoteFiles) ChunkReader(file string, key x.Sensitive) (*bufio.Reader, func()) {
url, err := url.Parse(file)
x.Check(err)

30 changes: 16 additions & 14 deletions graphql/schema/gqlschema.go
@@ -425,7 +425,7 @@ type directiveValidator func(
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List
+secrets map[string]x.Sensitive) gqlerror.List

type searchTypeIndex struct {
gqlType string
@@ -553,7 +553,7 @@ func ValidatorNoOp(
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {
return nil
}

@@ -852,7 +852,7 @@ func preGQLValidation(schema *ast.SchemaDocument) gqlerror.List {
// has fleshed out the schema structure; we just need to check if it also satisfies
// the extra rules.
func postGQLValidation(schema *ast.Schema, definitions []string,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {
var errs []*gqlerror.Error

for _, defn := range definitions {
@@ -1313,13 +1313,14 @@ func addPatchType(schema *ast.Schema, defn *ast.Definition, providesTypeMap map[
// and defn has a field of type R, e.g. if defn is like
// `type T { ... g: R ... }`
// then a query should be able to filter on g by term search on f, like
// query {
// getT(id: 0x123) {
// ...
// g(filter: { f: { anyofterms: "something" } }, first: 10) { ... }
// ...
// }
// }
//
// query {
// getT(id: 0x123) {
// ...
// g(filter: { f: { anyofterms: "something" } }, first: 10) { ... }
// ...
// }
// }
func addFieldFilters(
schema *ast.Schema,
defn *ast.Definition,
@@ -1534,10 +1535,11 @@ func mergeAndAddFilters(filterTypes []string, schema *ast.Schema, filterName str
// in constructing the corresponding query
// queryT(filter: TFilter, ... )
// and in adding search to any fields of this type, like:
// type R {
// f(filter: TFilter, ... ): T
// ...
// }
//
// type R {
// f(filter: TFilter, ... ): T
// ...
// }
func addFilterType(schema *ast.Schema, defn *ast.Definition, providesTypeMap map[string]bool) {
filterName := defn.Name + "Filter"
filter := &ast.Definition{
22 changes: 11 additions & 11 deletions graphql/schema/rules.go
@@ -833,7 +833,7 @@ func listValidityCheck(typ *ast.Definition, field *ast.FieldDefinition) gqlerror

func hasInverseValidation(sch *ast.Schema, typ *ast.Definition,
field *ast.FieldDefinition, dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {
var errs []*gqlerror.Error

invTypeName := field.Type.Name()
@@ -1017,7 +1017,7 @@ func searchValidation(
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {
var errs []*gqlerror.Error

arg := dir.Arguments.ForName(searchArgs)
@@ -1096,7 +1096,7 @@ func searchValidation(
}

func dgraphDirectiveValidation(sch *ast.Schema, typ *ast.Definition, field *ast.FieldDefinition,
-dir *ast.Directive, secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+dir *ast.Directive, secrets map[string]x.Sensitive) gqlerror.List {
var errs []*gqlerror.Error

if isID(field) {
@@ -1235,7 +1235,7 @@ func passwordValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {

return passwordDirectiveValidation(sch, typ)
}
@@ -1244,7 +1244,7 @@ func lambdaDirectiveValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {
// if the lambda url wasn't specified during alpha startup,
// just return that error. Don't confuse the user with errors from @custom yet.
if x.LambdaUrl(x.GalaxyNamespace) == "" {
@@ -1404,7 +1404,7 @@ func customDirectiveValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {
var errs []*gqlerror.Error

// 1. Validating custom directive itself
@@ -2072,7 +2072,7 @@ func idValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {
if field.Type.String() == "String!" ||
field.Type.String() == "Int!" ||
field.Type.String() == "Int64!" {
@@ -2152,7 +2152,7 @@ func apolloRequiresValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {

extendsDirective := typ.Directives.ForName(apolloExtendsDirective)
if extendsDirective == nil {
@@ -2193,7 +2193,7 @@ func apolloProvidesValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {

fldTypeDefn := sch.Types[field.Type.Name()]
keyDirective := fldTypeDefn.Directives.ForName(apolloKeyDirective)
@@ -2231,7 +2231,7 @@ func apolloExternalValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {

extendsDirective := typ.Directives.ForName(apolloExtendsDirective)
if extendsDirective == nil {
@@ -2275,7 +2275,7 @@ func remoteResponseValidation(sch *ast.Schema,
typ *ast.Definition,
field *ast.FieldDefinition,
dir *ast.Directive,
-secrets map[string]x.SensitiveByteSlice) gqlerror.List {
+secrets map[string]x.Sensitive) gqlerror.List {

remoteDirectiveDefn := typ.Directives.ForName(remoteDirective)
if remoteDirectiveDefn == nil {
6 changes: 3 additions & 3 deletions graphql/schema/schemagen.go
@@ -178,7 +178,7 @@ func (s *handler) GQLSchemaWithoutApolloExtras() string {
type metaInfo struct {
// secrets are key value pairs stored in the GraphQL schema which can be added as headers
// to requests which resolve custom queries/mutations. These are extracted from # Dgraph.Secret.
-secrets map[string]x.SensitiveByteSlice
+secrets map[string]x.Sensitive
// extraCorsHeaders are the allowed CORS Headers in addition to x.AccessControlAllowedHeaders.
// These are parsed from the forwardHeaders specified in the @custom directive.
// The header for Dgraph.Authorization is also part of this.
@@ -208,7 +208,7 @@ func parseMetaInfo(sch string) (*metaInfo, error) {
scanner := bufio.NewScanner(strings.NewReader(sch))
authSecret := ""
schMetaInfo := &metaInfo{
-secrets: make(map[string]x.SensitiveByteSlice),
+secrets: make(map[string]x.Sensitive),
allowedCorsOrigins: make(map[string]bool),
}
var err error
@@ -262,7 +262,7 @@ func parseMetaInfo(sch string) (*metaInfo, error) {
val = strings.Trim(val, `"`)
key := strings.Trim(parts[2], `"`)
// lets obfuscate the value of the secrets from here on.
-schMetaInfo.secrets[key] = x.SensitiveByteSlice(val)
+schMetaInfo.secrets[key] = x.Sensitive(val)
}
}

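For context on the parseMetaInfo hunks above: secrets arrive as specially formatted comments inside the GraphQL schema and end up stored as x.Sensitive values. The exact "# Dgraph.Secret <key> "<value>"" line format below is inferred from the parsing code (parts[2] trimmed as the key, the remainder as a quoted value) and should be read as an assumption; the surrounding type is purely illustrative.

package main

import "fmt"

// schema is a hypothetical schema string of the kind parseMetaInfo scans; the
// comment on the last line is what would be captured into metaInfo.secrets.
const schema = `
type User {
	id: ID!
	name: String
}

# Dgraph.Secret GITHUB_API_TOKEN "some-super-secret-token"
`

func main() {
	fmt.Println(schema)
}
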
2 changes: 1 addition & 1 deletion raftwal/log.go
@@ -62,7 +62,7 @@ const (

var (
emptyEntry = entry(make([]byte, entrySize))
-encryptionKey x.SensitiveByteSlice
+encryptionKey x.Sensitive
)

type entry []byte