
[BREAKING] Perf(restore): Implement map-reduce based restore #7664

Merged 11 commits on Mar 29, 2021
chunker/chunk.go: 2 additions & 2 deletions

@@ -352,7 +352,7 @@ func slurpQuoted(r *bufio.Reader, out *bytes.Buffer) error {
 // and decompressed automatically even without the gz extension. The key, if non-nil,
 // is used to decrypt the file. The caller is responsible for calling the returned cleanup
 // function when done with the reader.
-func FileReader(file string, key x.SensitiveByteSlice) (*bufio.Reader, func()) {
+func FileReader(file string, key x.Sensitive) (*bufio.Reader, func()) {
 	var f *os.File
 	var err error
 	if file == "-" {
@@ -367,7 +367,7 @@ func FileReader(file string, key x.SensitiveByteSlice) (*bufio.Reader, func()) {
 }

 // StreamReader returns a bufio given a ReadCloser. The file is passed just to check for .gz files
-func StreamReader(file string, key x.SensitiveByteSlice, f io.ReadCloser) (
+func StreamReader(file string, key x.Sensitive, f io.ReadCloser) (
 	rd *bufio.Reader, cleanup func()) {
 	cleanup = func() { _ = f.Close() }
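For reference, a minimal usage sketch of the FileReader API described by the doc comment above: a nil key reads without decryption, and the caller must invoke the returned cleanup function. The chunker import path is assumed from the repo layout.

```go
package main

import (
	"fmt"

	"github.com/dgraph-io/dgraph/chunker" // import path assumed from the repo layout
)

func main() {
	// A nil key skips decryption; gzipped input is decompressed
	// automatically, even without a .gz extension.
	rd, cleanup := chunker.FileReader("data.rdf.gz", nil)
	defer cleanup() // the caller owns cleanup, per the doc comment above

	line, err := rd.ReadString('\n')
	if err != nil {
		fmt.Println("read error:", err)
		return
	}
	fmt.Print(line)
}
```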
dgraph/cmd/bulk/loader.go: 1 addition & 1 deletion

@@ -84,7 +84,7 @@ type options struct {

 	// ........... Badger options ..........
 	// EncryptionKey is the key used for encryption. Enterprise only feature.
-	EncryptionKey x.SensitiveByteSlice
+	EncryptionKey x.Sensitive
 	// BadgerCompression is the compression algorithm to use while writing to badger.
 	BadgerCompression bo.CompressionType
 	// BadgerCompressionlevel is the compression level to use while writing to badger.
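For context on the rename that runs through this diff (x.SensitiveByteSlice to x.Sensitive): a minimal sketch of the pattern such a key type typically follows, a byte slice that redacts itself when formatted so keys do not leak into logs. The actual definition lives in Dgraph's x package and may differ.

```go
package x

import "fmt"

// Sensitive holds key material. Because it implements fmt.Stringer,
// %s/%v formatting prints a redacted placeholder instead of the key.
type Sensitive []byte

func (Sensitive) String() string { return "****" }

// Compile-time check that the redaction applies through the Stringer interface.
var _ fmt.Stringer = Sensitive(nil)
```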
dgraph/cmd/bulk/reduce.go: 1 addition & 1 deletion

@@ -485,8 +485,8 @@ func (r *reducer) reduce(partitionKeys [][]byte, mapItrs []*mapIterator, ci *cou
 	partitionKeys = append(partitionKeys, nil)

 	for i := 0; i < len(partitionKeys); i++ {
-		pkey := partitionKeys[i]
 		for _, itr := range mapItrs {
+			pkey := partitionKeys[i]
 			itr.Next(cbuf, pkey)
 		}
 		if cbuf.LenNoPadding() < 256<<20 {
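The hunk above only moves the pkey binding inside the iterator loop; behavior is unchanged, since partitionKeys[i] is fixed for the whole inner loop. A sketch of the surrounding merge pattern, with hypothetical mapIterator and buffer types standing in for the real ones, and the reduce step assumed:

```go
package sketch

// mapIterator is a hypothetical stand-in for the reducer's map iterator:
// Next is assumed to append every entry with key before pkey into the
// shared buffer, where a nil pkey drains whatever remains.
type mapIterator interface {
	Next(cbuf *buffer, pkey []byte)
}

// buffer is a hypothetical stand-in for the entry buffer.
type buffer struct{ data []byte }

func (b *buffer) lenNoPadding() int { return len(b.data) }

// merge mirrors the loop in the hunk above: gather entries from all map
// iterators one partition at a time, and hand the buffer to reduce once
// roughly 256 MB has accumulated.
func merge(partitionKeys [][]byte, itrs []mapIterator, cbuf *buffer, reduce func(*buffer)) {
	// The trailing nil key makes the final pass drain all remaining entries.
	partitionKeys = append(partitionKeys, nil)
	for i := 0; i < len(partitionKeys); i++ {
		for _, itr := range itrs {
			pkey := partitionKeys[i] // constant for the whole inner loop
			itr.Next(cbuf, pkey)
		}
		if cbuf.lenNoPadding() < 256<<20 {
			continue // keep batching below the ~256 MB threshold
		}
		reduce(cbuf)
	}
}
```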
dgraph/cmd/debug/run.go: 1 addition & 1 deletion

@@ -68,7 +68,7 @@ type flagOptions struct {
 	readTs        uint64
 	sizeHistogram bool
 	noKeys        bool
-	key           x.SensitiveByteSlice
+	key           x.Sensitive

 	// Options related to the WAL.
 	wdir string
dgraph/cmd/decrypt/decrypt.go: 1 addition & 1 deletion

@@ -32,7 +32,7 @@ import (

 type options struct {
 	// keyfile comes from the encryption_key_file or Vault flags
-	keyfile x.SensitiveByteSlice
+	keyfile x.Sensitive
 	file    string
 	output  string
 }
dgraph/cmd/live/run.go: 3 additions & 3 deletions

@@ -77,7 +77,7 @@ type options struct {
 	ludicrousMode   bool
 	upsertPredicate string
 	tmpDir          string
-	key             x.SensitiveByteSlice
+	key             x.Sensitive
 	namespaceToLoad uint64
 	preserveNs      bool
 }
@@ -232,7 +232,7 @@ func validateSchema(sch string, namespaces map[uint64]struct{}) error {
 }

 // processSchemaFile process schema for a given gz file.
-func (l *loader) processSchemaFile(ctx context.Context, file string, key x.SensitiveByteSlice,
+func (l *loader) processSchemaFile(ctx context.Context, file string, key x.Sensitive,
 	dgraphClient *dgo.Dgraph) error {
 	fmt.Printf("\nProcessing schema file %q\n", file)
 	if len(opt.authToken) > 0 {
@@ -459,7 +459,7 @@ func (l *loader) allocateUids(nqs []*api.NQuad) {

 // processFile forwards a file to the RDF or JSON processor as appropriate
 func (l *loader) processFile(ctx context.Context, fs filestore.FileStore, filename string,
-	key x.SensitiveByteSlice) error {
+	key x.Sensitive) error {

 	fmt.Printf("Processing data file %q\n", filename)
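The processFile comment above says the file is forwarded to the RDF or JSON processor as appropriate; a hypothetical sketch of that dispatch, keyed off the filename (the real loader's selection logic may differ):

```go
package sketch

import "strings"

// loadFormat illustrates picking a processor from the data-file name,
// ignoring a trailing .gz since inputs may be compressed.
func loadFormat(filename string) string {
	filename = strings.TrimSuffix(filename, ".gz")
	switch {
	case strings.HasSuffix(filename, ".rdf"):
		return "rdf"
	case strings.HasSuffix(filename, ".json"):
		return "json"
	default:
		return "unknown"
	}
}
```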
dgraph/cmd/root_ee.go: 0 additions & 1 deletion

@@ -21,7 +21,6 @@ import (
 func init() {
 	// subcommands already has the default subcommands, we append to EE ones to that.
 	subcommands = append(subcommands,
-		&backup.Restore,
 		&backup.LsBackup,
 		&backup.ExportBackup,
 		&acl.CmdAcl,