-
Notifications
You must be signed in to change notification settings - Fork 3.2k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Error running 1000s of tasks: "etcdserver: request is too large" #1186 #1264
Changes from 15 commits
cb0ab97
b365648
0af9f58
862307a
be7603f
9bedb33
1195457
c7b3d1c
835373f
5f51681
465a729
bac7d30
64f8e0c
ff7ddca
ec3cc51
fd8d8f7
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -8,3 +8,4 @@ cmd/**/debug | |
hack/**/debug | ||
debug.test | ||
*.iml | ||
examples/k8s-jobs1.yaml | ||
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -8,12 +8,13 @@ import ( | |
"strings" | ||
"text/tabwriter" | ||
|
||
"github.com/argoproj/argo/errors" | ||
wfv1 "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" | ||
"github.com/argoproj/argo/util/file" | ||
"github.com/argoproj/pkg/humanize" | ||
"github.com/ghodss/yaml" | ||
"github.com/spf13/cobra" | ||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" | ||
|
||
wfv1 "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" | ||
) | ||
|
||
const onExitSuffix = "onExit" | ||
|
@@ -36,6 +37,10 @@ func NewGetCommand() *cobra.Command { | |
if err != nil { | ||
log.Fatal(err) | ||
} | ||
err = CheckAndDecompress(wf) | ||
if err != nil { | ||
log.Fatal(err) | ||
} | ||
printWorkflow(wf, output) | ||
}, | ||
} | ||
|
@@ -45,6 +50,21 @@ func NewGetCommand() *cobra.Command { | |
return command | ||
} | ||
|
||
func CheckAndDecompress(wf *wfv1.Workflow) error { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. The Also nodes field is accessed in I would suggest to add method |
||
if wf.Status.CompressedNodes != "" { | ||
nodeContent, err := file.DecodeDecompressString(wf.Status.CompressedNodes) | ||
if err != nil { | ||
return errors.InternalWrapError(err) | ||
} | ||
err = json.Unmarshal([]byte(nodeContent), &wf.Status.Nodes) | ||
if err != nil { | ||
log.Fatal(err) | ||
} | ||
wf.Status.CompressedNodes = "" | ||
} | ||
return nil | ||
} | ||
|
||
func printWorkflow(wf *wfv1.Workflow, outFmt string) { | ||
switch outFmt { | ||
case "name": | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
# Stress test to test upper bounds of concurrent pods.
# NOTE(review): make generateName match the file name to ease e2e debugging,
# and move this under test/e2e/functional since it is a test, not an example.
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: pod-limits-
spec:
  entrypoint: pod-limits
  parallelism: 5
  arguments:
    parameters:
    - name: limit
      value: 16

  templates:
  - name: pod-limits
    steps:
    - - name: run-pod
        template: run-pod
        withSequence:
          count: "{{workflow.parameters.limit}}"

  - name: run-pod
    script:
      image: alpine:latest
      command: [sh]
      source: head /dev/urandom | tr -dc A-Za-z0-9 | head -c 2048 ; echo ''
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -477,6 +477,9 @@ type WorkflowStatus struct { | |
// A human readable message indicating details about why the workflow is in this condition. | ||
Message string `json:"message,omitempty"` | ||
|
||
// Compressed and base64 encoded Nodes map | ||
CompressedNodes string `json:"compressedNodes,omitempty"` | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Please add comment There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Please add spaces after all |
||
|
||
// Nodes is a mapping between a node ID and the node's status. | ||
Nodes map[string]NodeStatus `json:"nodes,omitempty"` | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,97 @@ | ||
package file | ||
|
||
import ( | ||
"archive/tar" | ||
"bytes" | ||
"compress/gzip" | ||
"encoding/base64" | ||
"io" | ||
"io/ioutil" | ||
"os" | ||
"strings" | ||
|
||
log "github.com/sirupsen/logrus" | ||
) | ||
|
||
// IsFileOrDirExistInGZip return true if file or directory exists in GZip file | ||
func IsFileOrDirExistInGZip(sourcePath string, gzipFilePath string) bool { | ||
|
||
fi, err := os.Open(gzipFilePath) | ||
|
||
if os.IsNotExist(err) { | ||
return false | ||
} | ||
defer close(fi) | ||
|
||
fz, err := gzip.NewReader(fi) | ||
if err != nil { | ||
return false | ||
} | ||
tr := tar.NewReader(fz) | ||
for { | ||
hdr, err := tr.Next() | ||
if err == io.EOF { | ||
break | ||
} | ||
if err != nil { | ||
|
||
return false | ||
} | ||
if hdr.FileInfo().IsDir() && strings.Contains(strings.Trim(hdr.Name, "/"), strings.Trim(sourcePath, "/")) { | ||
return true | ||
} | ||
if strings.Contains(sourcePath, hdr.Name) && hdr.Size > 0 { | ||
return true | ||
} | ||
} | ||
return false | ||
} | ||
|
||
//Close the file | ||
func close(f io.Closer) { | ||
err := f.Close() | ||
if err != nil { | ||
log.Warnf("Failed to close the file/writer/reader. %v", err) | ||
} | ||
} | ||
|
||
// CompressEncodeString will return the compressed string with base64 encoded | ||
func CompressEncodeString(content string) string { | ||
return base64.StdEncoding.EncodeToString(CompressContent([]byte(content))) | ||
} | ||
|
||
// DecodeDecompressString will return decode and decompress the | ||
func DecodeDecompressString(content string) (string, error) { | ||
|
||
buf, err := base64.StdEncoding.DecodeString(content) | ||
if err != nil { | ||
return "", err | ||
} | ||
dBuf, err := DecompressContent(buf) | ||
if err != nil { | ||
return "", err | ||
} | ||
return string(dBuf), nil | ||
} | ||
|
||
// CompressContent will compress the byte array using zip writer | ||
func CompressContent(content []byte) []byte { | ||
var buf bytes.Buffer | ||
zipWriter := gzip.NewWriter(&buf) | ||
|
||
_, err := zipWriter.Write(content) | ||
if err != nil { | ||
log.Warnf("Error in compressing: %v", err) | ||
} | ||
close(zipWriter) | ||
return buf.Bytes() | ||
} | ||
|
||
// DecompressContent will return the uncompressed content | ||
func DecompressContent(content []byte) ([]byte, error) { | ||
|
||
buf := bytes.NewReader(content) | ||
gZipReader, _ := gzip.NewReader(buf) | ||
defer close(gZipReader) | ||
return ioutil.ReadAll(gZipReader) | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@ | ||
package file | ||
|
||
import ( | ||
"testing" | ||
|
||
"github.com/stretchr/testify/assert" | ||
) | ||
|
||
// TestResubmitWorkflowWithOnExit ensures we do not carry over the onExit node even if successful | ||
func TestCompressContentString(t *testing.T) { | ||
content := "{\"pod-limits-rrdm8-591645159\":{\"id\":\"pod-limits-rrdm8-591645159\",\"name\":\"pod-limits-rrdm8[0]." + | ||
"run-pod(0:0)\",\"displayName\":\"run-pod(0:0)\",\"type\":\"Pod\",\"templateName\":\"run-pod\",\"phase\":" + | ||
"\"Succeeded\",\"boundaryID\":\"pod-limits-rrdm8\",\"startedAt\":\"2019-03-07T19:14:50Z\",\"finishedAt\":" + | ||
"\"2019-03-07T19:14:55Z\"}}" | ||
|
||
compString := CompressEncodeString(content) | ||
|
||
resultString, _ := DecodeDecompressString(compString) | ||
|
||
assert.Equal(t, content, resultString) | ||
} |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Why is this under .gitignore?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Sorry these files by mistake got it in PR I will revert it