diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..cf4304334 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,6 @@ +* +!/tls/ +!tinkerbell-server +!entrypoint.sh +!deploy/migrate +!deploy/docker-entrypoint-initdb.d/tinkerbell-init.sql diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..bdf58f7f1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +tinkerbell-server +**/tinkerbell-cli +**/tinkerbell-worker +bin/ +certs/ + diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..c14c439ce --- /dev/null +++ b/Dockerfile @@ -0,0 +1,11 @@ +FROM alpine:3.7 + +ENTRYPOINT [ "/tinkerbell" ] +EXPOSE 42113 +EXPOSE 42114 + +RUN apk add --no-cache --update --upgrade ca-certificates postgresql-client +RUN apk add --no-cache --update --upgrade --repository=http://dl-cdn.alpinelinux.org/alpine/edge/testing cfssl +COPY deploy/migrate /migrate +COPY deploy/docker-entrypoint-initdb.d/tinkerbell-init.sql /init.sql +COPY tinkerbell-server /tinkerbell diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..0ab8209e2 --- /dev/null +++ b/Makefile @@ -0,0 +1,29 @@ +server := tinkerbell-server +cli := tinkerbell-cli +worker := tinkerbell-worker +binaries := ${server} ${cli} ${worker} +all: ${binaries} + +.PHONY: server ${binaries} cli worker test +server: ${server} +cli: ${cli} +worker : ${worker} + +${bindir}: + mkdir -p $@/ + +${server}: + CGO_ENABLED=0 go build -o $@ . 
+ +${cli}: + CGO_ENABLED=0 go build -o ./cmd/tinkerbell/$@ ./cmd/tinkerbell + +${worker}: + CGO_ENABLED=0 go build -o ./worker/$@ ./worker/ + +run: ${binaries} + docker-compose up -d --build db + docker-compose up --build tinkerbell boots +test: + go clean -testcache + go test ./test -v diff --git a/README.md b/README.md index e945fcc83..d79744e38 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,38 @@ -# tinkerbell +# Tinkerbell [https://tinkerbell.org](https://tinkerbell.org) -https://tinkerbell.org +At the highest level `tinkerbell` is the service responsible for handling the workflows. It comprises of a server and a CLI, which communicate over gRPC. The CLI is used to create a workflow along with its building blocks, i.e., template and target. + +# Packet Workflow + +A Packet Workflow is an open-source microservice that’s responsible for handling flexible, bare metal +provisioning workflows, that is... + - standalone and does not need the Packet API to function + - contains `Boots`, `Tinkerbell`, `Osie`, and workers + - can bootstrap any remote worker using `Boots + Osie` + - can run any set of actions as Docker container runtimes + - receive, manipulate, and save runtime data + +## Content + + - [Setup](docs/setup.md) + - [Components](docs/components.md) + - [Boots](docs/components.md#boots) + - [Osie](docs/components.md#osie) + - [Tinkerbell](docs/components.md#tinkerbell) + - [Hegel](docs/components.md#hegel) + - [Database](docs/components.md#database) + - [Image Registry](docs/components.md#registry) + - [Elasticsearch](docs/components.md#elastic) + - [Fluent Bit](docs/components.md#cacher) + - [Kibana](docs/components.md#kibana) + - [Architecture](docs/architecture.md) + - [Example: First Good Workflow](docs/first-good-workflow.md) + - [Concepts](docs/concepts.md) + - [Template](docs/concepts.md#template) + - [Target](docs/concepts.md#target) + - [Provisioner](docs/concepts.md#provisioner) + - [Worker](docs/concepts.md#worker) + - [Ephemeral 
Data](docs/concepts.md#ephemeral-data) + - [Writing a Workflow](docs/writing-workflow.md) + - [Tinkerbell CLI Reference](docs/cli/README.md) + - [Troubleshooting](docs/troubleshoot.md) diff --git a/client/main.go b/client/main.go new file mode 100644 index 000000000..eba193480 --- /dev/null +++ b/client/main.go @@ -0,0 +1,80 @@ +package client + +import ( + "crypto/x509" + "io/ioutil" + "log" + "net/http" + "os" + + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/packethost/tinkerbell/protos/target" + "github.com/packethost/tinkerbell/protos/template" + "github.com/packethost/tinkerbell/protos/workflow" + "github.com/pkg/errors" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" +) + +// gRPC clients +var ( + TemplateClient template.TemplateClient + TargetClient target.TargetClient + WorkflowClient workflow.WorkflowSvcClient + HardwareClient hardware.HardwareServiceClient +) + +// GetConnection returns a gRPC client connection +func GetConnection() (*grpc.ClientConn, error) { + certURL := os.Getenv("TINKERBELL_CERT_URL") + if certURL == "" { + return nil, errors.New("undefined TINKERBELL_CERT_URL") + } + resp, err := http.Get(certURL) + if err != nil { + return nil, errors.Wrap(err, "fetch cert") + } + defer resp.Body.Close() + + certs, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, errors.Wrap(err, "read cert") + } + + cp := x509.NewCertPool() + ok := cp.AppendCertsFromPEM(certs) + if !ok { + return nil, errors.Wrap(err, "parse cert") + } + + grpcAuthority := os.Getenv("TINKERBELL_GRPC_AUTHORITY") + if grpcAuthority == "" { + return nil, errors.New("undefined TINKERBELL_GRPC_AUTHORITY") + } + creds := credentials.NewClientTLSFromCert(cp, "") + conn, err := grpc.Dial(grpcAuthority, grpc.WithTransportCredentials(creds)) + if err != nil { + return nil, errors.Wrap(err, "connect to tinkerbell server") + } + return conn, nil +} + +// Setup : create a connection to server +func Setup() { + conn, err := GetConnection() + 
if err != nil { + log.Fatal(err) + } + TemplateClient = template.NewTemplateClient(conn) + TargetClient = target.NewTargetClient(conn) + WorkflowClient = workflow.NewWorkflowSvcClient(conn) + HardwareClient = hardware.NewHardwareServiceClient(conn) +} + +func NewTinkerbellClient() (hardware.HardwareServiceClient, error) { + conn, err := GetConnection() + if err != nil { + log.Fatal(err) + } + return hardware.NewHardwareServiceClient(conn), nil +} diff --git a/cmd/rover/.gitkeep b/cmd/rover/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/cmd/tinkerbell/Dockerfile b/cmd/tinkerbell/Dockerfile new file mode 100644 index 000000000..85a07d76d --- /dev/null +++ b/cmd/tinkerbell/Dockerfile @@ -0,0 +1,7 @@ +FROM alpine:3.7 + +CMD sleep 60d + +RUN apk add --no-cache --update --upgrade ca-certificates +COPY tinkerbell-cli /bin/tinkerbell +COPY sample.tmpl /tmp diff --git a/cmd/tinkerbell/cmd/hardware.go b/cmd/tinkerbell/cmd/hardware.go new file mode 100644 index 000000000..624c341b7 --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware.go @@ -0,0 +1,25 @@ +package cmd + +import ( + "fmt" + + "github.com/packethost/tinkerbell/cmd/tinkerbell/cmd/hardware" + "github.com/spf13/cobra" +) + +var hardwareCmd = &cobra.Command{ + Use: "hardware", + Short: "tinkerbell hardware client", + Example: "tinkerbell hardware [command]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires arguments", c.UseLine()) + } + return nil + }, +} + +func init() { + hardwareCmd.AddCommand(hardware.SubCommands...) 
+ rootCmd.AddCommand(hardwareCmd) +} diff --git a/cmd/tinkerbell/cmd/hardware/all.go b/cmd/tinkerbell/cmd/hardware/all.go new file mode 100644 index 000000000..80a7e837e --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/all.go @@ -0,0 +1,37 @@ +package hardware + +import ( + "context" + "fmt" + "io" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/spf13/cobra" +) + +// allCmd represents the all command +var allCmd = &cobra.Command{ + Use: "all", + Short: "Get all known hardware for facility", + Run: func(cmd *cobra.Command, args []string) { + alls, err := client.HardwareClient.All(context.Background(), &hardware.Empty{}) + if err != nil { + log.Fatal(err) + } + + var hw *hardware.Hardware + err = nil + for hw, err = alls.Recv(); err == nil && hw != nil; hw, err = alls.Recv() { + fmt.Println(hw.JSON) + } + if err != nil && err != io.EOF { + log.Fatal(err) + } + }, +} + +func init() { + SubCommands = append(SubCommands, allCmd) +} diff --git a/cmd/tinkerbell/cmd/hardware/commands.go b/cmd/tinkerbell/cmd/hardware/commands.go new file mode 100644 index 000000000..40fbf9d11 --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/commands.go @@ -0,0 +1,25 @@ +package hardware + +import ( + "fmt" + + "github.com/pkg/errors" + uuid "github.com/satori/go.uuid" + "github.com/spf13/cobra" +) + +// SubCommands holds the sub commands for template command +// Example: tinkerbell template [subcommand] +var SubCommands []*cobra.Command + +func verifyUUIDs(args []string) error { + if len(args) < 1 { + return errors.New("requires at least one id") + } + for _, arg := range args { + if _, err := uuid.FromString(arg); err != nil { + return fmt.Errorf("invalid uuid: %s", arg) + } + } + return nil +} diff --git a/cmd/tinkerbell/cmd/hardware/id.go b/cmd/tinkerbell/cmd/hardware/id.go new file mode 100644 index 000000000..fb9de27ee --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/id.go @@ -0,0 +1,36 @@ +// Copyright © 2018 
packet.net + +package hardware + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/spf13/cobra" +) + +// idCmd represents the id command +var idCmd = &cobra.Command{ + Use: "id", + Short: "Get hardware by id", + Example: "tinkerbell hardware id 224ee6ab-ad62-4070-a900-ed816444cec0 cb76ae54-93e9-401c-a5b2-d455bb3800b1", + Args: func(_ *cobra.Command, args []string) error { + return verifyUUIDs(args) + }, + Run: func(cmd *cobra.Command, args []string) { + for _, id := range args { + hw, err := client.HardwareClient.ByID(context.Background(), &hardware.GetRequest{ID: id}) + if err != nil { + log.Fatal(err) + } + fmt.Println(hw.JSON) + } + }, +} + +func init() { + SubCommands = append(SubCommands, idCmd) +} diff --git a/cmd/tinkerbell/cmd/hardware/ingest.go b/cmd/tinkerbell/cmd/hardware/ingest.go new file mode 100644 index 000000000..1e159286d --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/ingest.go @@ -0,0 +1,32 @@ +// Copyright © 2018 packet.net + +package hardware + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/spf13/cobra" +) + +// ingestCmd represents the ingest command +var ingestCmd = &cobra.Command{ + Use: "ingest", + Short: "Trigger tinkerbell to ingest", + Long: "This command only signals tinkerbell to ingest if it has not already done so.", + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("ingest called") + _, err := client.HardwareClient.Ingest(context.Background(), &hardware.Empty{}) + if err != nil { + log.Fatal(err) + } + }, +} + +func init() { + SubCommands = append(SubCommands, ingestCmd) + +} diff --git a/cmd/tinkerbell/cmd/hardware/ip.go b/cmd/tinkerbell/cmd/hardware/ip.go new file mode 100644 index 000000000..50e3953b2 --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/ip.go @@ -0,0 +1,42 @@ +// Copyright © 2018 packet.net + +package 
hardware + +import ( + "context" + "fmt" + "log" + "net" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/spf13/cobra" +) + +// ipCmd represents the ip command +var ipCmd = &cobra.Command{ + Use: "ip", + Short: "Get hardware by any associated ip", + Example: "tinkerbell hardware ip 10.0.0.2 10.0.0.3", + Args: func(_ *cobra.Command, args []string) error { + for _, arg := range args { + if net.ParseIP(arg) == nil { + return fmt.Errorf("invalid ip: %s", arg) + } + } + return nil + }, + Run: func(cmd *cobra.Command, args []string) { + for _, ip := range args { + hw, err := client.HardwareClient.ByIP(context.Background(), &hardware.GetRequest{IP: ip}) + if err != nil { + log.Fatal(err) + } + fmt.Println(hw.JSON) + } + }, +} + +func init() { + SubCommands = append(SubCommands, ipCmd) +} diff --git a/cmd/tinkerbell/cmd/hardware/mac.go b/cmd/tinkerbell/cmd/hardware/mac.go new file mode 100644 index 000000000..e4544b445 --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/mac.go @@ -0,0 +1,42 @@ +// Copyright © 2018 packet.net + +package hardware + +import ( + "context" + "fmt" + "log" + "net" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/spf13/cobra" +) + +// macCmd represents the mac command +var macCmd = &cobra.Command{ + Use: "mac", + Short: "Get hardware by any associated mac", + Example: "tinkerbell hardware mac 00:00:00:00:00:01 00:00:00:00:00:02", + Args: func(_ *cobra.Command, args []string) error { + for _, arg := range args { + if _, err := net.ParseMAC(arg); err != nil { + return fmt.Errorf("invalid mac: %s", arg) + } + } + return nil + }, + Run: func(cmd *cobra.Command, args []string) { + for _, mac := range args { + hw, err := client.HardwareClient.ByMAC(context.Background(), &hardware.GetRequest{MAC: mac}) + if err != nil { + log.Fatal(err) + } + fmt.Println(hw.JSON) + } + }, +} + +func init() { + SubCommands = append(SubCommands, 
macCmd) +} diff --git a/cmd/tinkerbell/cmd/hardware/push.go b/cmd/tinkerbell/cmd/hardware/push.go new file mode 100644 index 000000000..8a644c446 --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/push.go @@ -0,0 +1,46 @@ +// Copyright © 2018 packet.net + +package hardware + +import ( + "context" + "encoding/json" + "fmt" + "log" + "strings" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/spf13/cobra" +) + +// pushCmd represents the push command +var pushCmd = &cobra.Command{ + Use: "push", + Short: "Push new hardware to tinkerbell", + Example: `tinkerbell hardware push '{"id":"2a1519e5-781c-4251-a979-3a6bedb8ba59", ...}' '{"id:"315169a4-a863-43ef-8817-2b6a57bd1eef", ...}'`, + Args: func(_ *cobra.Command, args []string) error { + s := struct { + ID string + }{} + for _, arg := range args { + if json.NewDecoder(strings.NewReader(arg)).Decode(&s) != nil { + return fmt.Errorf("invalid json: %s", arg) + } else if s.ID == "" { + return fmt.Errorf("invalid json, ID is required: %s", arg) + } + } + return nil + }, + Run: func(cmd *cobra.Command, args []string) { + for _, j := range args { + if _, err := client.HardwareClient.Push(context.Background(), &hardware.PushRequest{Data: j}); err != nil { + log.Fatal(err) + } + } + }, +} + +func init() { + SubCommands = append(SubCommands, pushCmd) +} diff --git a/cmd/tinkerbell/cmd/hardware/watch.go b/cmd/tinkerbell/cmd/hardware/watch.go new file mode 100644 index 000000000..0abbbda00 --- /dev/null +++ b/cmd/tinkerbell/cmd/hardware/watch.go @@ -0,0 +1,53 @@ +// Copyright © 2018 packet.net + +package hardware + +import ( + "context" + "fmt" + "io" + "log" + "sync" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/hardware" + "github.com/spf13/cobra" +) + +// watchCmd represents the watch command +var watchCmd = &cobra.Command{ + Use: "watch", + Short: "Register to watch an id for any changes", + Example: "tinkerbell hardware watch 
224ee6ab-ad62-4070-a900-ed816444cec0 cb76ae54-93e9-401c-a5b2-d455bb3800b1", + Args: func(_ *cobra.Command, args []string) error { + return verifyUUIDs(args) + }, + Run: func(cmd *cobra.Command, args []string) { + stdoutLock := sync.Mutex{} + for _, id := range args { + go func(id string) { + stream, err := client.HardwareClient.Watch(context.Background(), &hardware.GetRequest{ID: id}) + if err != nil { + log.Fatal(err) + } + + var hw *hardware.Hardware + err = nil + for hw, err = stream.Recv(); err == nil && hw != nil; hw, err = stream.Recv() { + stdoutLock.Lock() + fmt.Println(hw.JSON) + stdoutLock.Unlock() + } + if err != nil && err != io.EOF { + log.Fatal(err) + } + }(id) + } + select {} + }, +} + +func init() { + watchCmd.Flags().String("id", "", "id of the hardware") + SubCommands = append(SubCommands, watchCmd) +} diff --git a/cmd/tinkerbell/cmd/root.go b/cmd/tinkerbell/cmd/root.go new file mode 100644 index 000000000..ebd33f5cf --- /dev/null +++ b/cmd/tinkerbell/cmd/root.go @@ -0,0 +1,43 @@ +package cmd + +import ( + "fmt" + "os" + + "github.com/packethost/tinkerbell/client" + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +var cfgFile string + +// rootCmd represents the base command when called without any subcommands +var rootCmd = &cobra.Command{ + Use: "tinkerbell", + Short: "tinkerbell client", +} + +// Execute adds all child commands to the root command and sets flags appropriately. +// This is called by main.main(). It only needs to happen once to the rootCmd. +func Execute() { + if err := rootCmd.Execute(); err != nil { + fmt.Println(err) + os.Exit(1) + } +} + +func init() { + cobra.OnInitialize(initConfig) + rootCmd.PersistentFlags().StringVarP(&cfgFile, "facility", "f", "", "used to build grcp and http urls") + client.Setup() +} + +// initConfig reads in config file and ENV variables if set. +func initConfig() { + viper.AutomaticEnv() // read in environment variables that match + + // If a config file is found, read it in. 
+ if err := viper.ReadInConfig(); err == nil { + fmt.Println("Using config file:", viper.ConfigFileUsed()) + } +} diff --git a/cmd/tinkerbell/cmd/target.go b/cmd/tinkerbell/cmd/target.go new file mode 100644 index 000000000..a0c5d4c4b --- /dev/null +++ b/cmd/tinkerbell/cmd/target.go @@ -0,0 +1,26 @@ +package cmd + +import ( + "fmt" + + "github.com/packethost/tinkerbell/cmd/tinkerbell/cmd/target" + "github.com/spf13/cobra" +) + +// templateCmd represents the template sub-command +var targetCmd = &cobra.Command{ + Use: "target", + Short: "tinkerbell target client", + Example: "tinkerbell target [command]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires arguments", c.UseLine()) + } + return nil + }, +} + +func init() { + targetCmd.AddCommand(target.SubCommands...) + rootCmd.AddCommand(targetCmd) +} diff --git a/cmd/tinkerbell/cmd/target/commands.go b/cmd/tinkerbell/cmd/target/commands.go new file mode 100644 index 000000000..4dad988cf --- /dev/null +++ b/cmd/tinkerbell/cmd/target/commands.go @@ -0,0 +1,59 @@ +package target + +import ( + "encoding/json" + "errors" + "fmt" + "net" + + uuid "github.com/satori/go.uuid" + "github.com/spf13/cobra" +) + +// SubCommands holds the sub commands for targets command +// Example: tinkerbell targets [subcommand] +var SubCommands []*cobra.Command + +func verifyUUIDs(args []string) error { + if len(args) < 1 { + return errors.New("requires at least one id") + } + for _, arg := range args { + if _, err := uuid.FromString(arg); err != nil { + return fmt.Errorf("invalid uuid: %s", arg) + } + } + return nil +} + +type machine map[string]string + +type tmap struct { + Targets map[string]machine `json:"targets"` +} + +func isValidData(arg []byte) error { + var tr tmap + if json.Unmarshal([]byte(arg), &tr) != nil { + return fmt.Errorf("invalid json: %s", arg) + } + for _, v := range tr.Targets { + for key, val := range v { + switch key { + case string("mac_addr"): + _, err := 
net.ParseMAC(val) + if err != nil { + return err + } + case string("ipv4_addr"): + ip := net.ParseIP(val) + if ip == nil || ip.To4() == nil { + return fmt.Errorf("invalid ip_addr: %s", val) + } + default: + return fmt.Errorf("invalid key \"%s\" in data. it should be \"mac_addr\" or \"ip_addr\" only", key) + } + } + } + return nil +} diff --git a/cmd/tinkerbell/cmd/target/create.go b/cmd/tinkerbell/cmd/target/create.go new file mode 100644 index 000000000..01a26cd4b --- /dev/null +++ b/cmd/tinkerbell/cmd/target/create.go @@ -0,0 +1,35 @@ +package target + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/target" + "github.com/spf13/cobra" +) + +// pushCmd represents the push command +var createTargets = &cobra.Command{ + Use: "create", + Short: "create a target", + Example: `tinkerbell target create '{"targets": {"machine1": {"mac_addr": "02:42:db:98:4b:1e"},"machine2": {"ipv4_addr": "192.168.1.5"}}}'`, + Run: func(cmd *cobra.Command, args []string) { + for _, j := range args { + err := isValidData([]byte(j)) + if err != nil { + log.Fatal(err) + } + uuid, err := client.TargetClient.CreateTargets(context.Background(), &target.PushRequest{Data: j}) + if err != nil { + log.Fatal(err) + } + fmt.Println("Created Target:", uuid.Uuid) + } + }, +} + +func init() { + SubCommands = append(SubCommands, createTargets) +} diff --git a/cmd/tinkerbell/cmd/target/delete.go b/cmd/tinkerbell/cmd/target/delete.go new file mode 100644 index 000000000..eccd519c2 --- /dev/null +++ b/cmd/tinkerbell/cmd/target/delete.go @@ -0,0 +1,32 @@ +package target + +import ( + "context" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/target" + "github.com/spf13/cobra" +) + +// idCmd represents the id command +var deleteCmd = &cobra.Command{ + Use: "delete", + Short: "delete a target", + Example: "tinkerbell target delete 224ee6ab-ad62-4070-a900-ed816444cec0 
cb76ae54-93e9-401c-a5b2-d455bb3800b1", + Args: func(_ *cobra.Command, args []string) error { + return verifyUUIDs(args) + }, + Run: func(cmd *cobra.Command, args []string) { + for _, id := range args { + _, err := client.TargetClient.DeleteTargetByID(context.Background(), &target.GetRequest{ID: id}) + if err != nil { + log.Fatal(err) + } + } + }, +} + +func init() { + SubCommands = append(SubCommands, deleteCmd) +} diff --git a/cmd/tinkerbell/cmd/target/get.go b/cmd/tinkerbell/cmd/target/get.go new file mode 100644 index 000000000..93af857f4 --- /dev/null +++ b/cmd/tinkerbell/cmd/target/get.go @@ -0,0 +1,34 @@ +package target + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/target" + "github.com/spf13/cobra" +) + +// idCmd represents the id command +var getCmd = &cobra.Command{ + Use: "get", + Short: "get a target", + Example: "tinkerbell target get 224ee6ab-ad62-4070-a900-ed816444cec0 cb76ae54-93e9-401c-a5b2-d455bb3800b1", + Args: func(_ *cobra.Command, args []string) error { + return verifyUUIDs(args) + }, + Run: func(cmd *cobra.Command, args []string) { + for _, id := range args { + tr, err := client.TargetClient.TargetByID(context.Background(), &target.GetRequest{ID: id}) + if err != nil { + log.Fatal(err) + } + fmt.Println(tr.JSON) + } + }, +} + +func init() { + SubCommands = append(SubCommands, getCmd) +} diff --git a/cmd/tinkerbell/cmd/target/list.go b/cmd/tinkerbell/cmd/target/list.go new file mode 100644 index 000000000..28f458855 --- /dev/null +++ b/cmd/tinkerbell/cmd/target/list.go @@ -0,0 +1,58 @@ +package target + +import ( + "context" + "fmt" + "io" + "log" + "os" + + "github.com/jedib0t/go-pretty/table" + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/target" + "github.com/spf13/cobra" +) + +// listCmd represents the list subcommand for target command +var listCmd = &cobra.Command{ + Use: "list", + Short: "list all targets", + 
Example: "tinkerbell target list", + Args: func(c *cobra.Command, args []string) error { + if len(args) != 0 { + return fmt.Errorf("%v takes no arguments", c.UseLine()) + } + return nil + }, + Run: func(cmd *cobra.Command, args []string) { + t := table.NewWriter() + t.SetOutputMirror(os.Stdout) + t.AppendHeader(table.Row{"Target Id", "Target Data"}) + listTargets(cmd, t) + t.Render() + }, +} + +func listTargets(cmd *cobra.Command, t table.Writer) { + list, err := client.TargetClient.ListTargets(context.Background(), &target.Empty{}) + if err != nil { + log.Fatal(err) + } + + var tmp *target.TargetList + err = nil + for tmp, err = list.Recv(); err == nil && tmp.Data != ""; tmp, err = list.Recv() { + t.AppendRows([]table.Row{ + {tmp.ID, tmp.Data}, + }) + } + + if err != nil && err != io.EOF { + log.Fatal(err) + } +} + +func init() { + listCmd.DisableFlagsInUseLine = true + SubCommands = append(SubCommands, listCmd) +} diff --git a/cmd/tinkerbell/cmd/target/update.go b/cmd/tinkerbell/cmd/target/update.go new file mode 100644 index 000000000..fc29c842d --- /dev/null +++ b/cmd/tinkerbell/cmd/target/update.go @@ -0,0 +1,59 @@ +package target + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/target" + "github.com/spf13/cobra" +) + +var ( + id = "uuid" + data = "jsonData" + uid string + jsonData string +) + +// createCmd represents the create sub command for template command +var updateCmd = &cobra.Command{ + Use: "update", + Short: "update a target", + Example: "tinkerbell target update [flags]", + Run: func(c *cobra.Command, args []string) { + err := validateData(c, args) + if err != nil { + log.Fatal(err) + } + updateTargets(c, args) + }, +} + +func addFlags() { + flags := updateCmd.PersistentFlags() + flags.StringVarP(&uid, "uuid", "u", "", "id for targets to be updated") + flags.StringVarP(&jsonData, "jsondata", "j", "", "JSON data which needs to be pushed") + 
updateCmd.MarkPersistentFlagRequired(id) + updateCmd.MarkPersistentFlagRequired(data) +} + +func validateData(c *cobra.Command, args []string) error { + err := isValidData([]byte(jsonData)) + if err != nil { + fmt.Println(err) + return err + } + return nil +} +func updateTargets(c *cobra.Command, args []string) { + if _, err := client.TargetClient.UpdateTargetByID(context.Background(), &target.UpdateRequest{ID: uid, Data: jsonData}); err != nil { + log.Fatal(err) + } +} + +func init() { + addFlags() + SubCommands = append(SubCommands, updateCmd) +} diff --git a/cmd/tinkerbell/cmd/template.go b/cmd/tinkerbell/cmd/template.go new file mode 100644 index 000000000..e3303f16d --- /dev/null +++ b/cmd/tinkerbell/cmd/template.go @@ -0,0 +1,26 @@ +package cmd + +import ( + "fmt" + + "github.com/packethost/tinkerbell/cmd/tinkerbell/cmd/template" + "github.com/spf13/cobra" +) + +// templateCmd represents the template sub-command +var templateCmd = &cobra.Command{ + Use: "template", + Short: "tinkerbell template client", + Example: "tinkerbell template [command]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires arguments", c.UseLine()) + } + return nil + }, +} + +func init() { + templateCmd.AddCommand(template.SubCommands...) 
+ rootCmd.AddCommand(templateCmd) +} diff --git a/cmd/tinkerbell/cmd/template/commands.go b/cmd/tinkerbell/cmd/template/commands.go new file mode 100644 index 000000000..3cddeac5a --- /dev/null +++ b/cmd/tinkerbell/cmd/template/commands.go @@ -0,0 +1,7 @@ +package template + +import "github.com/spf13/cobra" + +// SubCommands holds the sub commands for template command +// Example: tinkerbell template [subcommand] +var SubCommands []*cobra.Command diff --git a/cmd/tinkerbell/cmd/template/create.go b/cmd/tinkerbell/cmd/template/create.go new file mode 100644 index 000000000..7bff065b5 --- /dev/null +++ b/cmd/tinkerbell/cmd/template/create.go @@ -0,0 +1,76 @@ +package template + +import ( + "context" + "fmt" + "io/ioutil" + "log" + "os" + tt "text/template" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/template" + "github.com/spf13/cobra" +) + +var ( + fPath = "path" + fName = "name" + filePath string + templateName string +) + +// createCmd represents the create subcommand for template command +var createCmd = &cobra.Command{ + Use: "create", + Short: "create a workflow template ", + Example: "tinkerbell template create [flags]", + Run: func(c *cobra.Command, args []string) { + validateTemplate() + createTemplate(c, args) + }, +} + +func addFlags() { + flags := createCmd.PersistentFlags() + flags.StringVarP(&filePath, "path", "p", "", "path to the template file") + flags.StringVarP(&templateName, "name", "n", "", "unique name for the template (alphanumeric)") + + createCmd.MarkPersistentFlagRequired(fPath) + createCmd.MarkPersistentFlagRequired(fName) +} + +func validateTemplate() { + _, err := tt.ParseFiles(filePath) + if err != nil { + log.Fatalln(err) + } +} + +func readTemplateData() []byte { + f, err := os.Open(filePath) + if err != nil { + log.Fatal(err) + } + defer f.Close() + + data, err := ioutil.ReadAll(f) + if err != nil { + log.Fatal(err) + } + return data +} + +func createTemplate(c *cobra.Command, args 
[]string) { + req := template.WorkflowTemplate{Name: templateName, Data: readTemplateData()} + res, err := client.TemplateClient.CreateTemplate(context.Background(), &req) + if err != nil { + log.Fatal(err) + } + fmt.Println("Created Template: ", res.Id) +} + +func init() { + addFlags() + SubCommands = append(SubCommands, createCmd) +} diff --git a/cmd/tinkerbell/cmd/template/delete.go b/cmd/tinkerbell/cmd/template/delete.go new file mode 100644 index 000000000..f00dab797 --- /dev/null +++ b/cmd/tinkerbell/cmd/template/delete.go @@ -0,0 +1,43 @@ +package template + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/template" + uuid "github.com/satori/go.uuid" + "github.com/spf13/cobra" +) + +// deleteCmd represents the delete subcommand for template command +var deleteCmd = &cobra.Command{ + Use: "delete [id]", + Short: "delete a template", + Example: "tinkerbell template delete [id]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires an argument", c.UseLine()) + } + for _, arg := range args { + if _, err := uuid.FromString(arg); err != nil { + return fmt.Errorf("invalid uuid: %s", arg) + } + } + return nil + }, + Run: func(c *cobra.Command, args []string) { + for _, arg := range args { + req := template.GetRequest{Id: arg} + if _, err := client.TemplateClient.DeleteTemplate(context.Background(), &req); err != nil { + log.Fatal(err) + } + } + }, +} + +func init() { + deleteCmd.DisableFlagsInUseLine = true + SubCommands = append(SubCommands, deleteCmd) +} diff --git a/cmd/tinkerbell/cmd/template/get.go b/cmd/tinkerbell/cmd/template/get.go new file mode 100644 index 000000000..f3c277752 --- /dev/null +++ b/cmd/tinkerbell/cmd/template/get.go @@ -0,0 +1,45 @@ +package template + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/template" + uuid 
"github.com/satori/go.uuid" + "github.com/spf13/cobra" +) + +// getCmd represents the get subcommand for template command +var getCmd = &cobra.Command{ + Use: "get [id]", + Short: "get a template", + Example: "tinkerbell template get [id]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires an argument", c.UseLine()) + } + for _, arg := range args { + if _, err := uuid.FromString(arg); err != nil { + return fmt.Errorf("invalid uuid: %s", arg) + } + } + return nil + }, + Run: func(c *cobra.Command, args []string) { + for _, arg := range args { + req := template.GetRequest{Id: arg} + t, err := client.TemplateClient.GetTemplate(context.Background(), &req) + if err != nil { + log.Fatal(err) + } + fmt.Println(string(t.Data)) + } + }, +} + +func init() { + getCmd.DisableFlagsInUseLine = true + SubCommands = append(SubCommands, getCmd) +} diff --git a/cmd/tinkerbell/cmd/template/list.go b/cmd/tinkerbell/cmd/template/list.go new file mode 100644 index 000000000..06afbd54c --- /dev/null +++ b/cmd/tinkerbell/cmd/template/list.go @@ -0,0 +1,69 @@ +package template + +import ( + "context" + "fmt" + "io" + "log" + "os" + "time" + + "github.com/jedib0t/go-pretty/table" + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/template" + "github.com/spf13/cobra" +) + +// table headers +var ( + id = "Template ID" + name = "Template Name" + createdAt = "Created At" + updatedAt = "Updated At" +) + +// listCmd represents the list subcommand for template command +var listCmd = &cobra.Command{ + Use: "list", + Short: "list all saved templates", + Example: "tinkerbell template list", + Args: func(c *cobra.Command, args []string) error { + if len(args) != 0 { + return fmt.Errorf("%v takes no arguments", c.UseLine()) + } + return nil + }, + Run: func(cmd *cobra.Command, args []string) { + t := table.NewWriter() + t.SetOutputMirror(os.Stdout) + t.AppendHeader(table.Row{id, name, createdAt, updatedAt}) + 
listTemplates(cmd, t) + t.Render() + }, +} + +func listTemplates(cmd *cobra.Command, t table.Writer) { + list, err := client.TemplateClient.ListTemplates(context.Background(), &template.Empty{}) + if err != nil { + log.Fatal(err) + } + + var tmp *template.WorkflowTemplate + err = nil + for tmp, err = list.Recv(); err == nil && tmp.Name != ""; tmp, err = list.Recv() { + cr := *tmp.CreatedAt + up := *tmp.UpdatedAt + t.AppendRows([]table.Row{ + {tmp.Id, tmp.Name, time.Unix(cr.Seconds, 0), time.Unix(up.Seconds, 0)}, + }) + } + + if err != nil && err != io.EOF { + log.Fatal(err) + } +} + +func init() { + listCmd.DisableFlagsInUseLine = true + SubCommands = append(SubCommands, listCmd) +} diff --git a/cmd/tinkerbell/cmd/template/update.go b/cmd/tinkerbell/cmd/template/update.go new file mode 100644 index 000000000..fc761432b --- /dev/null +++ b/cmd/tinkerbell/cmd/template/update.go @@ -0,0 +1,70 @@ +package template + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/template" + uuid "github.com/satori/go.uuid" + "github.com/spf13/cobra" +) + +// updateCmd represents the get subcommand for template command +var updateCmd = &cobra.Command{ + Use: "update [id] [flags]", + Short: "update a template", + Example: "tinkerbell template update [id] [flags]", + PreRunE: func(c *cobra.Command, args []string) error { + name, _ := c.Flags().GetString(fName) + path, _ := c.Flags().GetString(fPath) + if name == "" && path == "" { + return fmt.Errorf("%v requires at least one flag", c.UseLine()) + } + return nil + }, + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires argument", c.UseLine()) + } + for _, arg := range args { + if _, err := uuid.FromString(arg); err != nil { + return fmt.Errorf("invalid uuid: %s", arg) + } + } + return nil + }, + Run: func(c *cobra.Command, args []string) { + for _, arg := range args { + updateTemplate(arg) + } + }, +} + 
+func updateTemplate(id string) { + req := template.WorkflowTemplate{Id: id} + if filePath == "" && templateName != "" { + req.Name = templateName + } else if filePath != "" && templateName == "" { + validateTemplate() + req.Data = readTemplateData() + } else { + req.Name = templateName + req.Data = readTemplateData() + } + + _, err := client.TemplateClient.UpdateTemplate(context.Background(), &req) + if err != nil { + log.Fatal(err) + } + fmt.Println("Updated Template: ", id) +} + +func init() { + flags := updateCmd.PersistentFlags() + flags.StringVarP(&filePath, "path", "p", "", "path to the template file") + flags.StringVarP(&templateName, "name", "n", "", "unique name for the template (alphanumeric)") + + SubCommands = append(SubCommands, updateCmd) +} diff --git a/cmd/tinkerbell/cmd/workflow.go b/cmd/tinkerbell/cmd/workflow.go new file mode 100644 index 000000000..c88454d3c --- /dev/null +++ b/cmd/tinkerbell/cmd/workflow.go @@ -0,0 +1,26 @@ +package cmd + +import ( + "fmt" + + "github.com/packethost/tinkerbell/cmd/tinkerbell/cmd/workflow" + "github.com/spf13/cobra" +) + +// workflowCmd represents the workflow sub-command +var workflowCmd = &cobra.Command{ + Use: "workflow", + Short: "tinkerbell workflow client", + Example: "tinkerbell workflow [command]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires arguments", c.UseLine()) + } + return nil + }, +} + +func init() { + workflowCmd.AddCommand(workflow.SubCommands...) 
+ rootCmd.AddCommand(workflowCmd) +} diff --git a/cmd/tinkerbell/cmd/workflow/commands.go b/cmd/tinkerbell/cmd/workflow/commands.go new file mode 100644 index 000000000..028236492 --- /dev/null +++ b/cmd/tinkerbell/cmd/workflow/commands.go @@ -0,0 +1,18 @@ +package workflow + +import ( + "fmt" + + uuid "github.com/satori/go.uuid" + "github.com/spf13/cobra" +) + +// SubCommands hold all the subcommands for tinkerbell cli +var SubCommands []*cobra.Command + +func validateID(id string) error { + if _, err := uuid.FromString(id); err != nil { + return fmt.Errorf("invalid uuid: %s", id) + } + return nil +} diff --git a/cmd/tinkerbell/cmd/workflow/create.go b/cmd/tinkerbell/cmd/workflow/create.go new file mode 100644 index 000000000..420b68c97 --- /dev/null +++ b/cmd/tinkerbell/cmd/workflow/create.go @@ -0,0 +1,61 @@ +package workflow + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/workflow" + "github.com/spf13/cobra" +) + +var ( + fTemplate = "template" + fTarget = "target" + template string + target string +) + +// createCmd represents the create subcommand for worflow command +var createCmd = &cobra.Command{ + Use: "create", + Short: "create a workflow", + Example: "tinkerbell workflow create [flags]", + PreRunE: func(c *cobra.Command, args []string) error { + tmp, _ := c.Flags().GetString(fTemplate) + err := validateID(tmp) + if err != nil { + return err + } + tar, _ := c.Flags().GetString(fTarget) + err = validateID(tar) + return err + }, + Run: func(c *cobra.Command, args []string) { + createWorkflow(c, args) + }, +} + +func addFlags() { + flags := createCmd.PersistentFlags() + flags.StringVarP(&template, "template", "t", "", "workflow template") + flags.StringVarP(&target, "target", "r", "", "workflow target") + + createCmd.MarkPersistentFlagRequired(fTarget) + createCmd.MarkPersistentFlagRequired(fTemplate) +} + +func createWorkflow(c *cobra.Command, args []string) { + req := 
workflow.CreateRequest{Template: template, Target: target} + res, err := client.WorkflowClient.CreateWorkflow(context.Background(), &req) + if err != nil { + log.Fatal(err) + } + fmt.Println("Created Workflow: ", res.Id) +} + +func init() { + addFlags() + SubCommands = append(SubCommands, createCmd) +} diff --git a/cmd/tinkerbell/cmd/workflow/data.go b/cmd/tinkerbell/cmd/workflow/data.go new file mode 100644 index 000000000..48f1e1c92 --- /dev/null +++ b/cmd/tinkerbell/cmd/workflow/data.go @@ -0,0 +1,69 @@ +package workflow + +import ( + "context" + "fmt" + "log" + + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/workflow" + uuid "github.com/satori/go.uuid" + "github.com/spf13/cobra" +) + +var ( + version int32 + needsMetadata bool + versionOnly bool +) + +// dataCmd represents the data subcommand for workflow command +var dataCmd = &cobra.Command{ + Use: "data [id]", + Short: "get workflow data", + Example: "tinkerbell workflow data [id] [flags]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires an argument", c.UseLine()) + } + for _, arg := range args { + if _, err := uuid.FromString(arg); err != nil { + return fmt.Errorf("invalid uuid: %s", arg) + } + } + return nil + }, + Run: func(c *cobra.Command, args []string) { + for _, arg := range args { + req := &workflow.GetWorkflowDataRequest{WorkflowID: arg, Version: version} + var res *workflow.GetWorkflowDataResponse + var err error + if needsMetadata { + res, err = client.WorkflowClient.GetWorkflowMetadata(context.Background(), req) + } else if versionOnly { + res, err = client.WorkflowClient.GetWorkflowDataVersion(context.Background(), req) + } else { + res, err = client.WorkflowClient.GetWorkflowData(context.Background(), req) + } + + if err != nil { + log.Fatal(err) + } + + if versionOnly { + fmt.Printf("Latest workflow data version: %v\n", res.Version) + } else { + fmt.Println(string(res.Data)) + } + } + }, +} + 
+func init() {
+	flags := dataCmd.PersistentFlags()
+	flags.Int32VarP(&version, "version", "v", 0, "data version")
+	flags.BoolVarP(&needsMetadata, "metadata", "m", false, "metadata only")
+	flags.BoolVarP(&versionOnly, "latest-version", "l", false, "latest version")
+
+	SubCommands = append(SubCommands, dataCmd)
+}
diff --git a/cmd/tinkerbell/cmd/workflow/delete.go b/cmd/tinkerbell/cmd/workflow/delete.go
new file mode 100644
index 000000000..c6fce6e19
--- /dev/null
+++ b/cmd/tinkerbell/cmd/workflow/delete.go
@@ -0,0 +1,43 @@
+package workflow
+
+import (
+	"context"
+	"fmt"
+	"log"
+
+	"github.com/packethost/tinkerbell/client"
+	"github.com/packethost/tinkerbell/protos/workflow"
+	uuid "github.com/satori/go.uuid"
+	"github.com/spf13/cobra"
+)
+
+// deleteCmd represents the delete subcommand for workflow command
+var deleteCmd = &cobra.Command{
+	Use:     "delete [id]",
+	Short:   "delete a workflow",
+	Example: "tinkerbell workflow delete [id]",
+	Args: func(c *cobra.Command, args []string) error {
+		if len(args) == 0 {
+			return fmt.Errorf("%v requires an argument", c.UseLine())
+		}
+		for _, arg := range args {
+			if _, err := uuid.FromString(arg); err != nil {
+				return fmt.Errorf("invalid uuid: %s", arg)
+			}
+		}
+		return nil
+	},
+	Run: func(c *cobra.Command, args []string) {
+		for _, arg := range args {
+			req := workflow.GetRequest{Id: arg}
+			if _, err := client.WorkflowClient.DeleteWorkflow(context.Background(), &req); err != nil {
+				log.Fatal(err)
+			}
+		}
+	},
+}
+
+func init() {
+	deleteCmd.DisableFlagsInUseLine = true
+	SubCommands = append(SubCommands, deleteCmd)
+}
diff --git a/cmd/tinkerbell/cmd/workflow/events.go b/cmd/tinkerbell/cmd/workflow/events.go
new file mode 100644
index 000000000..225a066be
--- /dev/null
+++ b/cmd/tinkerbell/cmd/workflow/events.go
@@ -0,0 +1,69 @@
+package workflow
+
+import (
+	"context"
+	"fmt"
+	"io"
+	"log"
+	"os"
+
+	"github.com/jedib0t/go-pretty/table"
+	"github.com/packethost/tinkerbell/client"
+	
"github.com/packethost/tinkerbell/protos/workflow"
+	"github.com/spf13/cobra"
+)
+
+var (
+	hWorkerID      = "Worker ID"
+	hTaskName      = "Task Name"
+	hActionName    = "Action Name"
+	hExecutionTime = "Execution Time"
+	hMessage       = "Message"
+	hStatus        = "Action Status"
+)
+
+// showCmd represents the events subcommand for workflow command
+var showCmd = &cobra.Command{
+	Use:     "events [id]",
+	Short:   "show all events for a workflow",
+	Example: "tinkerbell workflow events [id]",
+	Args: func(c *cobra.Command, args []string) error {
+		if len(args) == 0 {
+			return fmt.Errorf("%v takes an arguments", c.UseLine())
+		}
+		return nil
+	},
+	Run: func(c *cobra.Command, args []string) {
+		t := table.NewWriter()
+		t.SetOutputMirror(os.Stdout)
+		t.AppendHeader(table.Row{hWorkerID, hTaskName, hActionName, hExecutionTime, hMessage, hStatus})
+		listEvents(c, t, args)
+		t.Render()
+
+	},
+}
+
+func listEvents(c *cobra.Command, t table.Writer, args []string) {
+	for _, arg := range args {
+		req := workflow.GetRequest{Id: arg}
+		events, err := client.WorkflowClient.ShowWorkflowEvents(context.Background(), &req)
+		if err != nil {
+			log.Fatal(err)
+		}
+		event, err := events.Recv()
+		for err == nil && event != nil {
+			t.AppendRows([]table.Row{
+				{event.WorkerId, event.TaskName, event.ActionName, event.Seconds, event.Message, event.ActionStatus},
+			})
+			event, err = events.Recv()
+		}
+		if err != nil && err != io.EOF {
+			log.Fatal(err)
+		}
+	}
+}
+
+func init() {
+	showCmd.DisableFlagsInUseLine = true
+	SubCommands = append(SubCommands, showCmd)
+}
diff --git a/cmd/tinkerbell/cmd/workflow/get.go b/cmd/tinkerbell/cmd/workflow/get.go
new file mode 100644
index 000000000..30f59972c
--- /dev/null
+++ b/cmd/tinkerbell/cmd/workflow/get.go
@@ -0,0 +1,46 @@
+package workflow
+
+import (
+	"context"
+	"fmt"
+	"log"
+
+	"github.com/packethost/tinkerbell/client"
+	"github.com/packethost/tinkerbell/protos/workflow"
+	"github.com/spf13/cobra"
+)
+
+var (
+	hID       = "Workflow 
ID" + hTemplate = "Template ID" + hTarget = "Target ID" + hState = "State" +) + +// getCmd represents the get subcommand for workflow command +var getCmd = &cobra.Command{ + Use: "get [id]", + Short: "get a workflow", + Example: "tinkerbell workflow get [id]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires an argument", c.UseLine()) + } + return validateID(args[0]) + }, + Run: func(c *cobra.Command, args []string) { + for _, arg := range args { + req := workflow.GetRequest{Id: arg} + w, err := client.WorkflowClient.GetWorkflow(context.Background(), &req) + if err != nil { + log.Fatal(err) + } + fmt.Println(w.Data) + } + }, +} + +func init() { + getCmd.DisableFlagsInUseLine = true + SubCommands = append(SubCommands, getCmd) +} diff --git a/cmd/tinkerbell/cmd/workflow/list.go b/cmd/tinkerbell/cmd/workflow/list.go new file mode 100644 index 000000000..d78a8c412 --- /dev/null +++ b/cmd/tinkerbell/cmd/workflow/list.go @@ -0,0 +1,67 @@ +package workflow + +import ( + "context" + "fmt" + "io" + "log" + "os" + "time" + + "github.com/jedib0t/go-pretty/table" + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/workflow" + "github.com/spf13/cobra" +) + +var ( + hCreatedAt = "Created At" + hUpdatedAt = "Updated At" +) + +// listCmd represents the list subcommand for workflow command +var listCmd = &cobra.Command{ + Use: "list", + Short: "list all workflows", + Example: "tinkerbell workflow list", + Args: func(c *cobra.Command, args []string) error { + if len(args) != 0 { + return fmt.Errorf("%v takes no arguments", c.UseLine()) + } + return nil + }, + Run: func(c *cobra.Command, args []string) { + t := table.NewWriter() + t.SetOutputMirror(os.Stdout) + t.AppendHeader(table.Row{hID, hTemplate, hTarget, hCreatedAt, hUpdatedAt}) + listWorkflows(c, t) + t.Render() + + }, +} + +func listWorkflows(c *cobra.Command, t table.Writer) { + list, err := 
client.WorkflowClient.ListWorkflows(context.Background(), &workflow.Empty{}) + if err != nil { + log.Fatal(err) + } + + var wf *workflow.Workflow + err = nil + for wf, err = list.Recv(); err == nil && wf.Id != ""; wf, err = list.Recv() { + cr := *wf.CreatedAt + up := *wf.UpdatedAt + t.AppendRows([]table.Row{ + {wf.Id, wf.Template, wf.Target, time.Unix(cr.Seconds, 0), time.Unix(up.Seconds, 0)}, + }) + } + + if err != nil && err != io.EOF { + log.Fatal(err) + } +} + +func init() { + listCmd.DisableFlagsInUseLine = true + SubCommands = append(SubCommands, listCmd) +} diff --git a/cmd/tinkerbell/cmd/workflow/state.go b/cmd/tinkerbell/cmd/workflow/state.go new file mode 100644 index 000000000..c654c2da8 --- /dev/null +++ b/cmd/tinkerbell/cmd/workflow/state.go @@ -0,0 +1,69 @@ +package workflow + +import ( + "context" + "fmt" + "log" + "os" + "strconv" + + "github.com/jedib0t/go-pretty/table" + "github.com/packethost/tinkerbell/client" + "github.com/packethost/tinkerbell/protos/workflow" + "github.com/spf13/cobra" +) + +// getCmd represents the get subcommand for workflow command +var stateCmd = &cobra.Command{ + Use: "state [id]", + Short: "get the current workflow state", + Example: "tinkerbell workflow state [id]", + Args: func(c *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("%v requires an argument", c.UseLine()) + } + return validateID(args[0]) + }, + Run: func(c *cobra.Command, args []string) { + for _, arg := range args { + req := workflow.GetRequest{Id: arg} + t := table.NewWriter() + t.SetOutputMirror(os.Stdout) + t.AppendHeader(table.Row{"Field Name", "Values"}) + wf, err := client.WorkflowClient.GetWorkflowContext(context.Background(), &req) + if err != nil { + log.Fatal(err) + } + wfProgress := calWorkflowProgress(wf.CurrentActionIndex, wf.TotalNumberOfActions, wf.CurrentActionState) + t.AppendRow(table.Row{"Workflow ID", wf.WorkflowId}) + t.AppendRow(table.Row{"Workflow Progress", wfProgress}) + 
t.AppendRow(table.Row{"Current Task", wf.CurrentTask})
+			t.AppendRow(table.Row{"Current Action", wf.CurrentAction})
+			t.AppendRow(table.Row{"Current Worker", wf.CurrentWorker})
+			t.AppendRow(table.Row{"Current Action State", wf.CurrentActionState})
+
+			t.Render()
+
+		}
+	},
+}
+
+func calWorkflowProgress(cur int64, total int64, state workflow.ActionState) string {
+	if total == 0 || (cur == 0 && state != workflow.ActionState_ACTION_SUCCESS) {
+		return "0%"
+	}
+	var taskCompleted int64
+	if state == workflow.ActionState_ACTION_SUCCESS {
+		taskCompleted = cur + 1
+	} else {
+		taskCompleted = cur
+	}
+	progress := (taskCompleted * 100) / total
+	perc := strconv.Itoa(int(progress)) + "%"
+
+	return perc
+}
+
+func init() {
+	SubCommands = append(SubCommands, stateCmd)
+}
diff --git a/cmd/tinkerbell/main.go b/cmd/tinkerbell/main.go
new file mode 100644
index 000000000..efbe93aa1
--- /dev/null
+++ b/cmd/tinkerbell/main.go
@@ -0,0 +1,7 @@
+package main
+
+import "github.com/packethost/tinkerbell/cmd/tinkerbell/cmd"
+
+func main() {
+	cmd.Execute()
+}
diff --git a/cmd/tinkerbell/sample.tmpl b/cmd/tinkerbell/sample.tmpl
new file mode 100644
index 000000000..01c7c6bba
--- /dev/null
+++ b/cmd/tinkerbell/sample.tmpl
@@ -0,0 +1,29 @@
+version: '0.1'
+name: packet_osie_provision
+global_timeout: 600
+tasks:
+- name: "OS Installation"
+  worker: "{{index .Targets "machine1" "mac_addr"}}"
+  actions:
+  - name: "server_partitioning"
+    image: hello-world
+    timeout: 60
+    on-timeout: do_partion recover -timeout
+    on-failure: do_partion recover -failure
+    volumes:
+    - ./host-path:/container-path
+    environment:
+      key: value
+  - name: os_install
+    image: hello-world
+    timeout: 60
+    on-timeout: "os_install -timeout"
+    on-failure: "os_install -failure"
+- name: "Updated DB Entries"
+  worker: "{{index .Targets "machine1" "mac_addr"}}"
+  actions:
+  - name: "update_db"
+    image: hello-world
+    timeout: 50
+    on-timeout: "tinkerbell_client update-timeout"
+    on-failure: 
"tinkerbell_client update-failed" diff --git a/db/db.go b/db/db.go new file mode 100644 index 000000000..8505d7bd0 --- /dev/null +++ b/db/db.go @@ -0,0 +1,74 @@ +package db + +import ( + "context" + "database/sql" + + "github.com/lib/pq" + "github.com/packethost/pkg/log" + "github.com/pkg/errors" +) + +var logger log.Logger + +// ConnectDB returns a connection to postgres database +func ConnectDB(lg log.Logger) *sql.DB { + logger = lg + db, err := sql.Open("postgres", "") + if err != nil { + logger.Error(err) + panic(err) + } + if err := truncate(db); err != nil { + if pqErr := Error(err); pqErr != nil { + logger.With("detail", pqErr.Detail, "where", pqErr.Where).Error(err) + } + panic(err) + } + return db +} + +// Error returns the underlying cause for error +func Error(err error) *pq.Error { + if pqErr, ok := errors.Cause(err).(*pq.Error); ok { + return pqErr + } + return nil +} + +func truncate(db *sql.DB) error { + tx, err := db.BeginTx(context.Background(), &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec("TRUNCATE hardware") + if err != nil { + return errors.Wrap(err, "TRUNCATE") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "TRUNCATE") + } + return err +} + +func get(ctx context.Context, db *sql.DB, query string, args ...interface{}) (string, error) { + row := db.QueryRowContext(ctx, query, args...) 
+ + buf := []byte{} + err := row.Scan(&buf) + if err == nil { + return string(buf), nil + } + + if err != sql.ErrNoRows { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + } else { + err = nil + } + + return "", err +} diff --git a/db/hardware.go b/db/hardware.go new file mode 100644 index 000000000..05ee294cb --- /dev/null +++ b/db/hardware.go @@ -0,0 +1,178 @@ +package db + +import ( + "context" + "database/sql" + "time" + + "github.com/pkg/errors" +) + +// DeleteFromDB : delete data from hardware table +func DeleteFromDB(ctx context.Context, db *sql.DB, id string) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec(` + UPDATE hardware + SET + deleted_at = NOW() + WHERE + id = $1; + `, id) + + if err != nil { + return errors.Wrap(err, "DELETE") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// InsertIntoDB : insert data into hardware table +func InsertIntoDB(ctx context.Context, db *sql.DB, data string) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec(` + INSERT INTO + hardware (inserted_at, id, data) + VALUES + ($1, ($2::jsonb ->> 'id')::uuid, $2) + ON CONFLICT (id) + DO + UPDATE SET + (inserted_at, deleted_at, data) = ($1, NULL, $2); + `, time.Now(), data) + if err != nil { + return errors.Wrap(err, "INSERT") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// GetByMAC : get data by machine mac +func GetByMAC(ctx context.Context, db *sql.DB, mac string) (string, error) { + arg := ` + { + "network_ports": [ + { + "data": { + "mac": "` + mac + `" + } + } + ] + } + ` + query := ` + SELECT data + FROM hardware + WHERE + deleted_at IS NULL + AND + data @> $1 + ` + + return get(ctx, db, query, arg) +} + 
+// GetByIP : get data by machine ip +func GetByIP(ctx context.Context, db *sql.DB, ip string) (string, error) { + instance := ` + { + "instance": { + "ip_addresses": [ + { + "address": "` + ip + `" + } + ] + } + } + ` + hardwareOrManagement := ` + { + "ip_addresses": [ + { + "address": "` + ip + `" + } + ] + } + ` + + query := ` + SELECT data + FROM hardware + WHERE + deleted_at IS NULL + AND ( + data @> $1 + OR + data @> $2 + ) + ` + + return get(ctx, db, query, instance, hardwareOrManagement) +} + +// GetByID : get data by machine id +func GetByID(ctx context.Context, db *sql.DB, id string) (string, error) { + arg := id + + query := ` + SELECT data + FROM hardware + WHERE + deleted_at IS NULL + AND + id = $1 + ` + return get(ctx, db, query, arg) +} + +// GetAll : get data for all machine +func GetAll(db *sql.DB, fn func(string) error) error { + rows, err := db.Query(` + SELECT data + FROM hardware + WHERE + deleted_at IS NULL + `) + + if err != nil { + return err + } + + defer rows.Close() + buf := []byte{} + for rows.Next() { + err = rows.Scan(&buf) + if err != nil { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + return err + } + + err = fn(string(buf)) + if err != nil { + return err + } + + } + + err = rows.Err() + if err == sql.ErrNoRows { + err = nil + } + return err +} diff --git a/db/target.go b/db/target.go new file mode 100644 index 000000000..a73f4aa19 --- /dev/null +++ b/db/target.go @@ -0,0 +1,117 @@ +package db + +import ( + "context" + "database/sql" + "time" + + "github.com/pkg/errors" +) + +// InsertIntoTargetDB : Push targets data in target table +func InsertIntoTargetDB(ctx context.Context, db *sql.DB, data string, uuid string) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec(` + INSERT INTO + targets (inserted_at, id, data) + VALUES + ($1, $2, $3) + ON CONFLICT (id) + DO + UPDATE SET + (inserted_at, 
deleted_at, data) = ($1, NULL, $3); + `, time.Now(), uuid, data) + if err != nil { + return errors.Wrap(err, "INSERT") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// TargetsByID : Get the targets data which belongs to the input id +func TargetsByID(ctx context.Context, db *sql.DB, id string) (string, error) { + arg := id + + query := ` + SELECT data + FROM targets + WHERE + deleted_at IS NULL + AND + id = $1 + ` + return get(ctx, db, query, arg) +} + +// DeleteFromTargetDB : Delete the targets which belong to the input id +func DeleteFromTargetDB(ctx context.Context, db *sql.DB, id string) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec(` + UPDATE targets + SET + deleted_at = NOW() + WHERE + id = $1; + `, id) + + if err != nil { + return errors.Wrap(err, "DELETE") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// ListTargets returns all saved targets which are not deleted +func ListTargets(db *sql.DB, fn func(id, n string) error) error { + rows, err := db.Query(` + SELECT id, data + FROM targets + WHERE + deleted_at IS NULL; + `) + + if err != nil { + return err + } + + defer rows.Close() + var ( + id string + data string + ) + + for rows.Next() { + err = rows.Scan(&id, &data) + if err != nil { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + return err + } + err = fn(id, data) + if err != nil { + return err + } + } + + err = rows.Err() + if err == sql.ErrNoRows { + err = nil + } + return err +} diff --git a/db/template.go b/db/template.go new file mode 100644 index 000000000..f288629ef --- /dev/null +++ b/db/template.go @@ -0,0 +1,178 @@ +package db + +import ( + "context" + "database/sql" + "time" + + "github.com/golang/protobuf/ptypes" + "github.com/golang/protobuf/ptypes/timestamp" + "github.com/pkg/errors" + uuid 
"github.com/satori/go.uuid"
+)
+
+// CreateTemplate creates a new workflow template
+func CreateTemplate(ctx context.Context, db *sql.DB, name string, data []byte, id uuid.UUID) error {
+	tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable})
+	if err != nil {
+		return errors.Wrap(err, "BEGIN transaction")
+	}
+
+	_, err = tx.Exec(`
+	INSERT INTO
+		template (created_at, updated_at, name, data, id)
+	VALUES
+		($1, $1, $2, $3, $4)
+	ON CONFLICT (id)
+	DO
+	UPDATE SET
+		(updated_at, deleted_at, name, data) = ($1, NULL, $2, $3);
+	`, time.Now(), name, data, id)
+	if err != nil {
+		return errors.Wrap(err, "INSERT")
+	}
+
+	err = tx.Commit()
+	if err != nil {
+		return errors.Wrap(err, "COMMIT")
+	}
+	return nil
+}
+
+// GetTemplate returns a workflow template
+func GetTemplate(ctx context.Context, db *sql.DB, id string) ([]byte, error) {
+	query := `
+	SELECT data
+	FROM template
+	WHERE
+		id = $1
+	AND
+		deleted_at IS NULL
+	`
+	row := db.QueryRowContext(ctx, query, id)
+	buf := []byte{}
+	err := row.Scan(&buf)
+	if err == nil {
+		return buf, nil
+	}
+
+	if err != sql.ErrNoRows {
+		err = errors.Wrap(err, "SELECT")
+		logger.Error(err)
+	} else {
+		err = nil
+	}
+
+	return []byte{}, err
+}
+
+// DeleteTemplate deletes a workflow template
+func DeleteTemplate(ctx context.Context, db *sql.DB, name string) error {
+	tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable})
+	if err != nil {
+		return errors.Wrap(err, "BEGIN transaction")
+	}
+
+	_, err = tx.Exec(`
+	UPDATE template
+	SET
+		deleted_at = NOW()
+	WHERE
+		id = $1;
+	`, name)
+	if err != nil {
+		return errors.Wrap(err, "UPDATE")
+	}
+
+	err = tx.Commit()
+	if err != nil {
+		return errors.Wrap(err, "COMMIT")
+	}
+	return nil
+}
+
+// ListTemplates returns all saved templates
+func ListTemplates(db *sql.DB, fn func(id, n string, in, del *timestamp.Timestamp) error) error {
+	rows, err := db.Query(`
+	SELECT id, name, created_at, updated_at
+	FROM template
+	WHERE
+		deleted_at 
IS NULL; + `) + + if err != nil { + return err + } + + defer rows.Close() + var ( + id string + name string + createdAt time.Time + updatedAt time.Time + ) + + for rows.Next() { + err = rows.Scan(&id, &name, &createdAt, &updatedAt) + if err != nil { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + return err + } + + tCr, _ := ptypes.TimestampProto(createdAt) + tUp, _ := ptypes.TimestampProto(updatedAt) + err = fn(id, name, tCr, tUp) + if err != nil { + return err + } + } + + err = rows.Err() + if err == sql.ErrNoRows { + err = nil + } + return err +} + +// UpdateTemplate update a given template +func UpdateTemplate(ctx context.Context, db *sql.DB, name string, data []byte, id uuid.UUID) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + if data == nil && name != "" { + _, err = tx.Exec(` + UPDATE template + SET + updated_at = NOW(), name = $2 + WHERE + id = $1;`, id, name) + } else if data != nil && name == "" { + _, err = tx.Exec(` + UPDATE template + SET + updated_at = NOW(), data = $2 + WHERE + id = $1;`, id, data) + } else { + _, err = tx.Exec(` + UPDATE template + SET + updated_at = NOW(), name = $2, data = $3 + WHERE + id = $1; + `, id, name, data) + } + + if err != nil { + return errors.Wrap(err, "UPDATE") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} diff --git a/db/workflow.go b/db/workflow.go new file mode 100644 index 000000000..67fb41b9d --- /dev/null +++ b/db/workflow.go @@ -0,0 +1,852 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "net" + "os" + "strconv" + "strings" + "time" + + "github.com/docker/distribution/reference" + "github.com/golang/protobuf/ptypes" + "github.com/golang/protobuf/ptypes/timestamp" + pb "github.com/packethost/tinkerbell/protos/workflow" + "github.com/pkg/errors" + uuid "github.com/satori/go.uuid" + "gopkg.in/yaml.v2" +) + 
+type ( + // Workflow holds details about the workflow to be executed + wfYamlstruct struct { + Version string `yaml:"version"` + Name string `yaml:"name"` + ID string `yaml:"id"` + GlobalTimeout int `yaml:"global_timeout"` + Tasks []task `yaml:"tasks"` + } + + // Task represents a task to be performed in a worflow + task struct { + Name string `yaml:"name"` + WorkerAddr string `yaml:"worker"` + Actions []action `yaml:"actions"` + Volumes []string `yaml:"volumes"` + Environment map[string]string `yaml:"environment"` + } + + // Action is the basic executional unit for a workflow + action struct { + Name string `yaml:"name"` + Image string `yaml:"image"` + Timeout int64 `yaml:"timeout"` + Command []string `yaml:"command"` + OnTimeout []string `yaml:"on-timeout"` + OnFailure []string `yaml:"on-failure"` + Volumes []string `yaml:"volumes,omitempty"` + Environment map[string]string `yaml:"environment,omitempty"` + } +) + +// Workflow represents a workflow instance in database +type Workflow struct { + State int32 + ID, Target, Template string + CreatedAt, UpdatedAt *timestamp.Timestamp +} + +var ( + defaultMaxVersions = 3 + maxVersions = defaultMaxVersions // maximum number of workflow data versions to be kept in database +) + +// CreateWorkflow creates a new workflow +func CreateWorkflow(ctx context.Context, db *sql.DB, wf Workflow, data string, id uuid.UUID) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + err = insertActionList(ctx, db, data, id, tx) + if err != nil { + return errors.Wrap(err, "Failed to insert in workflow_state") + + } + err = insertInWorkflow(ctx, db, wf, tx) + if err != nil { + return errors.Wrap(err, "Failed to workflow") + + } + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +func insertInWorkflow(ctx context.Context, db *sql.DB, wf Workflow, tx *sql.Tx) error { + _, err := tx.Exec(` + 
INSERT INTO + workflow (created_at, updated_at, template, target, id) + VALUES + ($1, $1, $2, $3, $4) + ON CONFLICT (id) + DO + UPDATE SET + (updated_at, deleted_at, template, target) = ($1, NULL, $2, $3); + `, time.Now(), wf.Template, wf.Target, wf.ID) + if err != nil { + return errors.Wrap(err, "INSERT in to workflow") + } + return nil +} + +func insertIntoWfWorkerTable(ctx context.Context, db *sql.DB, wfID uuid.UUID, workerID uuid.UUID, tx *sql.Tx) error { + // TODO This command is not 100% reliable for concurrent write operations + _, err := tx.Exec(` + INSERT INTO + workflow_worker_map (workflow_id, worker_id) + SELECT $1, $2 + WHERE + NOT EXISTS ( + SELECT workflow_id FROM workflow_worker_map WHERE workflow_id = $1 AND worker_id = $2 + ); + `, wfID, workerID) + if err != nil { + return errors.Wrap(err, "INSERT in to workflow_worker_map") + } + return nil +} + +// Insert actions in the workflow_state table +func insertActionList(ctx context.Context, db *sql.DB, yamlData string, id uuid.UUID, tx *sql.Tx) error { + wfymldata, err := parseYaml([]byte(yamlData)) + if err != nil { + return err + } + err = validateTemplateValues(wfymldata.Tasks) + if err != nil { + return errors.Wrap(err, "Invalid Template") + } + var actionList []pb.WorkflowAction + var uniqueWorkerID uuid.UUID + for _, task := range wfymldata.Tasks { + taskEnvs := map[string]string{} + taskVolumes := map[string]string{} + for _, vol := range task.Volumes { + v := strings.Split(vol, ":") + taskVolumes[v[0]] = strings.Join(v[1:], ":") + } + for key, val := range task.Environment { + taskEnvs[key] = val + } + + workerID, err := getWorkerID(ctx, db, task.WorkerAddr) + if err != nil { + return err + } else if workerID == "" { + return fmt.Errorf("Target mentioned with refernece %s not found", task.WorkerAddr) + } + workerUID, err := uuid.FromString(workerID) + if err != nil { + return err + } + if uniqueWorkerID != workerUID { + err = insertIntoWfWorkerTable(ctx, db, id, workerUID, tx) + if err != nil 
{ + return err + } + uniqueWorkerID = workerUID + } + for _, ac := range task.Actions { + acenvs := map[string]string{} + for key, val := range taskEnvs { + acenvs[key] = val + } + for key, val := range ac.Environment { + acenvs[key] = val + } + + envs := []string{} + for key, val := range acenvs { + envs = append(envs, key+"="+val) + } + + volumes := map[string]string{} + for k, v := range taskVolumes { + volumes[k] = v + } + + for _, vol := range ac.Volumes { + v := strings.Split(vol, ":") + volumes[v[0]] = strings.Join(v[1:], ":") + } + + ac.Volumes = []string{} + for k, v := range volumes { + ac.Volumes = append(ac.Volumes, k+":"+v) + } + + action := pb.WorkflowAction{ + TaskName: task.Name, + WorkerId: workerUID.String(), + Name: ac.Name, + Image: ac.Image, + Timeout: ac.Timeout, + Command: ac.Command, + OnTimeout: ac.OnTimeout, + OnFailure: ac.OnFailure, + Environment: envs, + Volumes: ac.Volumes, + } + actionList = append(actionList, action) + } + } + totalActions := int64(len(actionList)) + actionData, err := json.Marshal(actionList) + if err != nil { + return err + } + + _, err = tx.Exec(` + INSERT INTO + workflow_state (workflow_id, current_worker, current_task_name, current_action_name, current_action_state, action_list, current_action_index, total_number_of_actions) + VALUES + ($1, $2, $3, $4, $5, $6, $7, $8) + ON CONFLICT (workflow_id) + DO + UPDATE SET + (workflow_id, current_worker, current_task_name, current_action_name, current_action_state, action_list, current_action_index, total_number_of_actions) = ($1, $2, $3, $4, $5, $6, $7, $8); + `, id, "", "", "", 0, actionData, 0, totalActions) + if err != nil { + return errors.Wrap(err, "INSERT in to workflow_state") + } + return nil +} + +// InsertIntoWfDataTable : Insert ephemeral data in workflow_data table +func InsertIntoWfDataTable(ctx context.Context, db *sql.DB, req *pb.UpdateWorkflowDataRequest) error { + version, err := getLatestVersionWfData(ctx, db, req.GetWorkflowID()) + if err != nil { + 
return err + } + + //increment version + version = version + 1 + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec(` + INSERT INTO + workflow_data (workflow_id, version, metadata, data) + VALUES + ($1, $2, $3, $4); + `, req.GetWorkflowID(), version, string(req.GetMetadata()), string(req.GetData())) + if err != nil { + return errors.Wrap(err, "INSERT Into workflow_data") + } + + if version > int32(maxVersions) { + cleanVersion := version - int32(maxVersions) + _, err = tx.Exec(` + UPDATE workflow_data + SET + data = NULL + WHERE + workflow_id = $1 AND version = $2; + `, req.GetWorkflowID(), cleanVersion) + if err != nil { + return errors.Wrap(err, "UPDATE") + } + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// GetfromWfDataTable : Give you the ephemeral data from workflow_data table +func GetfromWfDataTable(ctx context.Context, db *sql.DB, req *pb.GetWorkflowDataRequest) ([]byte, error) { + version := req.GetVersion() + if req.Version == 0 { + v, err := getLatestVersionWfData(ctx, db, req.GetWorkflowID()) + if err != nil { + return []byte(""), err + } + version = v + } + query := ` + SELECT data + FROM workflow_data + WHERE + workflow_id = $1 AND version = $2 + ` + row := db.QueryRowContext(ctx, query, req.GetWorkflowID(), version) + buf := []byte{} + err := row.Scan(&buf) + if err == nil { + return []byte(buf), nil + } + + if err != sql.ErrNoRows { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + } else { + err = nil + } + + return []byte{}, nil +} + +// GetWorkflowMetadata returns metadata wrt to the ephemeral data of a workflow +func GetWorkflowMetadata(ctx context.Context, db *sql.DB, req *pb.GetWorkflowDataRequest) ([]byte, error) { + version := req.GetVersion() + if req.Version == 0 { + v, err := getLatestVersionWfData(ctx, db, req.GetWorkflowID()) + if err != nil { + return 
[]byte(""), err + } + version = v + } + query := ` + SELECT metadata + FROM workflow_data + WHERE + workflow_id = $1 AND version = $2 + ` + row := db.QueryRowContext(ctx, query, req.GetWorkflowID(), version) + buf := []byte{} + err := row.Scan(&buf) + if err == nil { + return []byte(buf), nil + } + + if err != sql.ErrNoRows { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + } else { + err = nil + } + + return []byte{}, nil +} + +// GetWorkflowDataVersion returns the latest version of data for a workflow +func GetWorkflowDataVersion(ctx context.Context, db *sql.DB, workflowID string) (int32, error) { + return getLatestVersionWfData(ctx, db, workflowID) +} + +// GetfromWfWorkflowTable : gives you the current workflow +func GetfromWfWorkflowTable(ctx context.Context, db *sql.DB, id string) ([]string, error) { + rows, err := db.Query(` + SELECT workflow_id + FROM workflow_worker_map + WHERE + worker_id = $1; + `, id) + if err != nil { + return nil, err + } + var wfID []string + defer rows.Close() + var workerID string + + for rows.Next() { + err = rows.Scan(&workerID) + if err != nil { + err = errors.Wrap(err, "SELECT from worflow_worker_map") + logger.Error(err) + return nil, err + } + wfID = append(wfID, workerID) + } + err = rows.Err() + if err == sql.ErrNoRows { + return nil, nil + } + return wfID, err +} + +// GetWorkflow returns a workflow +func GetWorkflow(ctx context.Context, db *sql.DB, id string) (Workflow, error) { + query := ` + SELECT template, target + FROM workflow + WHERE + id = $1 + AND + deleted_at IS NULL; + ` + row := db.QueryRowContext(ctx, query, id) + var tmp, tar string + err := row.Scan(&tmp, &tar) + if err == nil { + return Workflow{ID: id, Template: tmp, Target: tar}, nil + } + + if err != sql.ErrNoRows { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + } else { + err = nil + } + + return Workflow{}, nil +} + +// DeleteWorkflow deletes a workflow +func DeleteWorkflow(ctx context.Context, db *sql.DB, id string, state int32) 
error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec(` + DELETE FROM workflow_worker_map + WHERE + workflow_id = $1; + `, id) + if err != nil { + return errors.Wrap(err, "Delete Workflow Error") + } + + _, err = tx.Exec(` + DELETE FROM workflow_state + WHERE + workflow_id = $1; + `, id) + if err != nil { + return errors.Wrap(err, "Delete Workflow Error") + } + + _, err = tx.Exec(` + UPDATE workflow + SET + deleted_at = NOW() + WHERE + id = $1; + `, id) + if err != nil { + return errors.Wrap(err, "UPDATE") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// ListWorkflows returns all workflows +func ListWorkflows(db *sql.DB, fn func(wf Workflow) error) error { + rows, err := db.Query(` + SELECT id, template, target, created_at, updated_at + FROM workflow + WHERE + deleted_at IS NULL; + `) + + if err != nil { + return err + } + + defer rows.Close() + var ( + id, tmp, tar string + crAt, upAt time.Time + ) + + for rows.Next() { + err = rows.Scan(&id, &tmp, &tar, &crAt, &upAt) + if err != nil { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + return err + } + + wf := Workflow{ + ID: id, + Template: tmp, + Target: tar, + } + wf.CreatedAt, _ = ptypes.TimestampProto(crAt) + wf.UpdatedAt, _ = ptypes.TimestampProto(upAt) + err = fn(wf) + if err != nil { + return err + } + } + err = rows.Err() + if err == sql.ErrNoRows { + err = nil + } + return err +} + +// UpdateWorkflow updates a given workflow +func UpdateWorkflow(ctx context.Context, db *sql.DB, wf Workflow, state int32) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + if wf.Target == "" && wf.Template != "" { + _, err = tx.Exec(` + UPDATE workflow + SET + updated_at = NOW(), template = $2 + WHERE + id = $1; + `, wf.ID, 
wf.Template) + } else if wf.Target != "" && wf.Template == "" { + _, err = tx.Exec(` + UPDATE workflow + SET + updated_at = NOW(), target = $2 + WHERE + id = $1; + `, wf.ID, wf.Target) + } else { + _, err = tx.Exec(` + UPDATE workflow + SET + updated_at = NOW(), template = $2, target = $3 + WHERE + id = $1; + `, wf.ID, wf.Template, wf.Target) + } + + if err != nil { + return errors.Wrap(err, "UPDATE") + } + + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// UpdateWorkflowState : update the current workflow state +func UpdateWorkflowState(ctx context.Context, db *sql.DB, wfContext *pb.WorkflowContext) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + _, err = tx.Exec(` + UPDATE workflow_state + SET current_task_name = $2, + current_action_name = $3, + current_action_state = $4, + current_worker = $5, + current_action_index = $6 + WHERE + workflow_id = $1; + `, wfContext.WorkflowId, wfContext.CurrentTask, wfContext.CurrentAction, wfContext.CurrentActionState, wfContext.CurrentWorker, wfContext.CurrentActionIndex) + if err != nil { + return errors.Wrap(err, "INSERT in to workflow_state") + } + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// GetWorkflowContexts : gives you the current workflow context +func GetWorkflowContexts(ctx context.Context, db *sql.DB, wfID string) (*pb.WorkflowContext, error) { + query := ` + SELECT current_worker, current_task_name, current_action_name, current_action_index, current_action_state, total_number_of_actions + FROM workflow_state + WHERE + workflow_id = $1; + ` + row := db.QueryRowContext(ctx, query, wfID) + var cw, ct, ca string + var cai, tact int64 + var cas pb.ActionState + err := row.Scan(&cw, &ct, &ca, &cai, &cas, &tact) + if err == nil { + return &pb.WorkflowContext{ + WorkflowId: wfID, + CurrentWorker: cw, + 
CurrentTask: ct, + CurrentAction: ca, + CurrentActionIndex: cai, + CurrentActionState: cas, + TotalNumberOfActions: tact}, nil + } + if err != sql.ErrNoRows { + err = errors.Wrap(err, "SELECT from worflow_state") + logger.Error(err) + } else { + err = nil + } + return &pb.WorkflowContext{}, nil +} + +// GetWorkflowActions : gives you the action list of workflow +func GetWorkflowActions(ctx context.Context, db *sql.DB, wfID string) (*pb.WorkflowActionList, error) { + query := ` + SELECT action_list + FROM workflow_state + WHERE + workflow_id = $1; + ` + row := db.QueryRowContext(ctx, query, wfID) + var actionList string + err := row.Scan(&actionList) + if err == nil { + actions := []*pb.WorkflowAction{} + err = json.Unmarshal([]byte(actionList), &actions) + return &pb.WorkflowActionList{ + ActionList: actions}, nil + } + if err != sql.ErrNoRows { + err = errors.Wrap(err, "SELECT from worflow_state") + logger.Error(err) + } else { + err = nil + } + return &pb.WorkflowActionList{}, nil +} + +// InsertIntoWorkflowEventTable : insert workflow event table +func InsertIntoWorkflowEventTable(ctx context.Context, db *sql.DB, wfEvent *pb.WorkflowActionStatus, time time.Time) error { + tx, err := db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelSerializable}) + if err != nil { + return errors.Wrap(err, "BEGIN transaction") + } + + // TODO "created_at" field should be set in worker and come in the request + _, err = tx.Exec(` + INSERT INTO + workflow_event (workflow_id, worker_id, task_name, action_name, execution_time, message, status, created_at) + VALUES + ($1, $2, $3, $4, $5, $6, $7, $8); + `, wfEvent.WorkflowId, wfEvent.WorkerId, wfEvent.TaskName, wfEvent.ActionName, wfEvent.Seconds, wfEvent.Message, wfEvent.ActionStatus, time) + if err != nil { + return errors.Wrap(err, "INSERT in to workflow_event") + } + err = tx.Commit() + if err != nil { + return errors.Wrap(err, "COMMIT") + } + return nil +} + +// ShowWorkflowEvents returns all workflows +func 
ShowWorkflowEvents(db *sql.DB, wfID string, fn func(wfs pb.WorkflowActionStatus) error) error { + rows, err := db.Query(` + SELECT worker_id, task_name, action_name, execution_time, message, status, created_at + FROM workflow_event + WHERE + workflow_id = $1 + ORDER BY + created_at ASC; + `, wfID) + + if err != nil { + return err + } + + defer rows.Close() + var ( + status int32 + secs int64 + id, tName, aName, msg string + evTime time.Time + ) + + for rows.Next() { + err = rows.Scan(&id, &tName, &aName, &secs, &msg, &status, &evTime) + if err != nil { + err = errors.Wrap(err, "SELECT") + logger.Error(err) + return err + } + createdAt, _ := ptypes.TimestampProto(evTime) + wfs := pb.WorkflowActionStatus{ + WorkerId: id, + TaskName: tName, + ActionName: aName, + Seconds: secs, + Message: msg, + ActionStatus: pb.ActionState(status), + CreatedAt: createdAt, + } + err = fn(wfs) + if err != nil { + return err + } + } + err = rows.Err() + if err == sql.ErrNoRows { + err = nil + } + return err +} + +func getLatestVersionWfData(ctx context.Context, db *sql.DB, wfID string) (int32, error) { + query := ` + SELECT COUNT(*) + FROM workflow_data + WHERE + workflow_id = $1; + ` + row := db.QueryRowContext(ctx, query, wfID) + var version int32 + err := row.Scan(&version) + if err != nil { + return -1, err + } + return version, nil +} + +func parseYaml(ymlContent []byte) (*wfYamlstruct, error) { + var workflow = wfYamlstruct{} + err := yaml.UnmarshalStrict(ymlContent, &workflow) + if err != nil { + return &wfYamlstruct{}, err + } + return &workflow, nil +} + +func getWorkerIDbyMac(ctx context.Context, db *sql.DB, mac string) (string, error) { + arg := ` + { + "network_ports": [ + { + "data": { + "mac": "` + mac + `" + } + } + ] + } + ` + query := ` + SELECT id + FROM hardware + WHERE + deleted_at IS NULL + AND + data @> $1 + ` + + return get(ctx, db, query, arg) +} + +func getWorkerIDbyIP(ctx context.Context, db *sql.DB, ip string) (string, error) { + instance := ` + { + 
"instance": { + "ip_addresses": [ + { + "address": "` + ip + `" + } + ] + } + } + ` + hardwareOrManagement := ` + { + "ip_addresses": [ + { + "address": "` + ip + `" + } + ] + } + ` + + query := ` + SELECT id + FROM hardware + WHERE + deleted_at IS NULL + AND ( + data @> $1 + OR + data @> $2 + ) + ` + + return get(ctx, db, query, instance, hardwareOrManagement) +} + +func getWorkerID(ctx context.Context, db *sql.DB, addr string) (string, error) { + _, err := net.ParseMAC(addr) + if err != nil { + ip := net.ParseIP(addr) + if ip == nil || ip.To4() == nil { + return "", fmt.Errorf("invalid worker address: %s", addr) + } + return getWorkerIDbyIP(ctx, db, addr) + + } + return getWorkerIDbyMac(ctx, db, addr) +} + +func isValidLength(name string) error { + if len(name) > 200 { + return fmt.Errorf("Task/Action Name %s in the Temlate as more than 200 characters", name) + } + return nil +} + +func isValidImageName(name string) error { + _, err := reference.ParseNormalizedNamed(name) + if err != nil { + fmt.Println(err) + return err + } + return nil +} + +func validateTemplateValues(tasks []task) error { + taskNameMap := make(map[string]struct{}) + for _, task := range tasks { + err := isValidLength(task.Name) + if err != nil { + return err + } + _, ok := taskNameMap[task.Name] + if ok { + return fmt.Errorf("Provided template has duplicate task name \"%s\"", task.Name) + } + taskNameMap[task.Name] = struct{}{} + actionNameMap := make(map[string]struct{}) + for _, action := range task.Actions { + err := isValidLength(action.Name) + if err != nil { + return err + } + err = isValidImageName(action.Image) + if err != nil { + return fmt.Errorf("Invalid Image name %s", action.Image) + } + + _, ok := actionNameMap[action.Name] + if ok { + return fmt.Errorf("Provided template has duplicate action name \"%s\" in task \"%s\"", action.Name, task.Name) + } + actionNameMap[action.Name] = struct{}{} + } + } + return nil +} + +func init() { + val := os.Getenv("MAX_WORKFLOW_DATA_VERSIONS") 
+ if v, err := strconv.Atoi(val); err == nil { + maxVersions = v + } +} diff --git a/deploy/Dockerfile b/deploy/Dockerfile new file mode 100644 index 000000000..49a389e26 --- /dev/null +++ b/deploy/Dockerfile @@ -0,0 +1,3 @@ +FROM postgres:10-alpine + +ADD docker-entrypoint-initdb.d/ /docker-entrypoint-initdb.d/ diff --git a/deploy/docker-entrypoint-initdb.d/tinkerbell-init.sql b/deploy/docker-entrypoint-initdb.d/tinkerbell-init.sql new file mode 100644 index 000000000..dcb01f7fd --- /dev/null +++ b/deploy/docker-entrypoint-initdb.d/tinkerbell-init.sql @@ -0,0 +1,87 @@ +SET ROLE tinkerbell; + +CREATE TABLE IF NOT EXISTS hardware ( + id UUID UNIQUE + , inserted_at TIMESTAMPTZ + , deleted_at TIMESTAMPTZ + , data JSONB +); + +CREATE INDEX IF NOT EXISTS idx_id ON hardware (id); +CREATE INDEX IF NOT EXISTS idx_deleted_at ON hardware (deleted_at NULLS FIRST); +CREATE INDEX IF NOT EXISTS idxgin_type ON hardware USING GIN (data JSONB_PATH_OPS); + +CREATE TABLE IF NOT EXISTS template ( + id UUID UNIQUE NOT NULL + , name VARCHAR(200) NOT NULL + , created_at TIMESTAMPTZ + , updated_at TIMESTAMPTZ + , deleted_at TIMESTAMPTZ + , data BYTEA + + CONSTRAINT CK_name CHECK (name ~ '^[a-zA-Z0-9_-]*$') +); + +CREATE INDEX IF NOT EXISTS idx_tid ON template (id); +CREATE INDEX IF NOT EXISTS idx_tdeleted_at ON template (deleted_at NULLS FIRST); + +CREATE TABLE IF NOT EXISTS targets ( + id UUID UNIQUE + , inserted_at TIMESTAMPTZ + , deleted_at TIMESTAMPTZ + , data JSONB +); + +CREATE INDEX IF NOT EXISTS idx_rid ON targets (id); +CREATE INDEX IF NOT EXISTS idx_rdeleted_at ON targets (deleted_at NULLS FIRST); +CREATE INDEX IF NOT EXISTS idxgin_rtype ON targets USING GIN (data JSONB_PATH_OPS); + +CREATE TABLE IF NOT EXISTS workflow ( + id UUID UNIQUE NOT NULL + , template UUID NOT NULL + , target UUID NOT NULL + , created_at TIMESTAMPTZ + , updated_at TIMESTAMPTZ + , deleted_at TIMESTAMPTZ +); + +CREATE INDEX IF NOT EXISTS idx_wid ON workflow (id); +CREATE INDEX IF NOT EXISTS idx_wdeleted_at 
ON workflow (deleted_at NULLS FIRST); + +CREATE TABLE IF NOT EXISTS workflow_state ( + workflow_id UUID UNIQUE NOT NULL + , current_task_name VARCHAR(200) + , current_action_name VARCHAR(200) + , current_action_state SMALLINT + , current_worker VARCHAR(200) + , action_list JSONB + , current_action_index int + , total_number_of_actions INT +); + +CREATE INDEX IF NOT EXISTS idx_wfid ON workflow_state (workflow_id); + +CREATE TABLE IF NOT EXISTS workflow_event ( + workflow_id UUID NOT NULL + , worker_id UUID NOT NULL + , task_name VARCHAR(200) + , action_name VARCHAR(200) + , execution_time int + , message VARCHAR(200) + , status SMALLINT + , created_at TIMESTAMPTZ +); + +CREATE INDEX IF NOT EXISTS idx_event ON workflow_event (created_at); + +CREATE TABLE IF NOT EXISTS workflow_worker_map ( + workflow_id UUID NOT NULL + , worker_id UUID NOT NULL +); + +CREATE TABLE IF NOT EXISTS workflow_data ( + workflow_id UUID NOT NULL + , version INT + , metadata JSONB + , data JSONB +); \ No newline at end of file diff --git a/deploy/migrate b/deploy/migrate new file mode 100755 index 000000000..493b44903 --- /dev/null +++ b/deploy/migrate @@ -0,0 +1,4 @@ +#!/bin/sh + +# Import initial schema +psql -f /init.sql diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..496d4b465 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,162 @@ +version: '2.1' +services: + certs: + build: tls + volumes: + - ./certs:/certs + + tinkerbell: + build: . 
+ environment: + FACILITY: ${FACILITY:-lab1} + PACKET_ENV: ${PACKET_ENV:-testing} + PACKET_VERSION: ${PACKET_VERSION:-5efab5ef3a42cb88f2d54f4ed3201c2dd6797b7d} + ROLLBAR_TOKEN: ${ROLLBAR_TOKEN:-9b78d0ad01d1467aa92c49c3a349b79d} + ROLLBAR_DISABLE: ${ROLLBAR_DISABLE:-0} + MAX_WORKFLOW_DATA_VERSIONS: 5 + PGDATABASE: tinkerbell + PGHOST: db + PGPASSWORD: tinkerbell + PGPORT: 5432 + PGSSLMODE: disable + PGUSER: tinkerbell + depends_on: + - "certs" + - "db" + healthcheck: + test: ["CMD-SHELL", "wget -qO- 127.0.0.1:42114/cert"] + interval: 5s + timeout: 2s + retries: 30 + volumes: + - ./certs:/certs/${FACILITY} + logging: + driver: fluentd + options: + tag: tinkerbell-server + ports: + - 42113:42113/tcp + - 42114:42114/tcp + + db: + build: + context: deploy + environment: + POSTGRES_DB: tinkerbell + POSTGRES_PASSWORD: tinkerbell + POSTGRES_USER: tinkerbell + ports: + - 5432:5432 + healthcheck: + test: ["CMD-SHELL", "pg_isready -U tinkerbell"] + interval: 1s + timeout: 1s + retries: 30 + logging: + driver: fluentd + options: + tag: db + depends_on: + - fluentbit + + cli: + build: + context: cmd/tinkerbell + environment: + TINKERBELL_GRPC_AUTHORITY: 127.0.0.1:42113 + TINKERBELL_CERT_URL: http://127.0.0.1:42114/cert + logging: + driver: fluentd + options: + tag: tinkerbell-cli + network_mode: host + + registry: + build: + context: registry + args: + REGISTRY_USERNAME: username + REGISTRY_PASSWORD: password + environment: + REGISTRY_HTTP_ADDR: 0.0.0.0:443 + REGISTRY_HTTP_TLS_CERTIFICATE: /certs/server.pem + REGISTRY_HTTP_TLS_KEY: /certs/server-key.pem + REGISTRY_AUTH: htpasswd + REGISTRY_AUTH_HTPASSWD_REALM: "Registry Realm" + REGISTRY_AUTH_HTPASSWD_PATH: /auth/htpasswd + volumes: + - ./certs:/certs + logging: + driver: fluentd + options: + tag: registry + network_mode: host + + boots: + build: + context: ../boots + network_mode: host + command: -dhcp-addr 0.0.0.0:67 -tftp-addr 127.0.0.1:69 -http-addr 127.0.0.1:80 -log-level DEBUG + environment: + API_AUTH_TOKEN: 
${PACKET_API_AUTH_TOKEN:-PcyR6MvHb7wMmyYf9p8dJ2Dvnb9HxX8E} + API_CONSUMER_TOKEN: ${PACKET_CONSUMER_TOKEN:-djR2TAvbnkY92i8Ea2KFMZW6MusW1fk7qzeCUHgtnQRSsXnqxoCr6V2vhSxpqASf} + FACILITY_CODE: ${FACILITY:-lab1} + PACKET_ENV: ${PACKET_ENV:-testing} + PACKET_VERSION: ${PACKET_VERSION:-5efab5ef3a42cb88f2d54f4ed3201c2dd6797b7d} + ROLLBAR_TOKEN: ${ROLLBAR_TOKEN:-9b78d0ad01d1467aa92c49c3a349b79d} + ROLLBAR_DISABLE: ${ROLLBAR_DISABLE:-0} + MIRROR_HOST: ${MIRROR_HOST:-127.0.0.1} + CACHER_GRPC_AUTHORITY: 127.0.0.1:42111 + CACHER_CERT_URL: http://127.0.0.1:42112/cert + DNS_SERVERS: 8.8.8.8 + PUBLIC_IP: 127.0.0.1 + BOOTP_BIND: 127.0.0.1:67 + HTTP_BIND: 127.0.0.1:80 + SYSLOG_BIND: 127.0.0.1:514 + TFTP_BIND: 127.0.0.1:69 + DOCKER_REGISTRY: 127.0.0.1 + REGISTRY_USERNAME: username + REGISTRY_PASSWORD: password + TINKERBELL_GRPC_AUTHORITY: 127.0.0.1:42113 + TINKERBELL_CERT_URL: http://127.0.0.1:42114/cert + ELASTIC_SEARCH_URL: 127.0.0.1:9200 + depends_on: + - tinkerbell + logging: + driver: fluentd + options: + tag: tinkerbell + ports: + - 127.0.0.1:80:80/tcp + - 67:67/udp + - 69:69/udp + + elasticsearch: + image: elasticsearch:7.3.0 + ports: + - 9200:9200 + - 9300:9300 + environment: + - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - discovery.type=single-node + + kibana: + image: kibana:7.3.0 + depends_on: + - elasticsearch + restart: always + environment: + ELASTICSEARCH_URL: http://elasticsearch:9200 + ports: + - 5601:5601 + + fluentbit: + image: fluent/fluent-bit:1.3 + ports: + - 24224:24224 + - 24224:24224/udp + depends_on: + - elasticsearch + volumes: + - ./fluent-bit.conf:/fluent-bit/etc/fluent-bit.conf + diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 000000000..3732da083 --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,4 @@ +# Architecture + + +![](img/workflow-architecture.png) \ No newline at end of file diff --git a/docs/cli/README.md b/docs/cli/README.md new file mode 100644 index 000000000..9b74d4aa0 --- /dev/null +++ b/docs/cli/README.md @@ 
-0,0 +1,34 @@ +# Tinkerbell CLI Reference + +## tinkerbell + +Command line interface for Packet Workflow. + +### Synopsis + +Command line interface for Packet Workflow. The CLI allows you to update the hardware details with respect to a worker machine. It also enables you to create a template and a target which is eventually used to create a workflow. + +### Operations + +```shell + hardware tinkerbell hardware client + help Help about any command + target tinkerbell target client + template tinkerbell template client + workflow tinkerbell workflow client +``` + +### Options + +``` + -h, --help help for tinkerbell +``` + +### See Also + + - [tinkerbell hardware](hardware.md) - Hardware (worker) data operations + - [tinkerbell target](target.md) - Target operations + - [tinkerbell template](template.md) - Template operations + - [tinkerbell workflow](workflow.md) - Workflow operations + + diff --git a/docs/cli/hardware.md b/docs/cli/hardware.md new file mode 100644 index 000000000..97c9adc31 --- /dev/null +++ b/docs/cli/hardware.md @@ -0,0 +1,29 @@ +## tinkerbell hardware + +Hardware (worker) data operations. + +### Synopsis + +Hardware operations: +```shell + all Get all known hardware for facility + id Get hardware by id + ingest Trigger tinkerbell to ingest + ip Get hardware by any associated ip + mac Get hardware by any associated mac + push Push new hardware to tinkerbell + watch Register to watch an id for any changes +``` + +### Options + +``` + -h, --help help for hardware +``` + +### See Also + + - [tinkerbell target](target.md) - Target operations + - [tinkerbell template](template.md) - Template operations + - [tinkerbell workflow](workflow.md) - Workflow operations + diff --git a/docs/cli/target.md b/docs/cli/target.md new file mode 100644 index 000000000..76b1ad79a --- /dev/null +++ b/docs/cli/target.md @@ -0,0 +1,34 @@ +## tinkerbell target + +Target operations. 
+ +### Synopsis + +Target operations: +```shell + create create a target + delete delete a target + get get a target + list list all targets + update update a target +``` + +### Options + +``` + -h, --help help for target +``` + +### Examples + + - The command below creates a workflow target and returns its UUID. + ```shell + $ tinkerbell target create '{"targets": {"machine1": {"mac_addr": "98:03:9b:4b:c5:34"}}}' + ``` + +### See Also + + - [tinkerbell hardware](hardware.md) - Hardware (worker) data operations + - [tinkerbell template](template.md) - Template operations + - [tinkerbell workflow](workflow.md) - Workflow operations + diff --git a/docs/cli/template.md b/docs/cli/template.md new file mode 100644 index 000000000..4c2210850 --- /dev/null +++ b/docs/cli/template.md @@ -0,0 +1,52 @@ +## tinkerbell template + +Template operations. + +### Synopsis + +Template operations: +```shell + create create a workflow template + delete delete a template + get get a template + list list all saved templates + update update a template +``` + +### Options + +``` + -h, --help help for target +``` + +### Examples + + - The following command creates a workflow template using the `sample.tmpl` file and save it as `sample`. It returns a UUID for the newly created template. 
+ ```shell + $ tinkerbell template create -n <template_name> -p <path_to_template> + $ tinkerbell template create -n sample -p /tmp/sample.tmpl + ``` + + - List all the templates + ```shell + $ tinkerbell template list + ``` + + - Update the name of an existing template + ```shell + $ tinkerbell template update <template_uuid> -n <new_name> + $ tinkerbell template update edb80a56-b1f2-4502-abf9-17326324192b -n new-sample-template + ``` + + - Update an existing template and keep the same name + ```shell + $ tinkerbell template update <template_uuid> -p <path_to_template> + $ tinkerbell template update edb80a56-b1f2-4502-abf9-17326324192b -p /tmp/new-sample-template.tmpl + ``` + +### See Also + + - [tinkerbell hardware](hardware.md) - Hardware (worker) data operations + - [tinkerbell target](target.md) - Target operations + - [tinkerbell workflow](workflow.md) - Workflow operations + diff --git a/docs/cli/workflow.md b/docs/cli/workflow.md new file mode 100644 index 000000000..6d8cc2dc2 --- /dev/null +++ b/docs/cli/workflow.md @@ -0,0 +1,37 @@ +## tinkerbell workflow + +Workflow operations. 
+ +### Synopsis + +Workflow operations: +```shell + create create a workflow + data get workflow data + delete delete a workflow + events show all events for a workflow + get get a workflow + list list all workflows + state get the current workflow context +``` + +### Options + +``` + -h, --help help for workflow +``` + +### Examples + + - Create a workflow using a template and a target + ```shell + $ tinkerbell workflow create -t <template_uuid> -r <target_uuid> + $ tinkerbell workflow create -t edb80a56-b1f2-4502-abf9-17326324192b -r 9356ae1d-6165-4890-908d-7860ed04b421 + ``` + +### See Also + + - [tinkerbell hardware](hardware.md) - Hardware (worker) data operations + - [tinkerbell target](target.md) - Target operations + - [tinkerbell template](template.md) - Template operations + diff --git a/docs/components.md b/docs/components.md new file mode 100644 index 000000000..8f1f235a1 --- /dev/null +++ b/docs/components.md @@ -0,0 +1,39 @@ +# Components + +### Boots + +Handles DHCP requests, hands out IPs, and serves up iPXE. It also uses the Tinkerbell client to pull and push hardware data. + +### Osie + +Installs operating systems and handles deprovisioning. + +### Tinkerbell + +Service responsible for processing workflows. It comprises a server and a CLI, which communicate over gRPC. The CLI is used to create a workflow along with its building blocks, i.e., a template and a target. + +### Hegel + +Metadata service used by Tinkerbell and Osie during provisioning. It collects data from Tinkerbell and transforms it into a JSON format to be consumed as metadata. + +### Database + +We use [PostgreSQL](https://www.postgresql.org/), also known as Postgres, as our data store. Postgres is a free and open-source relational database management system that emphasizes extensibility and technical standards compliance. It is designed to handle a range of workloads, from single machines to data warehouses or Web services with many concurrent users. 
+ +### Image Repository + +Depending on your use case, you can choose to use [Quay](https://quay.io/) or [DockerHub](https://hub.docker.com/) as the registry to store component images. You can use the same registry to store all of the action images used for a workflow. + +On the other hand, if you want to keep things local, you can also set up a secure private Docker registry to hold all your images locally. + +### Fluent Bit + +[Fluent Bit](https://fluentbit.io/) is an open source and multi-platform Log Processor and Forwarder which allows you to collect data/logs from different sources, unify and send them to multiple destinations. The components write their logs to `stdout` and these logs are then collected by Fluent Bit and pushed to Elasticsearch. + +### Elasticsearch + +[Elasticsearch](https://www.elastic.co/) is a distributed, open source search and analytics engine for all types of data, including textual, numerical, geospatial, structured, and unstructured. Fluent Bit collects the logs from each component and pushes them into Elasticsearch for storage and analysis purposes. + +### Kibana + +[Kibana](https://www.elastic.co/kibana) lets you visualize your Elasticsearch data and navigate the Elastic Stack so you can do anything from tracking query load to understanding the way requests flow through your apps. \ No newline at end of file diff --git a/docs/concepts.md b/docs/concepts.md new file mode 100644 index 000000000..f4ed32cfa --- /dev/null +++ b/docs/concepts.md @@ -0,0 +1,107 @@ +# Concepts + +### Template + +A template is a Go template based definition that defines the overall flow of a workflow. A user must write a template based on a valid template format. A template can consist of custom variables, which can be substituted before execution. For example, a target is defined separately and is substituted in a template at the time of creating a workflow. + +A template is stored as a blob in the database and is parsed later during the creation of a workflow. 
A user can CRUD a template using the CLI (`tinkerbell template`). Here is a sample workflow template: + +```yaml +version: '0.1' +name: ubuntu_provisioning +global_timeout: 6000 +tasks: +- name: "os-installation" + worker: "{{index .Targets "machine1" "mac_addr"}}" + volumes: + - /dev:/dev + - /dev/console:/dev/console + - /lib/firmware:/lib/firmware:ro + environment: + MIRROR_HOST: 192.168.1.2 + actions: + - name: "disk-wipe" + image: disk-wipe + timeout: 90 + - name: "disk-partition" + image: disk-partition + timeout: 600 + environment: + MIRROR_HOST: 192.168.1.3 + volumes: + - /statedir:/statedir + - name: "install-root-fs" + image: install-root-fs + timeout: 600 + - name: "install-grub" + image: install-grub + timeout: 600 + volumes: + - /statedir:/statedir +``` + +A template comprises Tasks, which are executed in a sequential manner. A task can consist of multiple Actions. As can be seen in the above example, a task supports volumes and environment variables. The volumes and environment variables defined for a particular task level are inherited by each action in that particular task. + +It is important to note that an action can also have its own volumes and environment variables. Therefore, any entry at the action level will overwrite the value defined at the task level. For example, in the above template the `MIRROR_HOST` environment variable defined at action `disk-partition` will overwrite the value defined at task level. However, the other actions will receive the original value defined at the task level. + + +### Targets + +Targets are a mapping between the virtual worker name and the actual host. Currently we refer to targets by their MAC or IP address. 
Here is a sample target definition: + +```json +{ + "machine1": { + "ip_addr": "192.168.1.2" + }, + "machine2" : { + "mac_addr": "ca:00:64:b8:2d:00" + } +} +``` + +A target can be accessed in template like (refer above template): + +``` +{{ index .Targets "machine1" "ip_addr"}} +{{ index .Targets "machine2" "mac_addr"}} +``` + +### Provisioner + +The provisioner machine is the main driver for executing a workflow. A provisioner houses the following components: + - Database (Postgres) + - Tinkerbell (CLI and server) + - Boots + - Hegel + - Image Registry (optional) + - Elasticsearch + - Fluent Bit + - Kibana + - NGINX + +It is upto you if you would like to divide these components into multiple servers. + +### Worker + +Any node that has its data being pushed into Tinkerbell can become a part of a workflow. A worker can be a part of multiple workflows. + +When the node boots, a worker container starts and connects with provisioner to check if there is any task (may be from different workflows) that it can execute. After the completion of an action, the worker sends action status to provisioner. When all workflows which are related to a worker are complete, a worker can terminate. + + +### Ephemeral Data + +The workers that are part of a workflow might require to share some data. This can take the form of a light JSON like below, or some binary files that other workers might require to complete their action. 
For instance, a worker may add the following data: + +```json + {"operating_system": "ubuntu_18_04", "mac_addr": "F5:C9:E2:99:BD:9B", "instance_id": "123e4567-e89b-12d3-a456-426655440000"} +``` + +The other worker may retrieve and use this data and eventually add some more: + +```json +{"operating_system": "ubuntu_18_04", "mac_addr": "F5:C9:E2:99:BD:9B", "instance_id": "123e4567-e89b-12d3-a456-426655440000", "ip_addresses": [{"address_family": 4, "address": "172.27.0.23", "cidr": 31, "private": true}]} +``` +![](img/ephemeral-data.png) + + diff --git a/docs/first-good-workflow.md b/docs/first-good-workflow.md new file mode 100644 index 000000000..00512cc22 --- /dev/null +++ b/docs/first-good-workflow.md @@ -0,0 +1,49 @@ +# First Good Workflow (Example) + +Here is an example workflow that provisions a worker machine with Ubuntu 18.04. + +### Prerequisite + +You have a setup ready with a Provisioner and a Worker node. If not, please follow the [steps](setup.md) here to complete the setup. 
+ + +### Steps + - Login to the `provisioner` machine + - Copy the following folders/files from `test-provisioner` machine to the current machine at the `/packet/nginx/misc/osie/current/` location: + ```shell + /var/www/html/misc/osie/current/grub/ + /var/www/html/misc/osie/current/ubuntu_18_04 + /var/www/html/misc/osie/current/modloop-x86_64 + ``` + - Change directory to `tinkerbell`: + ```shell + $ cd ~/go/src/github.com/packethost/tinkerbell/ + ``` + - switch to `first-good-workflow` branch + ```shell + $ git checkout first-good-workflow + ``` + - Replace the MAC addresses in `push.sh` file with the MAC address of `tf-worker` machine which you have got in the terraform output + - Push the hardware data + ```shell + $ ./push.sh + ``` + - Create action images and push them in to the private registry: + ```shell + $ cd ~/go/src/github.com/packethost/tinkerbell/workflow-samples/ubuntu/v3/ + $ ./create_image.sh + ``` + - Create a target: + ```shell + $ tinkerbell target create '{"targets": {"machine1": {"mac_addr": ""}}}' + ``` + - Create a template: + ```shell + $ tinkerbell template create -n ubuntu-sample -p /root/go/src/github.com/packethost/tinkerbell/workflow-samples/ubuntu/ubuntu.tmpl + ``` + - Create a workflow: + ```shell + $ tinkerbell workflow create -t