Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

support added to remove files; force flag feature added for file share #2

Closed
wants to merge 23 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
81ba902
support added to remove files; force flag feature added for file share
prjain-msft May 11, 2018
a50d9c2
added support for canceling job by writing cancel on stdin
prjain-msft May 14, 2018
3afffd9
log level input changed to string
prjain-msft May 14, 2018
7e3fd31
fixed test cases for the recent change 'log level input changed to st…
prjain-msft May 15, 2018
e2671e7
enum added for log level
prjain-msft May 15, 2018
d94e0d5
fixed the test_force_flag_set_to_false_upload test case
prjain-msft May 15, 2018
d7c30dc
temp front-end changes
prjain-msft May 16, 2018
9c896d1
readstdinforcancel changes; log enum changes; changes in copy downloa…
prjain-msft May 17, 2018
f34115d
fixed compilation errors
prjain-msft May 17, 2018
d16493c
code refactored in copydownloadblobenumerator
prjain-msft May 18, 2018
f7fbf25
fixed blobNameMatchesThePattern for linux
prjain-msft May 22, 2018
6e48b7d
include/exclude flag added for copy / resume command; test case added…
prjain-msft May 24, 2018
98f5d01
fixed infer argument location api; enabled the test case in init test…
prjain-msft May 24, 2018
b7ce0b7
fixed blob download
prjain-msft May 24, 2018
0b3ad8d
wildcard support added in remove; test case added for removing with w…
prjain-msft May 24, 2018
a817efa
replace filepath.Match with regexp Match
prjain-msft May 25, 2018
58528e4
fix for blob name match with pattern on linux
prjain-msft May 25, 2018
8e096cf
wildcard in sync implemented
prjain-msft May 26, 2018
0d5777a
fixed cancel command
prjain-msft May 31, 2018
8d54cc0
delete blob/file/ local file in case of failed / cancelled transfer
prjain-msft May 31, 2018
12b68a8
changed the log file open and initialize code; avoid opening of log f…
prjain-msft Jun 1, 2018
d8f454e
changed the cancellation cases in CancelPauseJobOrder for handling th…
prjain-msft Jun 1, 2018
2af23a0
add environment variables holding the test suite configuration
prjain-msft Jun 4, 2018
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 38 additions & 2 deletions cmd/cancel.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,16 @@ import (
"fmt"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/spf13/cobra"
"bufio"
"os"
"strings"
)

// created a signal channel to receive the Interrupt and Kill signals sent to the OS
// this channel is shared by copy, resume, sync and an independent go routine reading stdin
// for cancel command
var CancelChannel = make(chan os.Signal, 1)

type rawCancelCmdArgs struct {
jobID string
}
Expand All @@ -51,11 +59,10 @@ type cookedCancelCmdArgs struct {
// process dispatches a CancelJob request over RPC for the job identified by
// cca.jobID. It returns a descriptive error when the back end reports that the
// job could not be cancelled (for example, the job already completed or the
// job ID is unknown); on success it returns nil and prints nothing, since
// status reporting is handled by the progress loop that observes the
// Cancelled job status.
func (cca cookedCancelCmdArgs) process() error {
	var cancelJobResponse common.CancelPauseResumeResponse
	Rpc(common.ERpcCmd.CancelJob(), cca.jobID, &cancelJobResponse)

	if !cancelJobResponse.CancelledPauseResumed {
		return fmt.Errorf("job cannot be cancelled because %s", cancelJobResponse.ErrorMsg)
	}
	return nil
}

Expand Down Expand Up @@ -95,3 +102,32 @@ func init() {
}
rootCmd.AddCommand(cancelCmd)
}

// ReadStandardInputToCancelJob is a function that reads the standard Input
// If Input given is "cancel", it cancels the current job.
func ReadStandardInputToCancelJob(cancelChannel chan <- os.Signal) {
for {
consoleReader := bufio.NewReader(os.Stdin)
// ReadString reads input until the first occurrence of \n in the input,
input, err := consoleReader.ReadString('\n')
if err != nil {
return
}

//remove the delimiter "\n"
input = strings.Trim(input, "\n")
// remove trailing white spaces
input = strings.Trim(input, " ")
// converting the input characters to lower case characters
// this is done to avoid case sensitiveness.
input = strings.ToLower(input)

switch input {
case "cancel":
// send a kill signal to the cancel channel.
cancelChannel <- os.Kill
default:
panic(fmt.Errorf("command %s not supported by azcopy", input))
}
}
}
83 changes: 63 additions & 20 deletions cmd/copy.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,11 +33,10 @@ import (
"sync"
"sync/atomic"
"time"

"github.com/Azure/azure-pipeline-go/pipeline"
"github.com/Azure/azure-storage-azcopy/common"
"github.com/Azure/azure-storage-blob-go/2017-07-29/azblob"
"github.com/spf13/cobra"
"strings"
)

// upload related
Expand Down Expand Up @@ -65,6 +64,7 @@ type rawCopyCmdArgs struct {
//blobUrlForRedirection string

// filters from flags
include string
exclude string
recursive bool
followSymlinks bool
Expand All @@ -85,7 +85,7 @@ type rawCopyCmdArgs struct {
background bool
outputJson bool
acl string
logVerbosity byte
logVerbosity string
}

// validates and transform raw input into cooked input
Expand All @@ -102,7 +102,6 @@ func (raw rawCopyCmdArgs) cook() (cookedCopyCmdArgs, error) {
cooked.fromTo = fromTo

// copy&transform flags to type-safety
cooked.exclude = raw.exclude
cooked.recursive = raw.recursive
cooked.followSymlinks = raw.followSymlinks
cooked.withSnapshots = raw.withSnapshots
Expand All @@ -117,6 +116,43 @@ func (raw rawCopyCmdArgs) cook() (cookedCopyCmdArgs, error) {
if err != nil {
return cooked, err
}
err = cooked.logVerbosity.Parse(raw.logVerbosity)
if err != nil{
return cooked, err
}

// initialize the include map which contains the list of files to be included
// parse the string passed in include flag
// more than one file are expected to be separated by ';'
cooked.include = make(map[string]int)
if len(raw.include) > 0 {
files := strings.Split(raw.include, ";")
for index := range files {
// If split of the include string leads to an empty string
// not include that string
if len(files[index]) == 0 {
continue
}
cooked.include[files[index]] = index
}
}

// initialize the exclude map which contains the list of files to be excluded
// parse the string passed in exclude flag
// more than one file are expected to be separated by ';'
cooked.exclude = make(map[string]int)
if len(raw.exclude) > 0 {
files := strings.Split(raw.exclude, ";")
for index := range files {
// If split of the include string leads to an empty string
// not include that string
if len(files[index]) == 0 {
continue
}
cooked.exclude[files[index]] = index
}
}

cooked.metadata = raw.metadata
cooked.contentType = raw.contentType
cooked.contentEncoding = raw.contentEncoding
Expand All @@ -125,8 +161,6 @@ func (raw rawCopyCmdArgs) cook() (cookedCopyCmdArgs, error) {
cooked.background = raw.background
cooked.outputJson = raw.outputJson
cooked.acl = raw.acl
cooked.logVerbosity = common.LogLevel(raw.logVerbosity)

return cooked, nil
}

Expand All @@ -138,7 +172,8 @@ type cookedCopyCmdArgs struct {
fromTo common.FromTo

// filters from flags
exclude string
include map[string]int
exclude map[string]int
recursive bool
followSymlinks bool
withSnapshots bool
Expand Down Expand Up @@ -367,6 +402,8 @@ func (cca cookedCopyCmdArgs) processCopyJobPartOrders() (err error) {
ForceWrite: cca.forceWrite,
Priority: common.EJobPriority.Normal(),
LogLevel: cca.logVerbosity,
Include:cca.include,
Exclude:cca.exclude,
BlobAttributes: common.BlobTransferAttributes{
BlockSizeInBytes: cca.blockSize,
ContentType: cca.contentType,
Expand Down Expand Up @@ -400,7 +437,11 @@ func (cca cookedCopyCmdArgs) processCopyJobPartOrders() (err error) {
err = e.enumerate(cca.src, cca.recursive, cca.dst, &wg, cca.waitUntilJobCompletion)
lastPartNumber = e.PartNum
case common.EFromTo.BlobTrash():
e := removeEnumerator(jobPartOrder)
e := removeBlobEnumerator(jobPartOrder)
err = e.enumerate(cca.src, cca.recursive, cca.dst, &wg, cca.waitUntilJobCompletion)
lastPartNumber = e.PartNum
case common.EFromTo.FileTrash():
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure if this word is suitable; it would be better to choose a friendlier term.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm OK with Trash. Do you want to suggest a different word?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I remember Microsoft has TSA, which can help scan for terms that are not suitable to use, and I'm not sure whether "Trash" would pass that scan. I know Windows uses "Recycle Bin" for deleted files. What about using "Recycle"?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't care too much about this but I'd be OK with BlobRemove/FileRemove. This solidifies the link between Remove and removeEnumerator.

e := removeFileEnumerator(jobPartOrder)
err = e.enumerate(cca.src, cca.recursive, cca.dst, &wg, cca.waitUntilJobCompletion)
lastPartNumber = e.PartNum
}
Expand Down Expand Up @@ -428,26 +469,26 @@ func (cca cookedCopyCmdArgs) processCopyJobPartOrders() (err error) {
}

func (cca cookedCopyCmdArgs) waitUntilJobCompletion(jobID common.JobID, wg *sync.WaitGroup) {
// created a signal channel to receive the Interrupt and Kill signal send to OS
cancelChannel := make(chan os.Signal, 1)
// cancelChannel will be notified when os receives os.Interrupt and os.Kill signals
signal.Notify(cancelChannel, os.Interrupt, os.Kill)

// waiting for signals from either cancelChannel or timeOut Channel.
// CancelChannel will be notified when os receives os.Interrupt and os.Kill signals
signal.Notify(CancelChannel, os.Interrupt, os.Kill)

// waiting for signals from either CancelChannel or timeOut Channel.
// if no signal received, will fetch/display a job status update then sleep for a bit
startTime := time.Now()
bytesTransferredInLastInterval := uint64(0)
for {
select {
case <-cancelChannel:
fmt.Println("Cancelling Job")
cookedCancelCmdArgs{jobID: jobID}.process()
os.Exit(1)
case <-CancelChannel:
err := cookedCancelCmdArgs{jobID: jobID}.process()
if err != nil {
fmt.Println(fmt.Sprintf("error occurred while cancelling the job %s. Failed with error %s", jobID, err.Error()))
os.Exit(1)
}
default:
jobStatus := copyHandlerUtil{}.fetchJobStatus(jobID, &startTime, &bytesTransferredInLastInterval, cca.outputJson)

// happy ending to the front end
if jobStatus == common.EJobStatus.Completed() {
if jobStatus == common.EJobStatus.Completed() || jobStatus == common.EJobStatus.Cancelled() {
os.Exit(0)
}

Expand Down Expand Up @@ -532,6 +573,8 @@ Usage:

// define the flags relevant to the cp command
// filters
cpCmd.PersistentFlags().StringVar(&raw.include, "include", "", "Filter: only include these files when copying. " +
"Support use of *. More than one file are separated by ';'")
cpCmd.PersistentFlags().StringVar(&raw.exclude, "exclude", "", "Filter: Exclude these files when copying. Support use of *.")
cpCmd.PersistentFlags().BoolVar(&raw.recursive, "recursive", false, "Filter: Look into sub-directories recursively when uploading from local file system.")
cpCmd.PersistentFlags().BoolVar(&raw.followSymlinks, "follow-symlinks", false, "Filter: Follow symbolic links when uploading from local file system.")
Expand All @@ -550,5 +593,5 @@ Usage:
cpCmd.PersistentFlags().BoolVar(&raw.background, "background-op", false, "true if user has to perform the operations as a background operation")
cpCmd.PersistentFlags().BoolVar(&raw.outputJson, "output-json", false, "true if user wants the output in Json format")
cpCmd.PersistentFlags().StringVar(&raw.acl, "acl", "", "Access conditions to be used when uploading/downloading from Azure Storage.")
cpCmd.PersistentFlags().Uint8Var(&raw.logVerbosity, "Logging", uint8(pipeline.LogWarning), "defines the log verbosity to be saved to log file")
cpCmd.PersistentFlags().StringVar(&raw.logVerbosity, "Logging", "None", "defines the log verbosity to be saved to log file")
}
Loading