azure-pipelines.yml
pr: none
trigger: none
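# Manual-run pipeline: downloads a recipient CSV from blob storage, splits it into
# chunks of at most MAX_ROWS_FILE rows, re-uploads the chunks, and triggers a Logic App once per chunk.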
parameters:
- name: 'INPUT_FILE_NAME'
  displayName: 'Input file name'
  type: string
  default: 'bulk-message/beta_testers.csv'
- name: 'API_KEY'
  displayName: 'Api key'
  type: string
- name: 'MESSAGE_FILE_NAME'
  displayName: 'Message file name'
  type: string
- name: 'MAX_ROWS_FILE'
  displayName: 'How many rows each split file will contain (a lower value increases delivery speed; the maximum is 100000)'
  type: string
  default: '50000'
pool:
  vmImage: ubuntu-latest
steps:
- bash: |
    TIMESTAMP=$(date +%s)
    BASE_PATH="bulk-message"
    echo "TIMESTAMP: ${TIMESTAMP}"
    echo "INPUT_FILE_NAME: ${ENV_INPUT_FILE_NAME}"
    echo "MESSAGE_FILE_NAME: ${ENV_MESSAGE_FILE_NAME}"
    echo "MAX_ROWS_FILE: ${ENV_MAX_ROWS_FILE}"
    az storage blob download \
      --account-name iopstexportdata \
      --container-name input \
      --name "${ENV_INPUT_FILE_NAME}" \
      --file "${TIMESTAMP}.txt" \
      --account-key "${ENV_STORAGE_KEY}"
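    # Split the downloaded file into chunks of at most MAX_ROWS_FILE lines each (output/splitted_aa, splitted_ab, ...)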
    mkdir -p output
    split --lines="${ENV_MAX_ROWS_FILE}" "${TIMESTAMP}.txt" "output/splitted_"
    wc -l output/splitted_*
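    # Let the CLI auto-install any extension it needs, then upload the chunks under ${BASE_PATH}/${TIMESTAMP}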
    az config set extension.use_dynamic_install=yes_without_prompt
    az storage fs directory upload \
      --account-name iopstexportdata \
      --file-system input \
      --source "output/splitted_*" \
      --destination-path "${BASE_PATH}/${TIMESTAMP}" \
      --recursive \
      --account-key "${ENV_STORAGE_KEY}"
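    # Call the Logic App once per chunk, passing the blob path, API key and message file name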
    cd output
    for filename in splitted_*; do
      [ -e "$filename" ] || continue
      sleep 2
      echo "${BASE_PATH}/${TIMESTAMP}/${filename}"
      curl --location --request POST "${ENV_LOGIC_APP_URL}" \
        --header 'Content-Type: application/json' \
        --data-raw '{
          "filename": "/'"${BASE_PATH}/${TIMESTAMP}/${filename}"'",
          "api_key": "'"${ENV_API_KEY}"'",
          "message_filename": "'"${ENV_MESSAGE_FILE_NAME}"'"
        }'
    done
  env:
    ENV_INPUT_FILE_NAME: '${{ parameters.INPUT_FILE_NAME }}'
    ENV_MAX_ROWS_FILE: '${{ parameters.MAX_ROWS_FILE }}'
    ENV_MESSAGE_FILE_NAME: '${{ parameters.MESSAGE_FILE_NAME }}'
    ENV_API_KEY: '${{ parameters.API_KEY }}'
    ENV_STORAGE_KEY: '$(STORAGE_KEY)'
    ENV_LOGIC_APP_URL: '$(LOGIC_APP_URL)'
  displayName: 'split and trigger logic app'
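# Remove the downloaded file and the split output from the agent, even if the previous step failed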
- bash: |
    rm -rf *.txt
    rm -rf output
  condition: succeededOrFailed()
  displayName: 'clean up'
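
# Usage sketch for queuing this pipeline from the command line (assumptions: the Azure CLI
# with the azure-devops extension is available, and the pipeline is registered as
# 'bulk-message-split'; the pipeline name and message file name below are illustrative only):
#
#   az pipelines run --name bulk-message-split \
#     --parameters INPUT_FILE_NAME='bulk-message/beta_testers.csv' \
#                  API_KEY="$API_KEY" \
#                  MESSAGE_FILE_NAME='bulk-message/message.md' \
#                  MAX_ROWS_FILE='50000'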