New mapping #52

Merged: 14 commits, Apr 5, 2023

functions/allure_report/mapper.py (216 changes: 103 additions & 113 deletions)
@@ -21,16 +21,16 @@
-

 def get_test_human_name(file):

     exp = get_expectation_impl(get_test_name(file))
-    template_json = exp._prescriptive_renderer(configuration=ExpectationConfiguration(get_test_name(file), kwargs=get_params1(file)))[0]
+    template_json = \
+        exp._prescriptive_renderer(configuration=ExpectationConfiguration(get_test_name(file), kwargs=get_params1(file)))[0]
     if type(template_json) is not dict:
         template_json = template_json.to_json_dict()
     template_str = template_json['string_template']['template']
     params = get_params1(file)
     result_string = template_str
     new_params = {}
-    for key,value in params.items():
+    for key, value in params.items():
         if type(value) == list:
             if key == 'value_set':
                 for i in value:
@@ -57,15 +57,14 @@ def get_test_human_name(file):
     return result_string


-def get_json(json_name,validate_id):
+def get_json(json_name, validate_id):
     file_name = f"great_expectations/uncommitted/validations/{validate_id}.json"
     content_object = s3.Object(qa_bucket, f"{qa_bucket}/{file_name}")
     file_content = content_object.get()['Body'].read().decode('utf-8')
     json_content = json.loads(file_content)
     return json_content


-
 def get_suit_status():
     return "passed"

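For orientation (not part of the diff): get_json reads a validation result that Great Expectations has already written to S3, and the object key it builds is prefixed with the bucket name. A minimal sketch with hypothetical values; s3 and qa_bucket are module-level objects defined elsewhere in this file:

qa_bucket = "my-qa-bucket"                  # hypothetical bucket name
validate_id = "20230405T120000.000000Z"     # hypothetical validation id
file_name = f"great_expectations/uncommitted/validations/{validate_id}.json"
# get_json() then reads s3.Object(qa_bucket, f"{qa_bucket}/{file_name}"),
# i.e. the object key repeats the bucket name:
# my-qa-bucket/great_expectations/uncommitted/validations/20230405T120000.000000Z.json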
@@ -75,23 +74,25 @@ def get_test_name(file):


 def get_suit_name(file, i):
-    return file['meta']['batch_kwargs']['data_asset_name'] + "." + i['expectation_config']['kwargs']['column'] if 'column' in i['expectation_config']['kwargs'] else file['meta']['batch_kwargs']['data_asset_name']
+    return f"{file['meta']['batch_kwargs']['data_asset_name']}.{i['expectation_config']['kwargs']['column']}" if "column" in \
+            i[
+                "expectation_config"][
+                "kwargs"] else \
+        file["meta"]["batch_kwargs"]["data_asset_name"]


 def get_jira_ticket(file):
     if 'Bug Ticket' in file['expectation_config']['meta']:

         return {
-        "name": "Bug ticket",
-        "url": file['expectation_config']['meta']['Bug Ticket'],
-        "type": "issue"
-        }
+            "name": "Bug ticket",
+            "url": file['expectation_config']['meta']['Bug Ticket'],
+            "type": "issue"
+        }
     else:
         return {}


-
-
 def get_severity(file):
     return file['expectation_config']['meta']['Severity'] if 'Severity' in file['expectation_config']['meta'] else ""

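For orientation (not part of the diff): get_jira_ticket and get_severity only inspect optional keys of the expectation's meta block. A minimal sketch of the shape they expect; the column, ticket URL and severity value below are hypothetical:

i = {
    "expectation_config": {
        "kwargs": {"column": "user_id"},                      # hypothetical column
        "meta": {
            "Bug Ticket": "https://jira.example.com/QA-123",  # hypothetical ticket URL
            "Severity": "critical",                           # hypothetical severity
        },
    },
}
# get_jira_ticket(i) -> {"name": "Bug ticket", "url": "https://jira.example.com/QA-123", "type": "issue"}
# get_severity(i)    -> "critical"
# Both fall back to {} / "" when the keys are missing.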
@@ -105,7 +106,8 @@ def get_stop_suit_time():


 def parse_datetime(date_str):
-    return datetime.timestamp(datetime. strptime(date_str, '%Y%m%dT%H%M%S.%fZ'))*1000
+    return datetime.timestamp(datetime.strptime(date_str, '%Y%m%dT%H%M%S.%fZ')) * 1000
+

 def get_start_test_time(file):
     return parse_datetime(file['meta']['run_id']['run_name'])
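For orientation (not part of the diff): the run_name stored in the validation's run_id is expected to match the '%Y%m%dT%H%M%S.%fZ' pattern, and parse_datetime turns it into the millisecond timestamps used for the Allure start/stop fields further down. A small sketch with a hypothetical run name; note that timestamp() interprets the naive datetime in the local timezone:

from datetime import datetime

run_name = "20230405T120000.000000Z"                     # hypothetical file['meta']['run_id']['run_name']
dt = datetime.strptime(run_name, '%Y%m%dT%H%M%S.%fZ')    # -> datetime(2023, 4, 5, 12, 0)
millis = datetime.timestamp(dt) * 1000                   # epoch milliseconds (local-time based)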
@@ -120,91 +122,90 @@ def get_params(file):
     del params['result_format']
     result = []
     for param in params:
-        result.append({"name": param, "value": str(params[param])}) if isinstance(params[param], list) else result.append({"name": param, "value": params[param]})
+        result.append({"name": param, "value": str(params[param])}) if isinstance(params[param],
+                                                                                   list) else result.append(
+            {"name": param, "value": params[param]})
     return result


 def get_params1(file):
     params = file['expectation_config']['kwargs']
     # del params['result_format']
     return params


 def get_test_status(file):
     return "passed" if file['success'] is True else "failed"


 def get_test_description(file):
     result = ""
-    for f in file['result']:
-        if str(f)!='observed_value':
-            result = result +"\n" + str(f) + ": " + str(file['result'][f])+"\n"
+    for f in file["result"]:
+        if str(f) != "observed_value":
+            result = result + "\n" + f"{str(f)}: {str(file['result'][f])}" + "\n"
     return result


 def get_observed_value(file):
     try:
-        return "Observed value: "+str(file['result']['observed_value']) if 'observed_value' in file['result'] else "Unexpected count: "+str(file['result']['unexpected_count'])
+        return f"Observed value: {str(file['result']['observed_value'])}" if "observed_value" in file[
+            "result"] else f"Unexpected count: {str(file['result']['unexpected_count'])}"
    except KeyError:
         return 'Column not exist'


 def get_exception_message(file):
     return file['exception_info']['exception_message']


 def get_exception_traceback(file):
     return file['exception_info']['exception_traceback']


-def get_folder_key(folder,folder_key):
-
-
-    folder = folder + str(folder_key) + '/'
+def get_folder_key(folder, folder_key):
+    folder = f"{folder}{str(folder_key)}/"
     bucket.put_object(Key=folder)

     return folder_key


-def create_categories_json(json_name,key):
+def create_categories_json(json_name, key):
     data = [
-            {
-                "name": "Ignored tests",
-                "matchedStatuses": [
-                    "skipped"
-                ]
-            },
-            {
-                "name": "Passed tests",
-                "matchedStatuses": [
-                    "passed"
-                ]
-            },
-            {
-                "name": "Broken tests",
-                "matchedStatuses": [
-                    "broken"
-                ]
-            },
-            {
-                "name": "Failed tests",
-                "matchedStatuses": [
-                    "failed"
-                ]
-            }
-        ]
+        {
+            "name": "Ignored tests",
+            "matchedStatuses": [
+                "skipped"
+            ]
+        },
+        {
+            "name": "Passed tests",
+            "matchedStatuses": [
+                "passed"
+            ]
+        },
+        {
+            "name": "Broken tests",
+            "matchedStatuses": [
+                "broken"
+            ]
+        },
+        {
+            "name": "Failed tests",
+            "matchedStatuses": [
+                "failed"
+            ]
+        }
+    ]

     result = json.dumps(data)
-    # with open("dags/reportsx/categories.json", "w") as file:
-    s3.Object(qa_bucket, "allure/"+json_name+key+"/result/categories.json").put(Body=bytes(result.encode('UTF-8')))
+    s3.Object(qa_bucket, f"allure/{json_name}{key}/result/categories.json").put(Body=bytes(result.encode("UTF-8")))


-
-def get_uuid(i, json_name,key):
+def get_uuid(i, json_name, key):
     fl = ""
-    objs = list(bucket.objects.filter(Prefix='allure/'+json_name+key+'/allure-report/history'))
-    if(len(objs)>0):
+    objs = list(bucket.objects.filter(Prefix=f"allure/{json_name}{key}/allure-report/history"))
+    if (len(objs) > 0):

-        df = wr.s3.read_json(path=['s3://'+qa_bucket+'/allure/'+json_name+key+'/allure-report/history/history.json'])
+        df = wr.s3.read_json(path=[f"s3://{qa_bucket}/allure/{json_name}{key}/allure-report/history/history.json"])

         fl = json.loads(df.to_json())
         keys = list(fl.keys())
@@ -214,58 +215,55 @@ def get_uuid(i, json_name,key):
     return datetime.now().strftime("%S%f")


-def create_suit_json(json_name,key,validate_id):
-    bucket.put_object(Key="allure/"+json_name+key+"/result/")
-
+def create_suit_json(json_name, key, validate_id):
+    bucket.put_object(Key=f"allure/{json_name}{key}/result/")

-    file = get_json(json_name,validate_id)
+    file = get_json(json_name, validate_id)
     start_time = get_start_suit_time(file)
     stop_time = get_stop_test_time(file)
-    # for i in range(len(file['results'])):
     for i in file['results']:
         uuid = str(get_uuid(list(file['results']).index(i), json_name, key))
         data = {
-        "uuid": uuid,
-        "historyId": uuid,
-        "status": get_test_status(i),
-        "parameters": get_params(i),
-        "labels": [{
-            "name": "test",
-            "value": get_test_name(i)
-        }, {
-            "name": "suite",
-            "value": get_suit_name(file,i)
-        },
-            {
-                "name": "severity",
-                "value": get_severity(i)
-            }
-        ],
-        "links": [get_jira_ticket(i)],
-        "name": get_test_name(i),
-        "description": get_test_description(i),
-        "statusDetails": {"known": False, "muted": False, "flaky": False,
-                          "message": get_observed_value(i) if get_test_status(i)=='failed' else "",
-                          "trace": get_exception_traceback(i)},
-        "start": start_time,
-        "stop": stop_time,
-        "steps": [
-            {
-                "status": get_test_status(i),
-                "name": get_test_human_name(i),
-                "start": get_start_test_time(file),
-                "stop": get_stop_test_time(file)
-            }]
-        }
-
+            "uuid": uuid,
+            "historyId": uuid,
+            "status": get_test_status(i),
+            "parameters": get_params(i),
+            "labels": [{
+                "name": "test",
+                "value": get_test_name(i)
+            }, {
+                "name": "suite",
+                "value": get_suit_name(file, i)
+            },
+                {
+                    "name": "severity",
+                    "value": get_severity(i)
+                }
+            ],
+            "links": [get_jira_ticket(i)],
+            "name": get_test_name(i),
+            "description": get_test_description(i),
+            "statusDetails": {"known": False, "muted": False, "flaky": False,
+                              "message": get_observed_value(i) if get_test_status(i) == 'failed' else "",
+                              "trace": get_exception_traceback(i)},
+            "start": start_time,
+            "stop": stop_time,
+            "steps": [
+                {
+                    "status": get_test_status(i),
+                    "name": get_test_human_name(i),
+                    "start": get_start_test_time(file),
+                    "stop": get_stop_test_time(file)
+                }]
+        }

         result = json.dumps(data)

-        s3.Object(qa_bucket, "allure/"+json_name+key+"/result/"+uuid+"-result.json").put(Body=bytes(result.encode('UTF-8')))
-
+        s3.Object(qa_bucket, f"allure/{json_name}{key}/result/{uuid}-result.json").put(
+            Body=bytes(result.encode("UTF-8")))


-def transfer_folder(root_src_dir,root_dst_dir):
+def transfer_folder(root_src_dir, root_dst_dir):
     for src_dir, dirs, files in os.walk(root_src_dir):
         dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
         if not os.path.exists(dst_dir):
@@ -281,16 +279,8 @@ def transfer_folder(root_src_dir,root_dst_dir):
             shutil.copy(src_file, dst_dir)


-
-
-
-def create_json_report(json_name,cloudfront,folder_key,validate_id):
-    key = "/"+get_folder_key("allure/"+json_name+"/",folder_key)
-    create_suit_json(json_name,key,validate_id)
-    create_categories_json(json_name,key)
-    return cloudfront+"/allure/"+json_name+key+"/allure-report/index.html", json_name+key
-
-
-
-
-
+def create_json_report(json_name, cloudfront, folder_key, validate_id):
+    key = "/" + get_folder_key(f"allure/{json_name}/", folder_key)
+    create_suit_json(json_name, key, validate_id)
+    create_categories_json(json_name, key)
+    return f"{cloudfront}/allure/{json_name}{key}/allure-report/index.html", json_name + key