From cdd49f847bf716c52d43eab5eb916677df00915f Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Mon, 26 Feb 2024 19:28:57 +0100 Subject: [PATCH 01/24] Add retry logic for HTTP requests --- businessCentral/app/src/Http.Codeunit.al | 41 ++++++++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/businessCentral/app/src/Http.Codeunit.al b/businessCentral/app/src/Http.Codeunit.al index 6e3488c..bce589b 100644 --- a/businessCentral/app/src/Http.Codeunit.al +++ b/businessCentral/app/src/Http.Codeunit.al @@ -109,7 +109,12 @@ codeunit 82563 "ADLSE Http" HttpContent: HttpContent; HeaderKey: Text; HeaderValue: Text; + MaxRetries: Integer; + RetryCount: Integer; begin + MaxRetries := 5; + RetryCount := 0; + HttpClient.SetBaseAddress(Url); if not AddAuthorization(HttpClient, Response) then exit(false); @@ -131,7 +136,17 @@ codeunit 82563 "ADLSE Http" HttpRequestMessage.Method('PUT'); HttpRequestMessage.SetRequestUri(Url); AddContent(HttpContent); - HttpClient.Put(Url, HttpContent, HttpResponseMessage); + while RetryCount < MaxRetries do begin + HttpClient.Put(Url, HttpContent, HttpResponseMessage); + if HttpResponseMessage.IsSuccessStatusCode() then + exit + else begin + RetryCount += 1; + if RetryCount >= MaxRetries then + exit; + Sleep(5000); + end; + end; end; "ADLSE Http Method"::Delete: HttpClient.Delete(Url, HttpResponseMessage); @@ -141,13 +156,33 @@ codeunit 82563 "ADLSE Http" HttpRequestMessage.SetRequestUri(Url); AddContent(HttpContent); HttpRequestMessage.Content(HttpContent); - HttpClient.Send(HttpRequestMessage, HttpResponseMessage); + while RetryCount < MaxRetries do begin + HttpClient.Send(HttpRequestMessage, HttpResponseMessage); + if HttpResponseMessage.IsSuccessStatusCode() then + exit + else begin + RetryCount += 1; + if RetryCount >= MaxRetries then + exit; + Sleep(5000); + end; + end; end; "ADLSE Http Method"::Head: begin HttpRequestMessage.Method('HEAD'); HttpRequestMessage.SetRequestUri(Url); - HttpClient.Send(HttpRequestMessage, HttpResponseMessage); + while RetryCount < MaxRetries do begin + HttpClient.Send(HttpRequestMessage, HttpResponseMessage); + if HttpResponseMessage.IsSuccessStatusCode() then + exit + else begin + RetryCount += 1; + if RetryCount >= MaxRetries then + exit; + Sleep(5000); + end; + end; end; else Error(UnsupportedMethodErr, HttpMethod); From 5bb0012fd9009fff332e414e4d8fe7c6b10f44e5 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Mon, 26 Feb 2024 22:03:59 +0100 Subject: [PATCH 02/24] Implement ResetTableExport function --- businessCentral/app/src/Communication.Codeunit.al | 11 +++++++++++ businessCentral/app/src/Setup.Page.al | 5 +++++ businessCentral/app/src/Setup.Table.al | 4 ++++ businessCentral/app/src/Table.Table.al | 6 ++++++ 4 files changed, 26 insertions(+) diff --git a/businessCentral/app/src/Communication.Codeunit.al b/businessCentral/app/src/Communication.Codeunit.al index 08262fe..6cb5f9e 100644 --- a/businessCentral/app/src/Communication.Codeunit.al +++ b/businessCentral/app/src/Communication.Codeunit.al @@ -32,6 +32,7 @@ codeunit 82562 "ADLSE Communication" EntitySchemaChangedErr: Label 'The schema of the table %1 has changed. %2', Comment = '%1 = Entity name, %2 = NotAllowedOnSimultaneousExportTxt'; CdmSchemaChangedErr: Label 'There may have been a change in the tables to export. 
%1', Comment = '%1 = NotAllowedOnSimultaneousExportTxt'; MSFabricUrlTxt: Label 'https://onelake.dfs.fabric.microsoft.com/%1/%2.Lakehouse/Files', Locked = true, Comment = '%1: Workspace name, %2: Lakehouse Name'; + ResetTableExportTxt: Label '/reset/%1.txt', Locked = true, comment = '%1 = Table name'; procedure SetupBlobStorage() var @@ -353,4 +354,14 @@ codeunit 82562 "ADLSE Communication" if ADLSESetup.GetStorageType() = ADLSESetup."Storage Type"::"Azure Data Lake" then ADLSEGen2Util.ReleaseBlob(BlobPath, ADLSECredentials, LeaseID); end; + + procedure ResetTableExport(ltableId: Integer) + var + ADLSEUtil: Codeunit "ADLSE Util"; + ADLSEGen2Util: Codeunit "ADLSE Gen 2 Util"; + Body: JsonObject; + begin + ADLSECredentials.Init(); + ADLSEGen2Util.CreateOrUpdateJsonBlob(GetBaseUrl() + StrSubstNo(ResetTableExportTxt, ADLSEUtil.GetDataLakeCompliantTableName(TableID)), ADLSECredentials, '', Body); + end; } diff --git a/businessCentral/app/src/Setup.Page.al b/businessCentral/app/src/Setup.Page.al index a54ddc8..ce4261e 100644 --- a/businessCentral/app/src/Setup.Page.al +++ b/businessCentral/app/src/Setup.Page.al @@ -148,6 +148,11 @@ page 82560 "ADLSE Setup" { ToolTip = 'Specifies if the enums will be exported as integers instead of strings. This is useful if you want to use the enums in Power BI.'; } + field("Delete Table"; Rec."Delete Table") + { + ToolTip = 'Specifies if the table will be deleted if a reset of the table is done.'; + Editable = not AzureDataLake; + } } } part(Tables; "ADLSE Setup Tables") diff --git a/businessCentral/app/src/Setup.Table.al b/businessCentral/app/src/Setup.Table.al index 9744ae7..1bab103 100644 --- a/businessCentral/app/src/Setup.Table.al +++ b/businessCentral/app/src/Setup.Table.al @@ -129,6 +129,10 @@ table 82560 "ADLSE Setup" Error(ErrorInfo.Create(NoSchemaExportedErr, true)); end; } + field(50; "Delete Table"; Boolean) + { + Caption = 'Delete table'; + } } keys diff --git a/businessCentral/app/src/Table.Table.al b/businessCentral/app/src/Table.Table.al index a587596..f01ea68 100644 --- a/businessCentral/app/src/Table.Table.al +++ b/businessCentral/app/src/Table.Table.al @@ -169,6 +169,8 @@ table 82561 "ADLSE Table" var ADLSEDeletedRecord: Record "ADLSE Deleted Record"; ADLSETableLastTimestamp: Record "ADLSE Table Last Timestamp"; + ADLSESetup: Record "ADLSE Setup"; + ADLSECommunication: Codeunit "ADLSE Communication"; Counter: Integer; begin if Rec.FindSet(true) then @@ -182,6 +184,10 @@ table 82561 "ADLSE Table" ADLSEDeletedRecord.SetRange("Table ID", Rec."Table ID"); ADLSEDeletedRecord.DeleteAll(); + ADLSESetup.GetSingleton(); + if ADLSESetup."Delete Table" then + ADLSECommunication.ResetTableExport(Rec."Table ID"); + OnAfterResetSelected(Rec); Counter += 1; From 5b1eafaf25e90a1c23f872eb98c306c905a7a228 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 15:17:41 +0100 Subject: [PATCH 03/24] Fix table ID variable in ADLSEGen2Util.CreateOrUpdateJsonBlob method --- businessCentral/app/src/Communication.Codeunit.al | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/businessCentral/app/src/Communication.Codeunit.al b/businessCentral/app/src/Communication.Codeunit.al index 6cb5f9e..1a2b9e7 100644 --- a/businessCentral/app/src/Communication.Codeunit.al +++ b/businessCentral/app/src/Communication.Codeunit.al @@ -362,6 +362,6 @@ codeunit 82562 "ADLSE Communication" Body: JsonObject; begin ADLSECredentials.Init(); - ADLSEGen2Util.CreateOrUpdateJsonBlob(GetBaseUrl() + StrSubstNo(ResetTableExportTxt, 
ADLSEUtil.GetDataLakeCompliantTableName(TableID)), ADLSECredentials, '', Body); + ADLSEGen2Util.CreateOrUpdateJsonBlob(GetBaseUrl() + StrSubstNo(ResetTableExportTxt, ADLSEUtil.GetDataLakeCompliantTableName(ltableId)), ADLSECredentials, '', Body); end; } From 6b14b1639cbfad7c87dff3618541bd90ca6f3df2 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 15:19:59 +0100 Subject: [PATCH 04/24] Adding reset functionality in the notebook --- fabric/CopyBusinessCentral.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fabric/CopyBusinessCentral.ipynb b/fabric/CopyBusinessCentral.ipynb index a82432f..821248b 100644 --- a/fabric/CopyBusinessCentral.ipynb +++ b/fabric/CopyBusinessCentral.ipynb @@ -1 +1 @@ -{"cells":[{"cell_type":"markdown","id":"ae53e9bf-8787-4d07-b709-d896fd16cc5f","metadata":{"editable":false,"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"source":["## Business Central merge data notebook\n","In this part the files in the delta folder will be merge with the Lakehouse table.\n","- It iterates first on the folders to append to the existing table.\n","- After that is will remove all duplicates by sorting the table. \n","- At last it will remove all deleted records inside the table that are deleted in Business Central\n","\n","Please change the parameters in the first part."]},{"cell_type":"code","execution_count":null,"id":"34dc5721-e317-4dc0-88ef-2c6bafb494da","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.6812441Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:06.8530455Z\",\"execution_finish_time\":\"2023-08-15T09:15:07.1828235Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","# settings\n","spark.conf.set(\"sprk.sql.parquet.vorder.enabled\",\"true\")\n","spark.conf.set(\"spark.microsoft.delta.optimizewrite.enabled\",\"true\")\n","spark.conf.set(\"spark.sql.parquet.filterPushdown\", \"true\")\n","spark.conf.set(\"spark.sql.parquet.mergeSchema\", \"false\")\n","spark.conf.set(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\")\n","spark.conf.set(\"spark.sql.delta.commitProtocol.enabled\", \"true\")\n","\n","# file paths\n","folder_path_spark = 'Files/deltas/' # this is mostly the default\n","folder_path_json = '/lakehouse/default/Files/' # this is mostly the default\n","folder_path = '/lakehouse/default/Files/deltas/' # this is mostly the default\n","\n","# parameters\n","workspace = 'fabricTest' #can also be a GUID\n","Lakehouse = 'businessCentral'; #can also be a GUID\n","Remove_delta = True; #will remove the delta files if everything is processed\n","Drop_table_if_mismatch = False; #option to drop the table if json file has different columns then in the table\n","no_Partition = 258 #how many partition is used in the dataframe, a good starting point might be 2-4 partitions per CPU core in your Spark cluster"]},{"cell_type":"code","execution_count":null,"id":"33ddc3d7","metadata":{},"outputs":[],"source":["%%pyspark\n","import os\n","import json\n","from pyspark.sql.types import *\n","\n","if Drop_table_if_mismatch:\n","\n"," def count_keys(obj): \n"," if isinstance(obj, dict): \n"," return len(obj) + sum(count_keys(v) for v in obj.values()) \n"," if isinstance(obj, list): \n"," return 
sum(count_keys(v) for v in obj) \n"," return 0 \n","\n"," for filename in os.listdir(folder_path_json):\n"," if \"manifest\" not in filename: # exclude the manifest files\n"," if filename.endswith(\".cdm.json\"):\n"," table_name = filename.replace(\"-\",\"\")\n"," table_name = table_name.replace(\".cdm.json\",\"\")\n","\n"," if table_name in [t.name for t in spark.catalog.listTables()]:\n"," #count number of columns in excisting table\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," df = spark.sql(SQL_Query)\n"," num_cols_table = len(df.columns) \n","\n"," #count number of columns in json file \n"," f = open(folder_path_json + filename)\n"," schema = json.load(f)\n"," has_attributes = schema[\"definitions\"][0][\"hasAttributes\"] \n"," num_names = len(has_attributes)\n","\n"," if num_cols_table != num_names:\n"," df = spark.sql(\"DROP TABLE IF EXISTS\"+ Lakehouse + \".\" + table_name)"]},{"cell_type":"code","execution_count":null,"id":"0594c099-6512-4777-82e2-9a3a058512fe","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.7249665Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:07.7601315Z\",\"execution_finish_time\":\"2023-08-15T09:15:18.128035Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","collapsed":false,"editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","import json\n","import os\n","import glob\n","from pyspark.sql.types import *\n","from pyspark.sql.utils import AnalysisException\n","from pyspark.sql.functions import col\n","from pyspark.sql.functions import desc\n","file_list = []\n","\n","for entry in os.scandir(folder_path):\n"," if entry.is_dir():\n","\n"," for filename in glob.glob(folder_path + entry.name + '/*'): \n"," table_name = entry.name.replace(\"-\",\"\")\n"," ContainsCompany = False\n"," df_new = spark.read.option(\"minPartitions\", no_Partition).format(\"csv\").option(\"header\",\"true\").load(folder_path_spark + entry.name +\"/*\") \n"," file_list.append(filename) #collect the imported filed in a list for deletion later on\n","\n"," f = open(folder_path_json + entry.name +\".cdm.json\")\n"," schema = json.load(f)\n"," # Parse the schema to get column names and data types\n"," column_names = [attr[\"name\"] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," if '$Company' in column_names:\n"," ContainsCompany = True\n"," column_types = [attr['dataFormat'] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," for col_name, col_type in zip(column_names, column_types):\n"," if col_type == \"String\":\n"," col_type = \"string\"\n"," if col_type == \"Guid\":\n"," col_type = \"string\"\n"," if col_type == \"Code\":\n"," col_type = \"object\"\n"," if col_type == \"Option\":\n"," col_type = \"string\"\n"," if col_type == \"Date\":\n"," col_type = \"date\"\n"," if col_type == \"Time\":\n"," col_type = \"string\"\n"," if col_type == \"DateTime\":\n"," col_type = \"date\"\n"," if col_type == \"Duration\":\n"," col_type = \"timedelta\"\n"," if col_type == \"Decimal\":\n"," col_type = \"float\"\n"," if col_type == \"Boolean\":\n"," col_type = \"boolean\"\n"," if col_type == \"Integer\":\n"," col_type = \"int\"\n"," if col_type == \"Int64\":\n"," col_type = \"int\"\n"," if col_type == \"Int32\":\n"," col_type = \"int\"\n","\n"," df_new = df_new.withColumn(col_name, 
df_new[col_name].cast(col_type))\n","\n","\n"," #check if the table excists\n"," if table_name in [t.name for t in spark.catalog.listTables()]: \n"," #read the old data into a new dataframe and union with the new dataframe\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," #print(SQL_Query)\n"," df_old = spark.sql(SQL_Query)\n"," df_new = df_new.union(df_old).repartition(no_Partition)\n","\n"," #delete all old records\n"," df_deletes = df_new.filter(df_new['SystemCreatedAt-2000000001'].isNull())\n"," if ContainsCompany:\n"," df_new = df_new.join(df_deletes, ['$Company','systemId-2000000000'], 'leftanti')\n"," else:\n"," df_new = df_new.join(df_deletes, ['systemId-2000000000'], 'leftanti')\n"," \n"," # remove duplicates by filtering on systemID and systemModifiedAt fields\n"," if ContainsCompany:\n"," df_new = df_new.orderBy('$Company','systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['$Company','systemId-2000000000'])\n"," else:\n"," df_new = df_new.orderBy('systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['systemId-2000000000'])\n"," \n"," #overwrite the dataframe in the new table\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name) \n"," else: \n"," #table isn't there so just insert it\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name)\n","\n"," #delete the files\n"," if Remove_delta:\n"," for filename in file_list: \n"," try: \n"," os.remove(filename) \n"," except OSError as e: # this would catch any error when trying to delete the file \n"," print(f\"Error: {filename} : {e.strerror}\")\n"," file_list = [] # clear the list"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"host":{"synapse_widget":{"state":{},"token":"a69b4b72-86b0-4373-b695-ef01cd53bbb1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","known_lakehouses":"[{\"id\":\"9fbacb3e-d0df-43a4-814b-abe4cb623a81\"}]"}}},"language":"python","ms_spell_check":{"ms_spell_check_language":"en"}},"notebook_environment":{},"nteract":{"version":"nteract-front-end@1.0.0"},"save_output":true,"spark_compute":{"compute_id":"/trident/default","session_options":{"conf":{},"enableDebugMode":false}},"synapse_widget":{"state":{},"version":"0.1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","default_lakehouse_name":"businessCentral","default_lakehouse_workspace_id":"21a92229-a0fb-4256-86bd-4b847b8006ed","known_lakehouses":[{"id":"9fbacb3e-d0df-43a4-814b-abe4cb623a81"}]}},"widgets":{}},"nbformat":4,"nbformat_minor":5} +{"cells":[{"cell_type":"markdown","id":"ae53e9bf-8787-4d07-b709-d896fd16cc5f","metadata":{"editable":false,"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"source":["## Business Central merge data notebook\n","In this part the files in the delta folder will be merge with the Lakehouse table.\n","- It iterates first on the folders to append to the existing table.\n","- After that is will remove all duplicates by sorting the table. 
\n","- At last it will remove all deleted records inside the table that are deleted in Business Central\n","\n","Please change the parameters in the first part."]},{"cell_type":"code","execution_count":null,"id":"34dc5721-e317-4dc0-88ef-2c6bafb494da","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.6812441Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:06.8530455Z\",\"execution_finish_time\":\"2023-08-15T09:15:07.1828235Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","# settings\n","spark.conf.set(\"sprk.sql.parquet.vorder.enabled\",\"true\")\n","spark.conf.set(\"spark.microsoft.delta.optimizewrite.enabled\",\"true\")\n","spark.conf.set(\"spark.sql.parquet.filterPushdown\", \"true\")\n","spark.conf.set(\"spark.sql.parquet.mergeSchema\", \"false\")\n","spark.conf.set(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\")\n","spark.conf.set(\"spark.sql.delta.commitProtocol.enabled\", \"true\")\n","\n","# file paths\n","folder_path_spark = 'Files/deltas/' # this is mostly the default\n","folder_path_json = '/lakehouse/default/Files/' # this is mostly the default\n","folder_path_reset = '/lakehouse/default/Files/reset/' # this is mostly the default\n","folder_path = '/lakehouse/default/Files/deltas/' # this is mostly the default\n","\n","# parameters\n","workspace = 'businessCentral' #can also be a GUID\n","Lakehouse = 'businessCentral'; #can also be a GUID\n","Remove_delta = True; #will remove the delta files if everything is processed\n","Drop_table_if_mismatch = False; #option to drop the table if json file has different columns then in the table\n","no_Partition = 258 #how many partition is used in the dataframe, a good starting point might be 2-4 partitions per CPU core in your Spark cluster"]},{"cell_type":"code","execution_count":null,"id":"33ddc3d7","metadata":{},"outputs":[],"source":["%%pyspark\n","import os\n","import json\n","from pyspark.sql.types import *\n","\n","if Drop_table_if_mismatch:\n","\n"," def count_keys(obj): \n"," if isinstance(obj, dict): \n"," return len(obj) + sum(count_keys(v) for v in obj.values()) \n"," if isinstance(obj, list): \n"," return sum(count_keys(v) for v in obj) \n"," return 0 \n","\n"," for filename in os.listdir(folder_path_json):\n"," if \"manifest\" not in filename: # exclude the manifest files\n"," if filename.endswith(\".cdm.json\"):\n"," table_name = filename.replace(\"-\",\"\")\n"," table_name = table_name.replace(\".cdm.json\",\"\")\n","\n"," if table_name in [t.name for t in spark.catalog.listTables()]:\n"," #count number of columns in excisting table\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," df = spark.sql(SQL_Query)\n"," num_cols_table = len(df.columns) \n","\n"," #count number of columns in json file \n"," f = open(folder_path_json + filename)\n"," schema = json.load(f)\n"," has_attributes = schema[\"definitions\"][0][\"hasAttributes\"] \n"," num_names = len(has_attributes)\n","\n"," if num_cols_table != num_names:\n"," df = spark.sql(\"DROP TABLE IF EXISTS\"+ Lakehouse + \".\" + table_name)"]},{"cell_type":"code","execution_count":null,"id":"5669531f","metadata":{},"outputs":[],"source":["%%pyspark\n","import os\n","import glob\n","from pyspark.sql.types import *\n","\n","for filename 
in os.listdir(folder_path_reset):\n"," # Remove the table\n"," table_name = filename.replace(\"-\",\"\")\n"," table_name = table_name.replace(\".txt\",\"\")\n","\n"," df = spark.sql(\"DROP TABLE IF EXISTS \"+ Lakehouse + \".\" + table_name)\n","\n"," try: \n"," os.remove(folder_path_reset + '/' + filename) \n"," except OSError as e: # this would catch any error when trying to delete the file \n"," print(f\"Error: {filename} : {e.strerror}\")"]},{"cell_type":"code","execution_count":null,"id":"0594c099-6512-4777-82e2-9a3a058512fe","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.7249665Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:07.7601315Z\",\"execution_finish_time\":\"2023-08-15T09:15:18.128035Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","collapsed":false,"editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","import json\n","import os\n","import glob\n","from pyspark.sql.types import *\n","from pyspark.sql.utils import AnalysisException\n","from pyspark.sql.functions import col\n","from pyspark.sql.functions import desc\n","file_list = []\n","\n","for entry in os.scandir(folder_path):\n"," if entry.is_dir():\n","\n"," for filename in glob.glob(folder_path + entry.name + '/*'): \n"," table_name = entry.name.replace(\"-\",\"\")\n"," ContainsCompany = False\n"," df_new = spark.read.option(\"minPartitions\", no_Partition).format(\"csv\").option(\"header\",\"true\").load(folder_path_spark + entry.name +\"/*\") \n"," file_list.append(filename) #collect the imported filed in a list for deletion later on\n","\n"," f = open(folder_path_json + entry.name +\".cdm.json\")\n"," schema = json.load(f)\n"," # Parse the schema to get column names and data types\n"," column_names = [attr[\"name\"] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," if '$Company' in column_names:\n"," ContainsCompany = True\n"," column_types = [attr['dataFormat'] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," for col_name, col_type in zip(column_names, column_types):\n"," if col_type == \"String\":\n"," col_type = \"string\"\n"," if col_type == \"Guid\":\n"," col_type = \"string\"\n"," if col_type == \"Code\":\n"," col_type = \"object\"\n"," if col_type == \"Option\":\n"," col_type = \"string\"\n"," if col_type == \"Date\":\n"," col_type = \"date\"\n"," if col_type == \"Time\":\n"," col_type = \"string\"\n"," if col_type == \"DateTime\":\n"," col_type = \"date\"\n"," if col_type == \"Duration\":\n"," col_type = \"timedelta\"\n"," if col_type == \"Decimal\":\n"," col_type = \"float\"\n"," if col_type == \"Boolean\":\n"," col_type = \"boolean\"\n"," if col_type == \"Integer\":\n"," col_type = \"int\"\n"," if col_type == \"Int64\":\n"," col_type = \"int\"\n"," if col_type == \"Int32\":\n"," col_type = \"int\"\n","\n"," df_new = df_new.withColumn(col_name, df_new[col_name].cast(col_type))\n","\n","\n"," #check if the table excists\n"," if table_name in [t.name for t in spark.catalog.listTables()]: \n"," #read the old data into a new dataframe and union with the new dataframe\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," #print(SQL_Query)\n"," df_old = spark.sql(SQL_Query)\n"," df_new = df_new.union(df_old).repartition(no_Partition)\n","\n"," #delete all old records\n"," df_deletes = 
df_new.filter(df_new['SystemCreatedAt-2000000001'].isNull())\n"," if ContainsCompany:\n"," df_new = df_new.join(df_deletes, ['$Company','systemId-2000000000'], 'leftanti')\n"," else:\n"," df_new = df_new.join(df_deletes, ['systemId-2000000000'], 'leftanti')\n"," \n"," # remove duplicates by filtering on systemID and systemModifiedAt fields\n"," if ContainsCompany:\n"," df_new = df_new.orderBy('$Company','systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['$Company','systemId-2000000000'])\n"," else:\n"," df_new = df_new.orderBy('systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['systemId-2000000000'])\n"," \n"," #overwrite the dataframe in the new table\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name) \n"," else: \n"," #table isn't there so just insert it\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name)\n","\n"," #delete the files\n"," if Remove_delta:\n"," for filename in file_list: \n"," try: \n"," os.remove(filename) \n"," except OSError as e: # this would catch any error when trying to delete the file \n"," print(f\"Error: {filename} : {e.strerror}\")\n"," file_list = [] # clear the list"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"host":{"synapse_widget":{"state":{},"token":"a69b4b72-86b0-4373-b695-ef01cd53bbb1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","known_lakehouses":"[{\"id\":\"9fbacb3e-d0df-43a4-814b-abe4cb623a81\"}]"}}},"language":"python","ms_spell_check":{"ms_spell_check_language":"en"}},"notebook_environment":{},"nteract":{"version":"nteract-front-end@1.0.0"},"save_output":true,"spark_compute":{"compute_id":"/trident/default","session_options":{"conf":{},"enableDebugMode":false}},"synapse_widget":{"state":{},"version":"0.1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","default_lakehouse_name":"businessCentral","default_lakehouse_workspace_id":"21a92229-a0fb-4256-86bd-4b847b8006ed","known_lakehouses":[{"id":"9fbacb3e-d0df-43a4-814b-abe4cb623a81"}]}},"widgets":{}},"nbformat":4,"nbformat_minor":5} From 42ffc5e988c379e9b2df91477897093b8563537b Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 15:25:29 +0100 Subject: [PATCH 05/24] Add condition to delete table only if storage type is Microsoft Fabric --- businessCentral/app/src/Table.Table.al | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/businessCentral/app/src/Table.Table.al b/businessCentral/app/src/Table.Table.al index f01ea68..f5601db 100644 --- a/businessCentral/app/src/Table.Table.al +++ b/businessCentral/app/src/Table.Table.al @@ -185,7 +185,7 @@ table 82561 "ADLSE Table" ADLSEDeletedRecord.DeleteAll(); ADLSESetup.GetSingleton(); - if ADLSESetup."Delete Table" then + if (ADLSESetup."Delete Table") and (ADLSESetup."Storage Type" = ADLSESetup."Storage Type"::"Microsoft Fabric") then ADLSECommunication.ResetTableExport(Rec."Table ID"); OnAfterResetSelected(Rec); From 18f30fe0f959887a37177638d66baea2ccbeb4be Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 15:37:25 +0100 Subject: [PATCH 06/24] Updated reset message. 
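The reset itself is asynchronous: when "Delete Table" is enabled and the storage type is Microsoft Fabric, ResetTableExport only writes an empty marker blob at /reset/<table>.txt; the Lakehouse table is not dropped until the CopyBusinessCentral notebook next processes those markers. That is why the message asks the user to run the notebook first. A minimal sketch of the notebook step that consumes the markers (assuming the ambient `spark` session and the default lakehouse paths used elsewhere in the notebook):

```python
import os

folder_path_reset = '/lakehouse/default/Files/reset/'  # marker files written by ResetTableExport
lakehouse = 'businessCentral'

for filename in os.listdir(folder_path_reset):
    # Markers are named after the data-lake-compliant table name, so stripping
    # '-' and the '.txt' extension yields the Lakehouse table to drop.
    table_name = filename.replace('-', '').replace('.txt', '')
    spark.sql(f'DROP TABLE IF EXISTS {lakehouse}.{table_name}')
    os.remove(folder_path_reset + filename)  # consume the marker so the reset runs only once
```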
--- businessCentral/app/src/Table.Table.al | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/businessCentral/app/src/Table.Table.al b/businessCentral/app/src/Table.Table.al index f5601db..7e3a985 100644 --- a/businessCentral/app/src/Table.Table.al +++ b/businessCentral/app/src/Table.Table.al @@ -97,7 +97,8 @@ table 82561 "ADLSE Table" TableNotNormalErr: Label 'Table %1 is not a normal table.', Comment = '%1: caption of table'; TableExportingDataErr: Label 'Data is being executed for table %1. Please wait for the export to finish before making changes.', Comment = '%1: table caption'; TableCannotBeExportedErr: Label 'The table %1 cannot be exported because of the following error. \%2', Comment = '%1: Table ID, %2: error text'; - TablesResetTxt: Label '%1 table(s) were reset.', Comment = '%1 = number of tables that were reset'; + TablesResetTxt: Label '%1 table(s) were reset %2', Comment = '%1 = number of tables that were reset, %2 = message if tables are exported'; + TableResetExportedTxt: Label 'and are exported to the lakehouse. Please run the notebook first.'; procedure FieldsChosen(): Integer var @@ -192,7 +193,10 @@ table 82561 "ADLSE Table" Counter += 1; until Rec.Next() = 0; - Message(TablesResetTxt, Counter); + if (ADLSESetup."Delete Table") and (ADLSESetup."Storage Type" = ADLSESetup."Storage Type"::"Microsoft Fabric") then + Message(TablesResetTxt, Counter, TableResetExportedTxt) + else + Message(TablesResetTxt, Counter, '.'); end; local procedure CheckExportingOnlyValidFields() From c1ab4f9d168a73cdadb28444e4b50fca2e87bf1e Mon Sep 17 00:00:00 2001 From: Bertverbeek4PS Date: Tue, 27 Feb 2024 14:53:56 +0000 Subject: [PATCH 07/24] Updated AL-Go System Files --- .AL-Go/cloudDevEnv.ps1 | 4 +- .AL-Go/localDevEnv.ps1 | 4 +- .github/RELEASENOTES.copy.md | 42 ++ .../workflows/AddExistingAppOrTestApp.yaml | 25 +- .github/workflows/CICD.yaml | 97 ++++- .github/workflows/CreateApp.yaml | 31 +- .../CreateOnlineDevelopmentEnvironment.yaml | 39 +- .../workflows/CreatePerformanceTestApp.yaml | 37 +- .github/workflows/CreateRelease.yaml | 75 ++-- .github/workflows/CreateTestApp.yaml | 31 +- .github/workflows/Current.yaml | 16 +- .../DeployReferenceDocumentation.yaml | 71 ++++ .github/workflows/IncrementVersionNumber.yaml | 31 +- .github/workflows/NextMajor.yaml | 16 +- .github/workflows/NextMinor.yaml | 16 +- .github/workflows/PublishToEnvironment.yaml | 26 +- .github/workflows/PullRequestHandler.yaml | 19 +- .github/workflows/Troubleshooting.yaml | 37 ++ .../workflows/UpdateGitHubGoSystemFiles.yaml | 44 +- .github/workflows/_BuildALGoProject.yaml | 376 +++++++++--------- 20 files changed, 675 insertions(+), 362 deletions(-) create mode 100644 .github/workflows/DeployReferenceDocumentation.yaml create mode 100644 .github/workflows/Troubleshooting.yaml diff --git a/.AL-Go/cloudDevEnv.ps1 b/.AL-Go/cloudDevEnv.ps1 index 50ba88f..4008ebc 100644 --- a/.AL-Go/cloudDevEnv.ps1 +++ b/.AL-Go/cloudDevEnv.ps1 @@ -27,11 +27,11 @@ Write-Host -ForegroundColor Yellow @' $webClient = New-Object System.Net.WebClient $webClient.CachePolicy = New-Object System.Net.Cache.RequestCachePolicy -argumentList ([System.Net.Cache.RequestCacheLevel]::NoCacheNoStore) $webClient.Encoding = [System.Text.Encoding]::UTF8 -$GitHubHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.0/Github-Helper.psm1' +$GitHubHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.1/Github-Helper.psm1' Write-Host "Downloading GitHub Helper module from $GitHubHelperUrl" 
$GitHubHelperPath = "$([System.IO.Path]::GetTempFileName()).psm1" $webClient.DownloadFile($GitHubHelperUrl, $GitHubHelperPath) -$ALGoHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.0/AL-Go-Helper.ps1' +$ALGoHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.1/AL-Go-Helper.ps1' Write-Host "Downloading AL-Go Helper script from $ALGoHelperUrl" $ALGoHelperPath = "$([System.IO.Path]::GetTempFileName()).ps1" $webClient.DownloadFile($ALGoHelperUrl, $ALGoHelperPath) diff --git a/.AL-Go/localDevEnv.ps1 b/.AL-Go/localDevEnv.ps1 index 9d5d09f..819e0ad 100644 --- a/.AL-Go/localDevEnv.ps1 +++ b/.AL-Go/localDevEnv.ps1 @@ -30,11 +30,11 @@ Write-Host -ForegroundColor Yellow @' $webClient = New-Object System.Net.WebClient $webClient.CachePolicy = New-Object System.Net.Cache.RequestCachePolicy -argumentList ([System.Net.Cache.RequestCacheLevel]::NoCacheNoStore) $webClient.Encoding = [System.Text.Encoding]::UTF8 -$GitHubHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.0/Github-Helper.psm1' +$GitHubHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.1/Github-Helper.psm1' Write-Host "Downloading GitHub Helper module from $GitHubHelperUrl" $GitHubHelperPath = "$([System.IO.Path]::GetTempFileName()).psm1" $webClient.DownloadFile($GitHubHelperUrl, $GitHubHelperPath) -$ALGoHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.0/AL-Go-Helper.ps1' +$ALGoHelperUrl = 'https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.1/AL-Go-Helper.ps1' Write-Host "Downloading AL-Go Helper script from $ALGoHelperUrl" $ALGoHelperPath = "$([System.IO.Path]::GetTempFileName()).ps1" $webClient.DownloadFile($ALGoHelperUrl, $ALGoHelperPath) diff --git a/.github/RELEASENOTES.copy.md b/.github/RELEASENOTES.copy.md index 0ec44dc..2aa6e6b 100644 --- a/.github/RELEASENOTES.copy.md +++ b/.github/RELEASENOTES.copy.md @@ -1,3 +1,45 @@ +## v4.1 + +### New Settings +- `templateSha`: The SHA of the version of AL-Go currently used + +### New Actions +- `DumpWorkflowInfo`: Dump information about running workflow +- `Troubleshooting` : Run troubleshooting for repository + +### Update AL-Go System Files +Add another parameter when running Update AL-Go System Files, called downloadLatest, used to indicate whether to download latest version from template repository. Default value is true. +If false, the templateSha repository setting is used to download specific AL-Go System Files when calculating new files. + +### Issues +- Issue 782 Exclude '.altestrunner/' from template .gitignore +- Issue 823 Dependencies from prior build jobs are not included when using useProjectDependencies +- App artifacts for version 'latest' are now fetched from the latest CICD run that completed and successfully built all the projects for the corresponding branch. +- Issue 824 Utilize `useCompilerFolder` setting when creating an development environment for an AL-Go project. +- Issue 828 and 825 display warnings for secrets, which might cause AL-Go for GitHub to malfunction + +### New Settings + +- `alDoc` : JSON object with properties for the ALDoc reference document generation + - **continuousDeployment** = Determines if reference documentation will be deployed continuously as part of CI/CD. You can run the **Deploy Reference Documentation** workflow to deploy manually or on a schedule. (Default false) + - **deployToGitHubPages** = Determines whether or not the reference documentation site should be deployed to GitHub Pages for the repository. 
In order to deploy to GitHub Pages, GitHub Pages must be enabled and set to GitHub Actions. (Default true) + - **maxReleases** = Maximum number of releases to include in the reference documentation. (Default 3) + - **groupByProject** = Determines whether projects in multi-project repositories are used as folders in reference documentation + - **includeProjects** = An array of projects to include in the reference documentation. (Default all) + - **excludeProjects** = An array of projects to exclude in the reference documentation. (Default none)- + - **header** = Header for the documentation site. (Default: Documentation for...) + - **footer** = Footer for the documentation site. (Default: Made with...) + - **defaultIndexMD** = Markdown for the landing page of the documentation site. (Default: Reference documentation...) + - **defaultReleaseMD** = Markdown for the landing page of the release sites. (Default: Release reference documentation...) + - *Note that in header, footer, defaultIndexMD and defaultReleaseMD you can use the following placeholders: {REPOSITORY}, {VERSION}, {INDEXTEMPLATERELATIVEPATH}, {RELEASENOTES}* + +### New Workflows +- **Deploy Reference Documentation** is a workflow, which you can invoke manually or on a schedule to generate and deploy reference documentation using the aldoc tool, using the ALDoc setting properties described above. +- **Troubleshooting** is a workflow, which you can invoke manually to run troubleshooting on the repository and check for settings or secrets, containing illegal values. When creating issues on https://github.com/microsoft/AL-Go/issues, we might ask you to run the troubleshooter to help identify common problems. + +### Support for ALDoc reference documentation tool +ALDoc reference documentation tool is now supported for generating and deploying reference documentation for your projects either continuously or manually/scheduled. + ## v4.0 ### Removal of the InsiderSasToken diff --git a/.github/workflows/AddExistingAppOrTestApp.yaml b/.github/workflows/AddExistingAppOrTestApp.yaml index 112f07c..c12eeee 100644 --- a/.github/workflows/AddExistingAppOrTestApp.yaml +++ b/.github/workflows/AddExistingAppOrTestApp.yaml @@ -13,12 +13,13 @@ on: description: Direct Download Url of .app or .zip file required: true directCommit: - description: Direct COMMIT (Y/N) - required: false - default: 'N' + description: Direct Commit? + type: boolean + default: false useGhTokenWorkflow: - description: Use GhTokenWorkflow for Pull Request/COMMIT + description: Use GhTokenWorkflow for PR/Commit? 
type: boolean + default: false permissions: contents: write @@ -34,26 +35,32 @@ env: jobs: AddExistingAppOrTestApp: + needs: [ ] runs-on: [ windows-latest ] steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0090" - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -61,7 +68,7 @@ jobs: useGhTokenWorkflowForPush: '${{ github.event.inputs.useGhTokenWorkflow }}' - name: Add existing app - uses: microsoft/AL-Go-Actions/AddExistingApp@v4.0 + uses: microsoft/AL-Go-Actions/AddExistingApp@v4.1 with: shell: powershell token: ${{ steps.ReadSecrets.outputs.TokenForPush }} @@ -72,7 +79,7 @@ jobs: - name: Finalize the workflow if: always() - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0090" diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 759c7a0..faf75a3 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -24,12 +24,15 @@ env: jobs: Initialization: + needs: [ ] runs-on: [ windows-latest ] outputs: telemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} environmentsMatrixJson: ${{ steps.DetermineDeploymentEnvironments.outputs.EnvironmentsMatrixJson }} environmentCount: ${{ steps.DetermineDeploymentEnvironments.outputs.EnvironmentCount }} deploymentEnvironmentsJson: ${{ steps.DetermineDeploymentEnvironments.outputs.DeploymentEnvironmentsJson }} + generateALDocArtifact: ${{ steps.DetermineDeploymentEnvironments.outputs.GenerateALDocArtifact }} + deployALDocArtifact: ${{ steps.DetermineDeploymentEnvironments.outputs.DeployALDocArtifact }} deliveryTargetsJson: ${{ steps.DetermineDeliveryTargets.outputs.DeliveryTargetsJson }} githubRunner: ${{ steps.ReadSettings.outputs.GitHubRunnerJson }} githubRunnerShell: ${{ steps.ReadSettings.outputs.GitHubRunnerShell }} @@ -38,6 +41,11 @@ jobs: buildOrderJson: ${{ steps.determineProjectsToBuild.outputs.BuildOrderJson }} workflowDepth: ${{ steps.DetermineWorkflowDepth.outputs.WorkflowDepth }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 with: @@ -45,14 +53,14 @@ jobs: - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0091" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell get: type @@ -64,22 +72,22 @@ jobs: - name: Determine Projects To Build id: determineProjectsToBuild - uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.0 + uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.1 with: shell: powershell maxBuildDepth: ${{ env.workflowDepth }} - name: Determine Delivery Target Secrets id: DetermineDeliveryTargetSecrets - uses: 
microsoft/AL-Go-Actions/DetermineDeliveryTargets@v4.0 + uses: microsoft/AL-Go-Actions/DetermineDeliveryTargets@v4.1 with: shell: powershell projectsJson: '${{ steps.determineProjectsToBuild.outputs.ProjectsJson }}' - checkContextSecrets: 'N' + checkContextSecrets: 'false' - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -87,17 +95,17 @@ jobs: - name: Determine Delivery Targets id: DetermineDeliveryTargets - uses: microsoft/AL-Go-Actions/DetermineDeliveryTargets@v4.0 + uses: microsoft/AL-Go-Actions/DetermineDeliveryTargets@v4.1 env: Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' with: shell: powershell projectsJson: '${{ steps.determineProjectsToBuild.outputs.ProjectsJson }}' - checkContextSecrets: 'Y' + checkContextSecrets: 'true' - name: Determine Deployment Environments id: DetermineDeploymentEnvironments - uses: microsoft/AL-Go-Actions/DetermineDeploymentEnvironments@v4.0 + uses: microsoft/AL-Go-Actions/DetermineDeploymentEnvironments@v4.1 env: GITHUB_TOKEN: ${{ github.token }} with: @@ -106,24 +114,24 @@ jobs: type: 'CD' CheckForUpdates: - runs-on: [ windows-latest ] needs: [ Initialization ] + runs-on: [ windows-latest ] steps: - name: Checkout uses: actions/checkout@v3 - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell get: templateUrl - name: Check for updates to AL-Go system files - uses: microsoft/AL-Go-Actions/CheckForUpdates@v4.0 + uses: microsoft/AL-Go-Actions/CheckForUpdates@v4.1 with: shell: powershell - parentTelemetryScopeJson: ${{ needs.Initialization.outputs.telemetryScopeJson }} templateUrl: ${{ env.templateUrl }} + downloadLatest: true Build: needs: [ Initialization ] @@ -149,6 +157,53 @@ jobs: signArtifacts: true useArtifactCache: true + DeployALDoc: + needs: [ Initialization, Build ] + if: always() && needs.Build.result == 'Success' && needs.Initialization.outputs.generateALDocArtifact == 1 && github.ref_name == 'main' + runs-on: windows-latest + name: Deploy Reference Documentation + permissions: + contents: write + actions: read + pages: write + id-token: write + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Download artifacts + uses: actions/download-artifact@v3 + with: + path: '.artifacts' + + - name: Read settings + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 + with: + shell: powershell + + - name: Setup Pages + if: needs.Initialization.outputs.deployALDocArtifact == 1 + uses: actions/configure-pages@v3 + + - name: Build Reference Documentation + uses: microsoft/AL-Go-Actions/BuildReferenceDocumentation@v4.1 + with: + shell: powershell + artifacts: '.artifacts' + + - name: Upload pages artifact + uses: actions/upload-pages-artifact@v2 + with: + path: ".aldoc/_site/" + + - name: Deploy to GitHub Pages + if: needs.Initialization.outputs.deployALDocArtifact == 1 + id: deployment + uses: actions/deploy-pages@v2 + Deploy: needs: [ Initialization, Build ] if: always() && needs.Build.result == 'Success' && needs.Initialization.outputs.environmentCount > 0 @@ -168,7 +223,7 @@ jobs: path: '.artifacts' - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell @@ -181,7 +236,7 @@ jobs: - name: Read secrets id: ReadSecrets - uses: 
microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -189,7 +244,7 @@ jobs: - name: Deploy id: Deploy - uses: microsoft/AL-Go-Actions/Deploy@v4.0 + uses: microsoft/AL-Go-Actions/Deploy@v4.1 env: Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' with: @@ -218,20 +273,20 @@ jobs: path: '.artifacts' - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} getSecrets: '${{ matrix.deliveryTarget }}Context' - name: Deliver - uses: microsoft/AL-Go-Actions/Deliver@v4.0 + uses: microsoft/AL-Go-Actions/Deliver@v4.1 env: Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' with: @@ -242,16 +297,16 @@ jobs: artifacts: '.artifacts' PostProcess: + needs: [ Initialization, Build, Deploy, Deliver, DeployALDoc ] if: (!cancelled()) runs-on: [ windows-latest ] - needs: [ Initialization, Build, Deploy, Deliver ] steps: - name: Checkout uses: actions/checkout@v3 - name: Finalize the workflow id: PostProcess - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0091" diff --git a/.github/workflows/CreateApp.yaml b/.github/workflows/CreateApp.yaml index 97c3285..bc8183b 100644 --- a/.github/workflows/CreateApp.yaml +++ b/.github/workflows/CreateApp.yaml @@ -19,16 +19,17 @@ on: description: ID range (from..to) required: true sampleCode: - description: Include Sample code (Y/N) - required: false - default: 'Y' + description: Include Sample code? + type: boolean + default: true directCommit: - description: Direct COMMIT (Y/N) - required: false - default: "N" + description: Direct Commit? + type: boolean + default: false useGhTokenWorkflow: - description: Use GhTokenWorkflow for Pull Request/COMMIT + description: Use GhTokenWorkflow for PR/Commit? 
type: boolean + default: false permissions: contents: write @@ -44,27 +45,33 @@ env: jobs: CreateApp: + needs: [ ] runs-on: [ windows-latest ] steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0092" - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell get: type - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -72,7 +79,7 @@ jobs: useGhTokenWorkflowForPush: '${{ github.event.inputs.useGhTokenWorkflow }}' - name: Creating a new app - uses: microsoft/AL-Go-Actions/CreateApp@v4.0 + uses: microsoft/AL-Go-Actions/CreateApp@v4.1 with: shell: powershell token: ${{ steps.ReadSecrets.outputs.TokenForPush }} @@ -87,7 +94,7 @@ jobs: - name: Finalize the workflow if: always() - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0092" diff --git a/.github/workflows/CreateOnlineDevelopmentEnvironment.yaml b/.github/workflows/CreateOnlineDevelopmentEnvironment.yaml index 391d83f..9536771 100644 --- a/.github/workflows/CreateOnlineDevelopmentEnvironment.yaml +++ b/.github/workflows/CreateOnlineDevelopmentEnvironment.yaml @@ -13,16 +13,17 @@ on: description: Name of the online environment required: true reUseExistingEnvironment: - description: Reuse environment if it exists - required: false - default: 'N' + description: Reuse environment if it exists? + type: boolean + default: false directCommit: - description: Direct COMMIT (Y/N) - required: false - default: 'N' + description: Direct Commit? + type: boolean + default: false useGhTokenWorkflow: - description: Use GhTokenWorkflow for Pull Request/COMMIT + description: Use GhTokenWorkflow for PR/Commit? 
type: boolean + default: false permissions: contents: write @@ -38,6 +39,7 @@ env: jobs: Initialization: + needs: [ ] runs-on: [ windows-latest ] outputs: deviceCode: ${{ steps.authenticate.outputs.deviceCode }} @@ -45,25 +47,30 @@ jobs: githubRunner: ${{ steps.ReadSettings.outputs.GitHubRunnerJson }} githubRunnerShell: ${{ steps.ReadSettings.outputs.GitHubRunnerShell }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0093" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -82,7 +89,7 @@ jobs: Write-Host "AdminCenterApiCredentials not provided, initiating Device Code flow" $ALGoHelperPath = "$([System.IO.Path]::GetTempFileName()).ps1" $webClient = New-Object System.Net.WebClient - $webClient.DownloadFile('https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.0/AL-Go-Helper.ps1', $ALGoHelperPath) + $webClient.DownloadFile('https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.1/AL-Go-Helper.ps1', $ALGoHelperPath) . $ALGoHelperPath DownloadAndImportBcContainerHelper $authContext = New-BcAuthContext -includeDeviceLogin -deviceLoginTimeout ([TimeSpan]::FromSeconds(0)) @@ -91,12 +98,12 @@ jobs: } CreateDevelopmentEnvironment: + needs: [ Initialization ] runs-on: ${{ fromJson(needs.Initialization.outputs.githubRunner) }} defaults: run: shell: ${{ needs.Initialization.outputs.githubRunnerShell }} name: Create Development Environment - needs: [ Initialization ] env: deviceCode: ${{ needs.Initialization.outputs.deviceCode }} steps: @@ -104,13 +111,13 @@ jobs: uses: actions/checkout@v3 - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -129,7 +136,7 @@ jobs: Add-Content -Encoding UTF8 -Path $env:GITHUB_OUTPUT -value "adminCenterApiCredentials=$adminCenterApiCredentials" - name: Create Development Environment - uses: microsoft/AL-Go-Actions/CreateDevelopmentEnvironment@v4.0 + uses: microsoft/AL-Go-Actions/CreateDevelopmentEnvironment@v4.1 with: shell: powershell token: ${{ steps.ReadSecrets.outputs.TokenForPush }} @@ -142,7 +149,7 @@ jobs: - name: Finalize the workflow if: always() - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0093" diff --git a/.github/workflows/CreatePerformanceTestApp.yaml b/.github/workflows/CreatePerformanceTestApp.yaml index 5f49f87..6c76080 100644 --- a/.github/workflows/CreatePerformanceTestApp.yaml +++ b/.github/workflows/CreatePerformanceTestApp.yaml @@ -21,20 +21,21 @@ on: required: true default: '50000..99999' sampleCode: - description: Include Sample code (Y/N) - required: false - default: 'Y' + description: Include Sample code? 
+ type: boolean + default: true sampleSuite: - description: Include Sample BCPT Suite (Y/N) - required: false - default: 'Y' + description: Include Sample BCPT Suite? + type: boolean + default: true directCommit: - description: Direct COMMIT (Y/N) - required: false - default: 'N' + description: Direct Commit? + type: boolean + default: false useGhTokenWorkflow: - description: Use GhTokenWorkflow for Pull Request/COMMIT + description: Use GhTokenWorkflow for PR/Commit? type: boolean + default: false permissions: contents: write @@ -50,26 +51,32 @@ env: jobs: CreatePerformanceTestApp: + needs: [ ] runs-on: [ windows-latest ] steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0102" - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -77,7 +84,7 @@ jobs: useGhTokenWorkflowForPush: '${{ github.event.inputs.useGhTokenWorkflow }}' - name: Creating a new test app - uses: microsoft/AL-Go-Actions/CreateApp@v4.0 + uses: microsoft/AL-Go-Actions/CreateApp@v4.1 with: shell: powershell token: ${{ steps.ReadSecrets.outputs.TokenForPush }} @@ -93,7 +100,7 @@ jobs: - name: Finalize the workflow if: always() - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0102" diff --git a/.github/workflows/CreateRelease.yaml b/.github/workflows/CreateRelease.yaml index fc5e855..8ecb3ea 100644 --- a/.github/workflows/CreateRelease.yaml +++ b/.github/workflows/CreateRelease.yaml @@ -16,28 +16,29 @@ on: required: true default: '' prerelease: - description: Prerelease (Y/N) - required: false - default: 'N' + description: Prerelease? + type: boolean + default: false draft: - description: Draft (Y/N) - required: false - default: 'N' + description: Draft? + type: boolean + default: false createReleaseBranch: - description: Create Release Branch (Y/N) - required: false - default: 'N' + description: Create Release Branch? + type: boolean + default: false updateVersionNumber: description: New Version Number in main branch. Use Major.Minor for absolute change, use +Major.Minor for incremental change. required: false default: '' directCommit: - description: Direct COMMIT (Y/N) - required: false - default: 'N' + description: Direct Commit? + type: boolean + default: false useGhTokenWorkflow: - description: Use GhTokenWorkflow for Pull Request/COMMIT + description: Use GhTokenWorkflow for PR/Commit? 
type: boolean + default: false permissions: contents: write @@ -56,6 +57,7 @@ env: jobs: CreateRelease: + needs: [ ] runs-on: [ windows-latest ] outputs: telemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} @@ -64,35 +66,40 @@ jobs: commitish: ${{ steps.analyzeartifacts.outputs.commitish }} releaseBranch: ${{ steps.createreleasenotes.outputs.releaseBranch }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0094" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell get: templateUrl,repoName - name: Determine Projects id: determineProjects - uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.0 + uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.1 with: shell: powershell - name: Check for updates to AL-Go system files - uses: microsoft/AL-Go-Actions/CheckForUpdates@v4.0 + uses: microsoft/AL-Go-Actions/CheckForUpdates@v4.1 with: shell: powershell - parentTelemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} templateUrl: ${{ env.templateUrl }} + downloadLatest: true - name: Analyze Artifacts id: analyzeartifacts @@ -173,7 +180,7 @@ jobs: - name: Prepare release notes id: createreleasenotes - uses: microsoft/AL-Go-Actions/CreateReleaseNotes@v4.0 + uses: microsoft/AL-Go-Actions/CreateReleaseNotes@v4.1 with: shell: powershell parentTelemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} @@ -195,8 +202,8 @@ jobs: tag_name: '${{ github.event.inputs.tag }}', name: '${{ github.event.inputs.name }}', body: bodyMD.replaceAll('\\n','\n').replaceAll('%0A','\n').replaceAll('%0D','\n').replaceAll('%25','%'), - draft: ${{ github.event.inputs.draft=='Y' }}, - prerelease: ${{ github.event.inputs.prerelease=='Y' }}, + draft: ${{ github.event.inputs.draft=='true' }}, + prerelease: ${{ github.event.inputs.prerelease=='true' }}, make_latest: 'legacy', target_commitish: '${{ steps.analyzeartifacts.outputs.commitish }}' }); @@ -206,8 +213,8 @@ jobs: core.setOutput('releaseId', releaseId); UploadArtifacts: - runs-on: [ windows-latest ] needs: [ CreateRelease ] + runs-on: [ windows-latest ] strategy: matrix: ${{ fromJson(needs.CreateRelease.outputs.artifacts) }} fail-fast: true @@ -216,13 +223,13 @@ jobs: uses: actions/checkout@v3 - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -259,7 +266,7 @@ jobs: }); - name: Deliver to NuGet - uses: microsoft/AL-Go-Actions/Deliver@v4.0 + uses: microsoft/AL-Go-Actions/Deliver@v4.1 if: ${{ fromJson(steps.ReadSecrets.outputs.Secrets).nuGetContext != '' }} env: Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' @@ -272,7 +279,7 @@ jobs: atypes: 'Apps,TestApps' - name: Deliver to Storage - uses: microsoft/AL-Go-Actions/Deliver@v4.0 + uses: microsoft/AL-Go-Actions/Deliver@v4.1 if: ${{ fromJson(steps.ReadSecrets.outputs.Secrets).storageContext != '' }} env: Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' @@ -285,9 +292,9 @@ jobs: 
atypes: 'Apps,TestApps,Dependencies' CreateReleaseBranch: - if: ${{ github.event.inputs.createReleaseBranch=='Y' }} - runs-on: [ windows-latest ] needs: [ CreateRelease, UploadArtifacts ] + if: ${{ github.event.inputs.createReleaseBranch=='true' }} + runs-on: [ windows-latest ] steps: - name: Checkout uses: actions/checkout@v3 @@ -304,18 +311,18 @@ jobs: git push origin ${{ needs.CreateRelease.outputs.releaseBranch }} UpdateVersionNumber: + needs: [ CreateRelease, UploadArtifacts ] if: ${{ github.event.inputs.updateVersionNumber!='' }} runs-on: [ windows-latest ] - needs: [ CreateRelease, UploadArtifacts ] steps: - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -323,7 +330,7 @@ jobs: useGhTokenWorkflowForPush: '${{ github.event.inputs.useGhTokenWorkflow }}' - name: Update Version Number - uses: microsoft/AL-Go-Actions/IncrementVersionNumber@v4.0 + uses: microsoft/AL-Go-Actions/IncrementVersionNumber@v4.1 with: shell: powershell token: ${{ steps.ReadSecrets.outputs.TokenForPush }} @@ -332,16 +339,16 @@ jobs: directCommit: ${{ github.event.inputs.directCommit }} PostProcess: + needs: [ CreateRelease, UploadArtifacts, CreateReleaseBranch, UpdateVersionNumber ] if: always() runs-on: [ windows-latest ] - needs: [ CreateRelease, UploadArtifacts, CreateReleaseBranch, UpdateVersionNumber ] steps: - name: Checkout uses: actions/checkout@v3 - name: Finalize the workflow id: PostProcess - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0094" diff --git a/.github/workflows/CreateTestApp.yaml b/.github/workflows/CreateTestApp.yaml index cac7067..5a744c2 100644 --- a/.github/workflows/CreateTestApp.yaml +++ b/.github/workflows/CreateTestApp.yaml @@ -21,16 +21,17 @@ on: required: true default: '50000..99999' sampleCode: - description: Include Sample code (Y/N) - required: false - default: 'Y' + description: Include Sample code? + type: boolean + default: true directCommit: - description: Direct COMMIT (Y/N) - required: false - default: 'N' + description: Direct Commit? + type: boolean + default: false useGhTokenWorkflow: - description: Use GhTokenWorkflow for Pull Request/COMMIT + description: Use GhTokenWorkflow for PR/Commit? 
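+      # note (based on the ReadSecrets wiring below): when enabled, the push is
+      # made with the GhTokenWorkflow secret (via useGhTokenWorkflowForPush and
+      # TokenForPush) instead of the default GITHUB_TOKEN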
type: boolean + default: false permissions: contents: write @@ -46,26 +47,32 @@ env: jobs: CreateTestApp: + needs: [ ] runs-on: [ windows-latest ] steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0095" - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -73,7 +80,7 @@ jobs: useGhTokenWorkflowForPush: '${{ github.event.inputs.useGhTokenWorkflow }}' - name: Creating a new test app - uses: microsoft/AL-Go-Actions/CreateApp@v4.0 + uses: microsoft/AL-Go-Actions/CreateApp@v4.1 with: shell: powershell token: ${{ steps.ReadSecrets.outputs.TokenForPush }} @@ -88,7 +95,7 @@ jobs: - name: Finalize the workflow if: always() - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0095" diff --git a/.github/workflows/Current.yaml b/.github/workflows/Current.yaml index dabbacf..eac6f06 100644 --- a/.github/workflows/Current.yaml +++ b/.github/workflows/Current.yaml @@ -17,6 +17,7 @@ env: jobs: Initialization: + needs: [ ] runs-on: [ windows-latest ] outputs: telemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} @@ -27,6 +28,11 @@ jobs: buildOrderJson: ${{ steps.determineProjectsToBuild.outputs.BuildOrderJson }} workflowDepth: ${{ steps.DetermineWorkflowDepth.outputs.WorkflowDepth }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 with: @@ -34,14 +40,14 @@ jobs: - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0101" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell @@ -52,7 +58,7 @@ jobs: - name: Determine Projects To Build id: determineProjectsToBuild - uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.0 + uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.1 with: shell: powershell maxBuildDepth: ${{ env.workflowDepth }} @@ -80,16 +86,16 @@ jobs: artifactsNameSuffix: 'Current' PostProcess: + needs: [ Initialization, Build ] if: always() runs-on: [ windows-latest ] - needs: [ Initialization, Build ] steps: - name: Checkout uses: actions/checkout@v3 - name: Finalize the workflow id: PostProcess - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0101" diff --git a/.github/workflows/DeployReferenceDocumentation.yaml b/.github/workflows/DeployReferenceDocumentation.yaml new file mode 100644 index 0000000..8cc86b9 --- /dev/null +++ b/.github/workflows/DeployReferenceDocumentation.yaml @@ -0,0 +1,71 @@ +name: ' Deploy Reference Documentation' + +on: + workflow_dispatch: + +permissions: + contents: write + actions: read + pages: write + 
id-token: write + +defaults: + run: + shell: powershell + +env: + ALGoOrgSettings: ${{ vars.ALGoOrgSettings }} + ALGoRepoSettings: ${{ vars.ALGoRepoSettings }} + +jobs: + DeployALDoc: + runs-on: [ windows-latest ] + name: Deploy Reference Documentation + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Initialize the workflow + id: init + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 + with: + shell: powershell + eventId: "DO0097" + + - name: Read settings + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 + with: + shell: powershell + + - name: Determine Deployment Environments + id: DetermineDeploymentEnvironments + uses: microsoft/AL-Go-Actions/DetermineDeploymentEnvironments@v4.1 + env: + GITHUB_TOKEN: ${{ github.token }} + with: + shell: powershell + getEnvironments: 'github-pages' + type: 'Publish' + + - name: Setup Pages + if: steps.DetermineDeploymentEnvironments.outputs.deployALDocArtifact == 1 + uses: actions/configure-pages@v3 + + - name: Build Reference Documentation + uses: microsoft/AL-Go-Actions/BuildReferenceDocumentation@v4.1 + with: + shell: powershell + artifacts: 'latest' + + - name: Upload pages artifact + uses: actions/upload-pages-artifact@v2 + with: + path: ".aldoc/_site/" + + - name: Deploy to GitHub Pages + if: steps.DetermineDeploymentEnvironments.outputs.deployALDocArtifact == 1 + id: deployment + uses: actions/deploy-pages@v2 diff --git a/.github/workflows/IncrementVersionNumber.yaml b/.github/workflows/IncrementVersionNumber.yaml index 89e4680..94b64e4 100644 --- a/.github/workflows/IncrementVersionNumber.yaml +++ b/.github/workflows/IncrementVersionNumber.yaml @@ -5,20 +5,21 @@ run-name: "Increment Version Number in [${{ github.ref_name }}]" on: workflow_dispatch: inputs: - project: - description: Project name if the repository is setup for multiple projects (* for all projects) + projects: + description: Comma-separated list of project name patterns if the repository is setup for multiple projects (default is * for all projects) required: false default: '*' versionNumber: description: Updated Version Number. Use Major.Minor for absolute change, use +Major.Minor for incremental change. required: true directCommit: - description: Direct COMMIT (Y/N) - required: false - default: 'N' + description: Direct Commit? + type: boolean + default: false useGhTokenWorkflow: - description: Use GhTokenWorkflow for Pull Request/COMMIT + description: Use GhTokenWorkflow for PR/Commit? 
type: boolean + default: false permissions: contents: write @@ -34,26 +35,32 @@ env: jobs: IncrementVersionNumber: + needs: [ ] runs-on: [ windows-latest ] steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0096" - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -61,18 +68,18 @@ jobs: useGhTokenWorkflowForPush: '${{ github.event.inputs.useGhTokenWorkflow }}' - name: Increment Version Number - uses: microsoft/AL-Go-Actions/IncrementVersionNumber@v4.0 + uses: microsoft/AL-Go-Actions/IncrementVersionNumber@v4.1 with: shell: powershell token: ${{ steps.ReadSecrets.outputs.TokenForPush }} parentTelemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} - project: ${{ github.event.inputs.project }} + projects: ${{ github.event.inputs.projects }} versionNumber: ${{ github.event.inputs.versionNumber }} directCommit: ${{ github.event.inputs.directCommit }} - name: Finalize the workflow if: always() - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0096" diff --git a/.github/workflows/NextMajor.yaml b/.github/workflows/NextMajor.yaml index 191a9ce..81af580 100644 --- a/.github/workflows/NextMajor.yaml +++ b/.github/workflows/NextMajor.yaml @@ -17,6 +17,7 @@ env: jobs: Initialization: + needs: [ ] runs-on: [ windows-latest ] outputs: telemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} @@ -27,6 +28,11 @@ jobs: buildOrderJson: ${{ steps.determineProjectsToBuild.outputs.BuildOrderJson }} workflowDepth: ${{ steps.DetermineWorkflowDepth.outputs.WorkflowDepth }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 with: @@ -34,14 +40,14 @@ jobs: - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0099" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell @@ -52,7 +58,7 @@ jobs: - name: Determine Projects To Build id: determineProjectsToBuild - uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.0 + uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.1 with: shell: powershell maxBuildDepth: ${{ env.workflowDepth }} @@ -80,16 +86,16 @@ jobs: artifactsNameSuffix: 'NextMajor' PostProcess: + needs: [ Initialization, Build ] if: always() runs-on: [ windows-latest ] - needs: [ Initialization, Build ] steps: - name: Checkout uses: actions/checkout@v3 - name: Finalize the workflow id: PostProcess - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0099" diff --git a/.github/workflows/NextMinor.yaml b/.github/workflows/NextMinor.yaml index 
267f179..1bd4412 100644 --- a/.github/workflows/NextMinor.yaml +++ b/.github/workflows/NextMinor.yaml @@ -17,6 +17,7 @@ env: jobs: Initialization: + needs: [ ] runs-on: [ windows-latest ] outputs: telemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} @@ -27,6 +28,11 @@ jobs: buildOrderJson: ${{ steps.determineProjectsToBuild.outputs.BuildOrderJson }} workflowDepth: ${{ steps.DetermineWorkflowDepth.outputs.WorkflowDepth }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 with: @@ -34,14 +40,14 @@ jobs: - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0100" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell @@ -52,7 +58,7 @@ jobs: - name: Determine Projects To Build id: determineProjectsToBuild - uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.0 + uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.1 with: shell: powershell maxBuildDepth: ${{ env.workflowDepth }} @@ -80,16 +86,16 @@ jobs: artifactsNameSuffix: 'NextMinor' PostProcess: + needs: [ Initialization, Build ] if: always() runs-on: [ windows-latest ] - needs: [ Initialization, Build ] steps: - name: Checkout uses: actions/checkout@v3 - name: Finalize the workflow id: PostProcess - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0100" diff --git a/.github/workflows/PublishToEnvironment.yaml b/.github/workflows/PublishToEnvironment.yaml index 9fe83ed..a94a316 100644 --- a/.github/workflows/PublishToEnvironment.yaml +++ b/.github/workflows/PublishToEnvironment.yaml @@ -25,6 +25,7 @@ env: jobs: Initialization: + needs: [ ] runs-on: [ windows-latest ] outputs: telemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} @@ -33,25 +34,30 @@ jobs: deploymentEnvironmentsJson: ${{ steps.DetermineDeploymentEnvironments.outputs.DeploymentEnvironmentsJson }} deviceCode: ${{ steps.Authenticate.outputs.deviceCode }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0097" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Determine Deployment Environments id: DetermineDeploymentEnvironments - uses: microsoft/AL-Go-Actions/DetermineDeploymentEnvironments@v4.0 + uses: microsoft/AL-Go-Actions/DetermineDeploymentEnvironments@v4.1 env: GITHUB_TOKEN: ${{ github.token }} with: @@ -69,7 +75,7 @@ jobs: - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 if: steps.DetermineDeploymentEnvironments.outputs.UnknownEnvironment == 1 with: shell: powershell @@ -101,7 +107,7 @@ jobs: Write-Host "No AuthContext provided for $envName, initiating Device Code flow" $ALGoHelperPath = "$([System.IO.Path]::GetTempFileName()).ps1" $webClient = New-Object System.Net.WebClient - 
$webClient.DownloadFile('https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.0/AL-Go-Helper.ps1', $ALGoHelperPath) + $webClient.DownloadFile('https://raw.githubusercontent.com/microsoft/AL-Go-Actions/v4.1/AL-Go-Helper.ps1', $ALGoHelperPath) . $ALGoHelperPath DownloadAndImportBcContainerHelper $authContext = New-BcAuthContext -includeDeviceLogin -deviceLoginTimeout ([TimeSpan]::FromSeconds(0)) @@ -132,13 +138,13 @@ jobs: Add-Content -Encoding UTF8 -Path $env:GITHUB_OUTPUT -Value "envName=$envName" - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -146,7 +152,7 @@ jobs: - name: Deploy id: Deploy - uses: microsoft/AL-Go-Actions/Deploy@v4.0 + uses: microsoft/AL-Go-Actions/Deploy@v4.1 env: Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' with: @@ -157,16 +163,16 @@ jobs: deploymentEnvironmentsJson: ${{ needs.Initialization.outputs.deploymentEnvironmentsJson }} PostProcess: + needs: [ Initialization, Deploy ] if: always() runs-on: [ windows-latest ] - needs: [ Initialization, Deploy ] steps: - name: Checkout uses: actions/checkout@v3 - name: Finalize the workflow id: PostProcess - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0097" diff --git a/.github/workflows/PullRequestHandler.yaml b/.github/workflows/PullRequestHandler.yaml index b84dffb..acb32f7 100644 --- a/.github/workflows/PullRequestHandler.yaml +++ b/.github/workflows/PullRequestHandler.yaml @@ -27,7 +27,7 @@ jobs: if: (github.event.pull_request.base.repo.full_name != github.event.pull_request.head.repo.full_name) && (github.event_name != 'pull_request') runs-on: [ windows-latest ] steps: - - uses: microsoft/AL-Go-Actions/VerifyPRChanges@v4.0 + - uses: microsoft/AL-Go-Actions/VerifyPRChanges@v4.1 Initialization: needs: [ PregateCheck ] @@ -40,8 +40,14 @@ jobs: projects: ${{ steps.determineProjectsToBuild.outputs.ProjectsJson }} projectDependenciesJson: ${{ steps.determineProjectsToBuild.outputs.ProjectDependenciesJson }} buildOrderJson: ${{ steps.determineProjectsToBuild.outputs.BuildOrderJson }} + baselineWorkflowRunId: ${{ steps.determineProjectsToBuild.outputs.BaselineWorkflowRunId }} workflowDepth: ${{ steps.DetermineWorkflowDepth.outputs.WorkflowDepth }} steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 with: @@ -50,14 +56,14 @@ jobs: - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0104" - name: Read settings id: ReadSettings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell @@ -68,7 +74,7 @@ jobs: - name: Determine Projects To Build id: determineProjectsToBuild - uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.0 + uses: microsoft/AL-Go-Actions/DetermineProjectsToBuild@v4.1 with: shell: powershell maxBuildDepth: ${{ env.workflowDepth }} @@ -92,19 +98,20 @@ jobs: projectName: ${{ matrix.projectName }} buildMode: ${{ matrix.buildMode }} projectDependenciesJson: ${{ 
needs.Initialization.outputs.projectDependenciesJson }} + baselineWorkflowRunId: ${{ needs.Initialization.outputs.baselineWorkflowRunId }} secrets: 'licenseFileUrl,keyVaultCertificateUrl,*keyVaultCertificatePassword,keyVaultClientId,gitHubPackagesContext,applicationInsightsConnectionString' publishThisBuildArtifacts: ${{ needs.Initialization.outputs.workflowDepth > 1 }} artifactsNameSuffix: 'PR${{ github.event.number }}' StatusCheck: - runs-on: [ windows-latest ] needs: [ Initialization, Build ] if: (!cancelled()) + runs-on: [ windows-latest ] name: Pull Request Status Check steps: - name: Pull Request Status Check id: PullRequestStatusCheck - uses: microsoft/AL-Go-Actions/PullRequestStatusCheck@v4.0 + uses: microsoft/AL-Go-Actions/PullRequestStatusCheck@v4.1 env: GITHUB_TOKEN: ${{ github.token }} with: diff --git a/.github/workflows/Troubleshooting.yaml b/.github/workflows/Troubleshooting.yaml new file mode 100644 index 0000000..94de8aa --- /dev/null +++ b/.github/workflows/Troubleshooting.yaml @@ -0,0 +1,37 @@ +name: 'Troubleshooting' + +on: + workflow_dispatch: + inputs: + displayNameOfSecrets: + description: Display the name (not the value) of secrets available to the repository + type: boolean + default: false + +permissions: + contents: read + actions: read + +defaults: + run: + shell: powershell + +env: + ALGoOrgSettings: ${{ vars.ALGoOrgSettings }} + ALGoRepoSettings: ${{ vars.ALGoRepoSettings }} + +jobs: + Troubleshooting: + runs-on: [ windows-latest ] + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + lfs: true + + - name: Troubleshooting + uses: microsoft/AL-Go-Actions/Troubleshooting@v4.1 + with: + shell: powershell + gitHubSecrets: ${{ toJson(secrets) }} + displayNameOfSecrets: ${{ github.event.inputs.displayNameOfSecrets }} diff --git a/.github/workflows/UpdateGitHubGoSystemFiles.yaml b/.github/workflows/UpdateGitHubGoSystemFiles.yaml index 1e21179..4f8d8a3 100644 --- a/.github/workflows/UpdateGitHubGoSystemFiles.yaml +++ b/.github/workflows/UpdateGitHubGoSystemFiles.yaml @@ -7,10 +7,14 @@ on: description: Template Repository URL (current is https://github.com/microsoft/AL-Go-PTE@main) required: false default: '' + downloadLatest: + description: Download latest from template repository + type: boolean + default: true directCommit: - description: Direct COMMIT (Y/N) - required: false - default: 'N' + description: Direct Commit? 
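+      # note: when false, changes are proposed through a pull request rather than
+      # committed directly; the 'Calculate Input' step below forces this to 'true'
+      # for scheduled runs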
+ type: boolean + default: false permissions: contents: read @@ -25,27 +29,33 @@ env: jobs: UpdateALGoSystemFiles: + needs: [ ] runs-on: [ windows-latest ] steps: + - name: Dump Workflow Information + uses: microsoft/AL-Go-Actions/DumpWorkflowInfo@v4.1 + with: + shell: powershell + - name: Checkout uses: actions/checkout@v3 - name: Initialize the workflow id: init - uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowInitialize@v4.1 with: shell: powershell eventId: "DO0098" - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 with: shell: powershell get: templateUrl - name: Read secrets id: ReadSecrets - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 with: shell: powershell gitHubSecrets: ${{ toJson(secrets) }} @@ -62,33 +72,37 @@ jobs: Add-Content -Encoding UTF8 -Path $env:GITHUB_ENV -Value "templateUrl=$templateUrl" } - - name: Calculate DirectCommit + - name: Calculate Input env: - directCommit: ${{ github.event.inputs.directCommit }} + directCommit: '${{ github.event.inputs.directCommit }}' + downloadLatest: ${{ github.event.inputs.downloadLatest }} eventName: ${{ github.event_name }} run: | $errorActionPreference = "Stop"; $ProgressPreference = "SilentlyContinue"; Set-StrictMode -Version 2.0 $directCommit = $ENV:directCommit + $downloadLatest = $ENV:downloadLatest Write-Host $ENV:eventName if ($ENV:eventName -eq 'schedule') { - Write-Host "Running Update AL-Go System Files on a schedule. Setting DirectCommit = Y" - $directCommit = 'Y' + Write-Host "Running Update AL-Go System Files on a schedule. Setting DirectCommit and DownloadLatest to true" + $directCommit = 'true' + $downloadLatest = 'true' } - Add-Content -Encoding UTF8 -Path $env:GITHUB_ENV -Value "DirectCommit=$directCommit" + Add-Content -Encoding UTF8 -Path $env:GITHUB_ENV -Value "directCommit=$directCommit" + Add-Content -Encoding UTF8 -Path $env:GITHUB_ENV -Value "downloadLatest=$downloadLatest" - name: Update AL-Go system files - uses: microsoft/AL-Go-Actions/CheckForUpdates@v4.0 + uses: microsoft/AL-Go-Actions/CheckForUpdates@v4.1 with: shell: powershell - parentTelemetryScopeJson: ${{ steps.init.outputs.telemetryScopeJson }} token: ${{ fromJson(steps.ReadSecrets.outputs.Secrets).ghTokenWorkflow }} - Update: Y + downloadLatest: ${{ env.downloadLatest }} + update: 'Y' templateUrl: ${{ env.templateUrl }} directCommit: ${{ env.directCommit }} - name: Finalize the workflow if: always() - uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.0 + uses: microsoft/AL-Go-Actions/WorkflowPostProcess@v4.1 with: shell: powershell eventId: "DO0098" diff --git a/.github/workflows/_BuildALGoProject.yaml b/.github/workflows/_BuildALGoProject.yaml index f0d8dae..2379302 100644 --- a/.github/workflows/_BuildALGoProject.yaml +++ b/.github/workflows/_BuildALGoProject.yaml @@ -36,6 +36,11 @@ on: description: Build mode used when building the artifacts required: true type: string + baselineWorkflowRunId: + description: ID of the baseline workflow run, from where to download the current project dependencies, in case they are not built in the current workflow run + required: false + default: '0' + type: string secrets: description: A comma-separated string with the names of the secrets, required for the workflow. 
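       # for the expected format, see the value passed in by PullRequestHandler.yaml
       # above, e.g. 'licenseFileUrl,keyVaultCertificateUrl,*keyVaultCertificatePassword,...'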
required: false @@ -43,14 +48,12 @@ on: type: string publishThisBuildArtifacts: description: Flag indicating whether this build artifacts should be published - required: false - default: false type: boolean + default: false publishArtifacts: description: Flag indicating whether the artifacts should be published - required: false - default: false type: boolean + default: false artifactsNameSuffix: description: Suffix to add to the artifacts names required: false @@ -58,14 +61,12 @@ on: type: string signArtifacts: description: Flag indicating whether the apps should be signed - required: false - default: false type: boolean + default: false useArtifactCache: description: Flag determining whether to use the Artifacts Cache - required: false - default: false type: boolean + default: false parentTelemetryScopeJson: description: Specifies the telemetry scope for the telemetry signal required: false @@ -77,179 +78,192 @@ env: jobs: BuildALGoProject: + needs: [ ] runs-on: ${{ fromJson(inputs.runsOn) }} + defaults: + run: + shell: ${{ inputs.shell }} name: ${{ inputs.projectName }} (${{ inputs.buildMode }}) steps: - - name: Checkout - uses: actions/checkout@v3 - with: - ref: ${{ inputs.checkoutRef }} - lfs: true - - - name: Read settings - uses: microsoft/AL-Go-Actions/ReadSettings@v4.0 - with: - shell: ${{ inputs.shell }} - project: ${{ inputs.project }} - get: useCompilerFolder,keyVaultCodesignCertificateName,doNotSignApps,doNotRunTests,artifact - - - name: Read secrets - id: ReadSecrets - if: github.event_name != 'pull_request' - uses: microsoft/AL-Go-Actions/ReadSecrets@v4.0 - with: - shell: ${{ inputs.shell }} - gitHubSecrets: ${{ toJson(secrets) }} - getSecrets: '${{ inputs.secrets }},appDependencyProbingPathsSecrets' - - - name: Determine ArtifactUrl - uses: microsoft/AL-Go-Actions/DetermineArtifactUrl@v4.0 - id: determineArtifactUrl - with: - shell: ${{ inputs.shell }} - parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} - project: ${{ inputs.project }} - - - name: Cache Business Central Artifacts - if: env.useCompilerFolder == 'True' && inputs.useArtifactCache && env.artifactCacheKey - uses: actions/cache@v3 - with: - path: .artifactcache - key: ${{ env.artifactCacheKey }} - - - name: Download Project Dependencies - id: DownloadProjectDependencies - uses: microsoft/AL-Go-Actions/DownloadProjectDependencies@v4.0 - env: - Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' - with: - shell: ${{ inputs.shell }} - project: ${{ inputs.project }} - buildMode: ${{ inputs.buildMode }} - projectsDependenciesJson: ${{ inputs.projectDependenciesJson }} - - - name: Run pipeline - id: RunPipeline - uses: microsoft/AL-Go-Actions/RunPipeline@v4.0 - env: - Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' - BuildMode: ${{ inputs.buildMode }} - with: - shell: ${{ inputs.shell }} - parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} - artifact: ${{ env.artifact }} - project: ${{ inputs.project }} - buildMode: ${{ inputs.buildMode }} - installAppsJson: ${{ steps.DownloadProjectDependencies.outputs.DownloadedApps }} - installTestAppsJson: ${{ steps.DownloadProjectDependencies.outputs.DownloadedTestApps }} - - - name: Sign - if: inputs.signArtifacts && env.doNotSignApps == 'False' && env.keyVaultCodesignCertificateName != '' - id: sign - uses: microsoft/AL-Go-Actions/Sign@v4.0 - with: - shell: ${{ needs.Initialization.outputs.githubRunnerShell }} - azureCredentialsJson: ${{ secrets.AZURE_CREDENTIALS }} - pathToFiles: '${{ inputs.project }}/.buildartifacts/Apps/*.app' - 
parentTelemetryScopeJson: ${{ needs.Initialization.outputs.telemetryScopeJson }} - - - name: Calculate Artifact names - id: calculateArtifactsNames - uses: microsoft/AL-Go-Actions/CalculateArtifactNames@v4.0 - if: success() || failure() - with: - shell: ${{ inputs.shell }} - project: ${{ inputs.project }} - buildMode: ${{ inputs.buildMode }} - suffix: ${{ inputs.artifactsNameSuffix }} - - - name: Upload thisbuild artifacts - apps - if: inputs.publishThisBuildArtifacts - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.calculateArtifactsNames.outputs.ThisBuildAppsArtifactsName }} - path: '${{ inputs.project }}/.buildartifacts/Apps/' - if-no-files-found: ignore - retention-days: 1 - - - name: Upload thisbuild artifacts - test apps - if: inputs.publishThisBuildArtifacts - uses: actions/upload-artifact@v3 - with: - name: ${{ steps.calculateArtifactsNames.outputs.ThisBuildTestAppsArtifactsName }} - path: '${{ inputs.project }}/.buildartifacts/TestApps/' - if-no-files-found: ignore - retention-days: 1 - - - name: Publish artifacts - apps - uses: actions/upload-artifact@v3 - if: inputs.publishArtifacts - with: - name: ${{ steps.calculateArtifactsNames.outputs.AppsArtifactsName }} - path: '${{ inputs.project }}/.buildartifacts/Apps/' - if-no-files-found: ignore - - - name: Publish artifacts - dependencies - uses: actions/upload-artifact@v3 - if: inputs.publishArtifacts - with: - name: ${{ steps.calculateArtifactsNames.outputs.DependenciesArtifactsName }} - path: '${{ inputs.project }}/.buildartifacts/Dependencies/' - if-no-files-found: ignore - - - name: Publish artifacts - test apps - uses: actions/upload-artifact@v3 - if: inputs.publishArtifacts - with: - name: ${{ steps.calculateArtifactsNames.outputs.TestAppsArtifactsName }} - path: '${{ inputs.project }}/.buildartifacts/TestApps/' - if-no-files-found: ignore - - - name: Publish artifacts - build output - uses: actions/upload-artifact@v3 - if: (success() || failure()) && (hashFiles(format('{0}/BuildOutput.txt',inputs.project)) != '') - with: - name: ${{ steps.calculateArtifactsNames.outputs.BuildOutputArtifactsName }} - path: '${{ inputs.project }}/BuildOutput.txt' - if-no-files-found: ignore - - - name: Publish artifacts - container event log - uses: actions/upload-artifact@v3 - if: (failure()) && (hashFiles(format('{0}/ContainerEventLog.evtx',inputs.project)) != '') - with: - name: ${{ steps.calculateArtifactsNames.outputs.ContainerEventLogArtifactsName }} - path: '${{ inputs.project }}/ContainerEventLog.evtx' - if-no-files-found: ignore - - - name: Publish artifacts - test results - uses: actions/upload-artifact@v3 - if: (success() || failure()) && (hashFiles(format('{0}/TestResults.xml',inputs.project)) != '') - with: - name: ${{ steps.calculateArtifactsNames.outputs.TestResultsArtifactsName }} - path: '${{ inputs.project }}/TestResults.xml' - if-no-files-found: ignore - - - name: Publish artifacts - bcpt test results - uses: actions/upload-artifact@v3 - if: (success() || failure()) && (hashFiles(format('{0}/bcptTestResults.json',inputs.project)) != '') - with: - name: ${{ steps.calculateArtifactsNames.outputs.BcptTestResultsArtifactsName }} - path: '${{ inputs.project }}/bcptTestResults.json' - if-no-files-found: ignore - - - name: Analyze Test Results - id: analyzeTestResults - if: (success() || failure()) && env.doNotRunTests == 'False' - uses: microsoft/AL-Go-Actions/AnalyzeTests@v4.0 - with: - shell: ${{ inputs.shell }} - parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} - project: ${{ inputs.project }} - - - 
name: Cleanup - if: always() - uses: microsoft/AL-Go-Actions/PipelineCleanup@v4.0 - with: - shell: ${{ inputs.shell }} - parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} - project: ${{ inputs.project }} + - name: Checkout + uses: actions/checkout@v3 + with: + ref: ${{ inputs.checkoutRef }} + lfs: true + + - name: Read settings + uses: microsoft/AL-Go-Actions/ReadSettings@v4.1 + with: + shell: ${{ inputs.shell }} + project: ${{ inputs.project }} + get: useCompilerFolder,keyVaultCodesignCertificateName,doNotSignApps,doNotRunTests,artifact,generateDependencyArtifact + + - name: Read secrets + id: ReadSecrets + if: github.event_name != 'pull_request' + uses: microsoft/AL-Go-Actions/ReadSecrets@v4.1 + with: + shell: ${{ inputs.shell }} + gitHubSecrets: ${{ toJson(secrets) }} + getSecrets: '${{ inputs.secrets }},appDependencyProbingPathsSecrets' + + - name: Determine ArtifactUrl + uses: microsoft/AL-Go-Actions/DetermineArtifactUrl@v4.1 + id: determineArtifactUrl + with: + shell: ${{ inputs.shell }} + parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} + project: ${{ inputs.project }} + + - name: Cache Business Central Artifacts + if: env.useCompilerFolder == 'True' && inputs.useArtifactCache && env.artifactCacheKey + uses: actions/cache@v3 + with: + path: .artifactcache + key: ${{ env.artifactCacheKey }} + + - name: Download Project Dependencies + id: DownloadProjectDependencies + uses: microsoft/AL-Go-Actions/DownloadProjectDependencies@v4.1 + env: + Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' + with: + shell: ${{ inputs.shell }} + project: ${{ inputs.project }} + buildMode: ${{ inputs.buildMode }} + projectsDependenciesJson: ${{ inputs.projectDependenciesJson }} + baselineWorkflowRunId: ${{ inputs.baselineWorkflowRunId }} + + - name: Build + uses: microsoft/AL-Go-Actions/RunPipeline@v4.1 + env: + Secrets: '${{ steps.ReadSecrets.outputs.Secrets }}' + BuildMode: ${{ inputs.buildMode }} + with: + shell: ${{ inputs.shell }} + parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} + artifact: ${{ env.artifact }} + project: ${{ inputs.project }} + buildMode: ${{ inputs.buildMode }} + installAppsJson: ${{ steps.DownloadProjectDependencies.outputs.DownloadedApps }} + installTestAppsJson: ${{ steps.DownloadProjectDependencies.outputs.DownloadedTestApps }} + + - name: Sign + if: inputs.signArtifacts && env.doNotSignApps == 'False' && env.keyVaultCodesignCertificateName != '' + id: sign + uses: microsoft/AL-Go-Actions/Sign@v4.1 + with: + shell: ${{ needs.Initialization.outputs.githubRunnerShell }} + azureCredentialsJson: ${{ secrets.AZURE_CREDENTIALS }} + pathToFiles: '${{ inputs.project }}/.buildartifacts/Apps/*.app' + parentTelemetryScopeJson: ${{ needs.Initialization.outputs.telemetryScopeJson }} + + - name: Calculate Artifact names + id: calculateArtifactsNames + uses: microsoft/AL-Go-Actions/CalculateArtifactNames@v4.1 + if: success() || failure() + with: + shell: ${{ inputs.shell }} + project: ${{ inputs.project }} + buildMode: ${{ inputs.buildMode }} + suffix: ${{ inputs.artifactsNameSuffix }} + + - name: Upload thisbuild artifacts - apps + if: inputs.publishThisBuildArtifacts + uses: actions/upload-artifact@v3 + with: + name: ${{ steps.calculateArtifactsNames.outputs.ThisBuildAppsArtifactsName }} + path: '${{ inputs.project }}/.buildartifacts/Apps/' + if-no-files-found: ignore + retention-days: 1 + + - name: Upload thisbuild artifacts - dependencies + if: inputs.publishThisBuildArtifacts + uses: actions/upload-artifact@v3 + with: + name: ${{ 
steps.calculateArtifactsNames.outputs.ThisBuildDependenciesArtifactsName }} + path: '${{ inputs.project }}/.buildartifacts/Dependencies/' + if-no-files-found: ignore + retention-days: 1 + + - name: Upload thisbuild artifacts - test apps + if: inputs.publishThisBuildArtifacts + uses: actions/upload-artifact@v3 + with: + name: ${{ steps.calculateArtifactsNames.outputs.ThisBuildTestAppsArtifactsName }} + path: '${{ inputs.project }}/.buildartifacts/TestApps/' + if-no-files-found: ignore + retention-days: 1 + + - name: Publish artifacts - apps + uses: actions/upload-artifact@v3 + if: inputs.publishArtifacts + with: + name: ${{ steps.calculateArtifactsNames.outputs.AppsArtifactsName }} + path: '${{ inputs.project }}/.buildartifacts/Apps/' + if-no-files-found: ignore + + - name: Publish artifacts - dependencies + uses: actions/upload-artifact@v3 + if: inputs.publishArtifacts && env.generateDependencyArtifact == 'True' + with: + name: ${{ steps.calculateArtifactsNames.outputs.DependenciesArtifactsName }} + path: '${{ inputs.project }}/.buildartifacts/Dependencies/' + if-no-files-found: ignore + + - name: Publish artifacts - test apps + uses: actions/upload-artifact@v3 + if: inputs.publishArtifacts + with: + name: ${{ steps.calculateArtifactsNames.outputs.TestAppsArtifactsName }} + path: '${{ inputs.project }}/.buildartifacts/TestApps/' + if-no-files-found: ignore + + - name: Publish artifacts - build output + uses: actions/upload-artifact@v3 + if: (success() || failure()) && (hashFiles(format('{0}/BuildOutput.txt',inputs.project)) != '') + with: + name: ${{ steps.calculateArtifactsNames.outputs.BuildOutputArtifactsName }} + path: '${{ inputs.project }}/BuildOutput.txt' + if-no-files-found: ignore + + - name: Publish artifacts - container event log + uses: actions/upload-artifact@v3 + if: (failure()) && (hashFiles(format('{0}/ContainerEventLog.evtx',inputs.project)) != '') + with: + name: ${{ steps.calculateArtifactsNames.outputs.ContainerEventLogArtifactsName }} + path: '${{ inputs.project }}/ContainerEventLog.evtx' + if-no-files-found: ignore + + - name: Publish artifacts - test results + uses: actions/upload-artifact@v3 + if: (success() || failure()) && (hashFiles(format('{0}/TestResults.xml',inputs.project)) != '') + with: + name: ${{ steps.calculateArtifactsNames.outputs.TestResultsArtifactsName }} + path: '${{ inputs.project }}/TestResults.xml' + if-no-files-found: ignore + + - name: Publish artifacts - bcpt test results + uses: actions/upload-artifact@v3 + if: (success() || failure()) && (hashFiles(format('{0}/bcptTestResults.json',inputs.project)) != '') + with: + name: ${{ steps.calculateArtifactsNames.outputs.BcptTestResultsArtifactsName }} + path: '${{ inputs.project }}/bcptTestResults.json' + if-no-files-found: ignore + + - name: Analyze Test Results + id: analyzeTestResults + if: (success() || failure()) && env.doNotRunTests == 'False' + uses: microsoft/AL-Go-Actions/AnalyzeTests@v4.1 + with: + shell: ${{ inputs.shell }} + parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} + project: ${{ inputs.project }} + + - name: Cleanup + if: always() + uses: microsoft/AL-Go-Actions/PipelineCleanup@v4.1 + with: + shell: ${{ inputs.shell }} + parentTelemetryScopeJson: ${{ inputs.parentTelemetryScopeJson }} + project: ${{ inputs.project }} From 93a257a355dfedeb17ecc235985a2dab961f218d Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 16:14:47 +0100 Subject: [PATCH 08/24] Add validation for maximum requests field --- businessCentral/app/src/Setup.Table.al | 17 
+++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/businessCentral/app/src/Setup.Table.al b/businessCentral/app/src/Setup.Table.al
index 9744ae7..f428ef3 100644
--- a/businessCentral/app/src/Setup.Table.al
+++ b/businessCentral/app/src/Setup.Table.al
@@ -129,6 +129,21 @@ table 82560 "ADLSE Setup"
             Error(ErrorInfo.Create(NoSchemaExportedErr, true));
         end;
     }
+    field(55; "Maximum Requests"; Integer)
+    {
+        Caption = 'Maximum requests';
+
+        trigger OnValidate()
+        begin
+            if Rec."Maximum Requests" > 10 then begin
+                MaxReqErrorInfo.DataClassification := DataClassification::SystemMetadata;
+                MaxReqErrorInfo.ErrorType := ErrorType::Client;
+                MaxReqErrorInfo.Verbosity := Verbosity::Error;
+                MaxReqErrorInfo.Message := MaximumRequestsErr;
+                Error(MaxReqErrorInfo);
+            end;
+        end;
+    }
 }

 keys
@@ -140,11 +155,13 @@
     }

     var
+        MaxReqErrorInfo: ErrorInfo;
         ContainerNameIncorrectFormatErr: Label 'The container name is in an incorrect format.';
         AccountNameIncorrectFormatErr: Label 'The account name is in an incorrect format.';
         RecordDoesNotExistErr: Label 'No record on this table exists.';
         PrimaryKeyValueLbl: Label '0', Locked = true;
         NoSchemaExportedErr: Label 'Schema already exported. Please perform the action "clear schema export date" before changing the schema.';
+        MaximumRequestsErr: Label 'Please enter a value smaller than 10 for maximum requests.';

     local procedure TextCharactersOtherThan(String: Text; CharString: Text): Boolean
     var
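
An aside on the validation pattern in the patch above: the same guard can be
written more compactly with ErrorInfo.Create, the pattern this table already
uses for NoSchemaExportedErr. A minimal sketch (not part of the patch series),
assuming the default ErrorInfo classification and verbosity are acceptable:

    trigger OnValidate()
    begin
        // one-call variant: ErrorInfo.Create(Message, true) returns a collectible
        // ErrorInfo with default properties, mirroring the guard condition above
        if Rec."Maximum Requests" > 10 then
            Error(ErrorInfo.Create(MaximumRequestsErr, true));
    end;
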
From d5c4e4f357777143e97c958065af118db5f3d588 Mon Sep 17 00:00:00 2001
From: Bert Verbeek
Date: Tue, 27 Feb 2024 16:25:33 +0100
Subject: [PATCH 09/24] Update ADLSE Setup with Maximum Retries

Add Maximum Retries field to ADLSE Setup Page and Table.
Fix validation error for Maximum Retries field.
---
 businessCentral/app/src/Http.Codeunit.al |  3 ++-
 businessCentral/app/src/Setup.Page.al    |  4 ++++
 businessCentral/app/src/Setup.Table.al   | 11 ++++++-----
 3 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/businessCentral/app/src/Http.Codeunit.al b/businessCentral/app/src/Http.Codeunit.al
index bce589b..de27160 100644
--- a/businessCentral/app/src/Http.Codeunit.al
+++ b/businessCentral/app/src/Http.Codeunit.al
@@ -112,7 +112,8 @@ codeunit 82563 "ADLSE Http"
         MaxRetries: Integer;
         RetryCount: Integer;
     begin
-        MaxRetries := 5;
+        ADLSESetup.GetSingleton();
+        MaxRetries := ADLSESetup."Maximum Retries";
         RetryCount := 0;

         HttpClient.SetBaseAddress(Url);
diff --git a/businessCentral/app/src/Setup.Page.al b/businessCentral/app/src/Setup.Page.al
index ce4261e..9980ac2 100644
--- a/businessCentral/app/src/Setup.Page.al
+++ b/businessCentral/app/src/Setup.Page.al
@@ -153,6 +153,10 @@ page 82560 "ADLSE Setup"
                     ToolTip = 'Specifies if the table will be deleted if a reset of the table is done.';
                     Editable = not AzureDataLake;
                 }
+                field("Maximum retries"; Rec."Maximum Retries")
+                {
+                    ToolTip = 'Specifies the maximum number of retries of a request. The maximum value is 10.';
+                }
             }
         }
         part(Tables; "ADLSE Setup Tables")
diff --git a/businessCentral/app/src/Setup.Table.al b/businessCentral/app/src/Setup.Table.al
index 805b1e8..57333e4 100644
--- a/businessCentral/app/src/Setup.Table.al
+++ b/businessCentral/app/src/Setup.Table.al
@@ -133,17 +133,18 @@ table 82560 "ADLSE Setup"
     {
         Caption = 'Delete table';
     }
-    field(55; "Maximum Requests"; Integer)
+    field(55; "Maximum Retries"; Integer)
     {
-        Caption = 'Maximum requests';
+        Caption = 'Maximum retries';
+        InitValue = 0;

         trigger OnValidate()
         begin
-            if Rec."Maximum Requests" > 10 then begin
+            if Rec."Maximum Retries" > 10 then begin
                 MaxReqErrorInfo.DataClassification := DataClassification::SystemMetadata;
                 MaxReqErrorInfo.ErrorType := ErrorType::Client;
                 MaxReqErrorInfo.Verbosity := Verbosity::Error;
-                MaxReqErrorInfo.Message := MaximumRequestsErr;
+                MaxReqErrorInfo.Message := MaximumRetriesErr;
                 Error(MaxReqErrorInfo);
             end;
         end;
@@ -165,7 +166,7 @@
         RecordDoesNotExistErr: Label 'No record on this table exists.';
         PrimaryKeyValueLbl: Label '0', Locked = true;
         NoSchemaExportedErr: Label 'Schema already exported. Please perform the action "clear schema export date" before changing the schema.';
-        MaximumRequestsErr: Label 'Please enter a value smaller than 10 for maximum requests.';
+        MaximumRetriesErr: Label 'Please enter a value that is equal to or smaller than 10 for the maximum retries.';

     local procedure TextCharactersOtherThan(String: Text; CharString: Text): Boolean
     var

From 153e3e02a7800f93e2a329d1bacb6197a9036765 Mon Sep 17 00:00:00 2001
From: Bertverbeek4PS
Date: Tue, 27 Feb 2024 17:03:43 +0000
Subject: [PATCH 10/24] Update AL-Go System Files

---
 .github/AL-Go-Settings.json | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/AL-Go-Settings.json b/.github/AL-Go-Settings.json
index 17ac5bb..84409e2 100644
--- a/.github/AL-Go-Settings.json
+++ b/.github/AL-Go-Settings.json
@@ -1,4 +1,5 @@
 {
   "type": "PTE",
-  "templateUrl": "https://github.com/microsoft/AL-Go-PTE@main"
+  "templateUrl": "https://github.com/microsoft/AL-Go-PTE@main",
+  "templateSha": "1e6ebb58fc629cc134759f996f9be9ab6ada7fb4"
 }

From f6e89f58c21fc262ad061dfc0ccf4f8d1fced726 Mon Sep 17 00:00:00 2001
From: Bert Verbeek
Date: Tue, 27 Feb 2024 18:18:49 +0100
Subject: [PATCH 11/24] Commented out test folder in settings.json

---
 .AL-Go/settings.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.AL-Go/settings.json b/.AL-Go/settings.json
index 81e6109..b9a4eec 100644
--- a/.AL-Go/settings.json
+++ b/.AL-Go/settings.json
@@ -4,7 +4,7 @@
     "businessCentral\\app"
   ],
   "testFolders": [
-    "businessCentral\\test"
+    //"businessCentral\\test"
   ],
   "bcptTestFolders": [],
   "repoVersion": "2.19"

From e3671f88ae50cf869c15ef436888788167075dba Mon Sep 17 00:00:00 2001
From: Bert Verbeek
Date: Tue, 27 Feb 2024 18:22:24 +0100
Subject: [PATCH 12/24] Remove commented out test folder path

---
 .AL-Go/settings.json | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.AL-Go/settings.json b/.AL-Go/settings.json
index b9a4eec..1daa71a 100644
--- a/.AL-Go/settings.json
+++ b/.AL-Go/settings.json
@@ -4,7 +4,6 @@
     "businessCentral\\app"
   ],
   "testFolders": [
-    //"businessCentral\\test"
   ],
   "bcptTestFolders": [],
   "repoVersion": "2.19"

From 7168d7218e41df44d2fc968cea091897c1a7fcbc Mon Sep 17 00:00:00 2001
From: Bert Verbeek
Date: Tue, 27 Feb 2024 18:39:54 +0100
Subject: [PATCH 13/24] Add test folder and update AL-Go settings

---
 .AL-Go/settings.json        | 1 +
 .github/AL-Go-Settings.json | 4 +++-
 2 files changed, 4
insertions(+), 1 deletion(-) diff --git a/.AL-Go/settings.json b/.AL-Go/settings.json index 1daa71a..81e6109 100644 --- a/.AL-Go/settings.json +++ b/.AL-Go/settings.json @@ -4,6 +4,7 @@ "businessCentral\\app" ], "testFolders": [ + "businessCentral\\test" ], "bcptTestFolders": [], "repoVersion": "2.19" diff --git a/.github/AL-Go-Settings.json b/.github/AL-Go-Settings.json index 17ac5bb..a3dfcdc 100644 --- a/.github/AL-Go-Settings.json +++ b/.github/AL-Go-Settings.json @@ -1,4 +1,6 @@ { "type": "PTE", - "templateUrl": "https://github.com/microsoft/AL-Go-PTE@main" + "templateUrl": "https://github.com/microsoft/AL-Go-PTE@main", + "useCompilerFolder": true, + "doNotPublishApps": true } From 89221f57d613e864955a00ee034ea6c62e36f435 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 18:53:05 +0100 Subject: [PATCH 14/24] Update AL-Go-Settings.json: Changed useCompilerFolder and doNotPublishApps to false --- .github/AL-Go-Settings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/AL-Go-Settings.json b/.github/AL-Go-Settings.json index 5cae5e0..75e49c4 100644 --- a/.github/AL-Go-Settings.json +++ b/.github/AL-Go-Settings.json @@ -2,6 +2,6 @@ "type": "PTE", "templateUrl": "https://github.com/microsoft/AL-Go-PTE@main", "templateSha": "1e6ebb58fc629cc134759f996f9be9ab6ada7fb4", - "useCompilerFolder": true, - "doNotPublishApps": true + "useCompilerFolder": false, + "doNotPublishApps": false } From 1ec978d44fbbfdc88b3494ad4bd065769a654399 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 19:37:56 +0100 Subject: [PATCH 15/24] Update AL-Go-Settings.json --- .github/AL-Go-Settings.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/AL-Go-Settings.json b/.github/AL-Go-Settings.json index 75e49c4..84409e2 100644 --- a/.github/AL-Go-Settings.json +++ b/.github/AL-Go-Settings.json @@ -1,7 +1,5 @@ { "type": "PTE", "templateUrl": "https://github.com/microsoft/AL-Go-PTE@main", - "templateSha": "1e6ebb58fc629cc134759f996f9be9ab6ada7fb4", - "useCompilerFolder": false, - "doNotPublishApps": false + "templateSha": "1e6ebb58fc629cc134759f996f9be9ab6ada7fb4" } From 5848a46da18795c37b7f9b91f66b53abf6186c27 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Tue, 27 Feb 2024 19:40:06 +0100 Subject: [PATCH 16/24] Update settings.json --- .AL-Go/settings.json | 1 - 1 file changed, 1 deletion(-) diff --git a/.AL-Go/settings.json b/.AL-Go/settings.json index 81e6109..1daa71a 100644 --- a/.AL-Go/settings.json +++ b/.AL-Go/settings.json @@ -4,7 +4,6 @@ "businessCentral\\app" ], "testFolders": [ - "businessCentral\\test" ], "bcptTestFolders": [], "repoVersion": "2.19" From e878bb38c1b437dcd3005055658ad03116ef2344 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Wed, 28 Feb 2024 12:15:15 +0100 Subject: [PATCH 17/24] Update platform and application versions in app.json --- businessCentral/app/app.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/businessCentral/app/app.json b/businessCentral/app/app.json index ef63b6f..ce6a102 100644 --- a/businessCentral/app/app.json +++ b/businessCentral/app/app.json @@ -12,8 +12,8 @@ "contextSensitiveHelpUrl": "https://docs.microsoft.com/dynamics365/business-central/", "logo": "../../.assets/bc2adls_logo.png", "screenshots": [], - "platform": "23.0.0.0", - "application": "23.0.0.0", + "platform": "22.0.0.0", + "application": "22.0.0.0", "resourceExposurePolicy": { "allowDebugging": true, "allowDownloadingSource": true, @@ -33,10 +33,10 @@ }, { "publisher": "The bc2adls 
team", - "name": "Tests for Azure Data Lake Storage Export", + "name": "Azure Data Lake Storage Export Test", "id": "03486619-1622-4261-ae0e-b366b3c96e3c" } ], "target": "Cloud", - "runtime": "12.0" + "runtime": "11.0" } From c1f2e90831e37e57b4beb52dbb53efb5bed14a99 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Wed, 28 Feb 2024 12:20:05 +0100 Subject: [PATCH 18/24] Update AL-Go-Settings and app.json versions --- .github/AL-Go-Settings.json | 4 +--- businessCentral/app/app.json | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/AL-Go-Settings.json b/.github/AL-Go-Settings.json index 75e49c4..84409e2 100644 --- a/.github/AL-Go-Settings.json +++ b/.github/AL-Go-Settings.json @@ -1,7 +1,5 @@ { "type": "PTE", "templateUrl": "https://github.com/microsoft/AL-Go-PTE@main", - "templateSha": "1e6ebb58fc629cc134759f996f9be9ab6ada7fb4", - "useCompilerFolder": false, - "doNotPublishApps": false + "templateSha": "1e6ebb58fc629cc134759f996f9be9ab6ada7fb4" } diff --git a/businessCentral/app/app.json b/businessCentral/app/app.json index ce6a102..c887140 100644 --- a/businessCentral/app/app.json +++ b/businessCentral/app/app.json @@ -12,8 +12,8 @@ "contextSensitiveHelpUrl": "https://docs.microsoft.com/dynamics365/business-central/", "logo": "../../.assets/bc2adls_logo.png", "screenshots": [], - "platform": "22.0.0.0", - "application": "22.0.0.0", + "platform": "23.0.0.0", + "application": "23.0.0.0", "resourceExposurePolicy": { "allowDebugging": true, "allowDownloadingSource": true, @@ -38,5 +38,5 @@ } ], "target": "Cloud", - "runtime": "11.0" + "runtime": "12.0" } From dc700d8d42106230d8e63ea2d5ec0f56dcdc1265 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Wed, 28 Feb 2024 12:21:16 +0100 Subject: [PATCH 19/24] Add test folder to settings.json --- .AL-Go/settings.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.AL-Go/settings.json b/.AL-Go/settings.json index 1daa71a..81e6109 100644 --- a/.AL-Go/settings.json +++ b/.AL-Go/settings.json @@ -4,6 +4,7 @@ "businessCentral\\app" ], "testFolders": [ + "businessCentral\\test" ], "bcptTestFolders": [], "repoVersion": "2.19" From e0c9f77981a66679d13df0fc59ef159e893aa5a6 Mon Sep 17 00:00:00 2001 From: Bert Verbeek Date: Wed, 28 Feb 2024 12:39:24 +0100 Subject: [PATCH 20/24] Update app.json: change name of Azure Data Lake Storage Export Test to Azure Data Lake Storage Export Tests --- businessCentral/app/app.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/businessCentral/app/app.json b/businessCentral/app/app.json index c887140..96acf49 100644 --- a/businessCentral/app/app.json +++ b/businessCentral/app/app.json @@ -33,7 +33,7 @@ }, { "publisher": "The bc2adls team", - "name": "Azure Data Lake Storage Export Test", + "name": "Azure Data Lake Storage Export Tests", "id": "03486619-1622-4261-ae0e-b366b3c96e3c" } ], From e82ab30e8aeb13750009fe2a983959f7db271db8 Mon Sep 17 00:00:00 2001 From: Bertverbeek4PS Date: Wed, 28 Feb 2024 12:36:56 +0000 Subject: [PATCH 21/24] New Version number 2.20 --- .AL-Go/settings.json | 2 +- businessCentral/app/app.json | 2 +- businessCentral/test/app.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.AL-Go/settings.json b/.AL-Go/settings.json index 81e6109..921a34f 100644 --- a/.AL-Go/settings.json +++ b/.AL-Go/settings.json @@ -7,5 +7,5 @@ "businessCentral\\test" ], "bcptTestFolders": [], - "repoVersion": "2.19" + "repoVersion": "2.20" } diff --git a/businessCentral/app/app.json b/businessCentral/app/app.json index 96acf49..dc126e3 100644 --- 
a/businessCentral/app/app.json
+++ b/businessCentral/app/app.json
@@ -4,7 +4,7 @@
   "publisher": "The bc2adls team",
   "brief": "Sync data from Business Central to the Azure storage",
   "description": "Exports data in chosen tables to the Azure Data Lake and keeps it in sync by incremental updates. Before you use this tool, please read the SUPPORT.md file at https://github.com/microsoft/bc2adls.",
-  "version": "2.19.0.0",
+  "version": "2.20.0.0",
   "privacyStatement": "https://go.microsoft.com/fwlink/?LinkId=724009",
   "EULA": "https://go.microsoft.com/fwlink/?linkid=2009120",
   "help": "https://go.microsoft.com/fwlink/?LinkId=724011",
diff --git a/businessCentral/test/app.json b/businessCentral/test/app.json
index 7c8dcc4..377a178 100644
--- a/businessCentral/test/app.json
+++ b/businessCentral/test/app.json
@@ -2,7 +2,7 @@
   "id": "03486619-1622-4261-ae0e-b366b3c96e3c",
   "name": "Azure Data Lake Storage Export Tests",
   "publisher": "The bc2adls team",
-  "version": "2.19.0.0",
+  "version": "2.20.0.0",
   "brief": "Tests for Azure Data Lake Storage Export",
   "description": "Tests for Azure Data Lake Storage Export",
   "privacyStatement": "",

From a9a498f85428e536411cc3b36326296fcf567721 Mon Sep 17 00:00:00 2001
From: Bert Verbeek
Date: Thu, 7 Mar 2024 09:15:17 +0100
Subject: [PATCH 22/24] Refactor ADLSE Http code and remove Maximum Retries field from ADLSE Setup page

---
 businessCentral/app/src/Http.Codeunit.al | 40 ++----------------------
 businessCentral/app/src/Setup.Page.al    |  4 ---
 2 files changed, 3 insertions(+), 41 deletions(-)

diff --git a/businessCentral/app/src/Http.Codeunit.al b/businessCentral/app/src/Http.Codeunit.al
index de27160..c8986f7 100644
--- a/businessCentral/app/src/Http.Codeunit.al
+++ b/businessCentral/app/src/Http.Codeunit.al
@@ -109,12 +109,8 @@ codeunit 82563 "ADLSE Http"
         HttpContent: HttpContent;
         HeaderKey: Text;
         HeaderValue: Text;
-        MaxRetries: Integer;
-        RetryCount: Integer;
     begin
         ADLSESetup.GetSingleton();
-        MaxRetries := ADLSESetup."Maximum Retries";
-        RetryCount := 0;
         HttpClient.SetBaseAddress(Url);
         if not AddAuthorization(HttpClient, Response) then
             exit(false);
@@ -137,17 +133,7 @@ codeunit 82563 "ADLSE Http"
                     HttpRequestMessage.Method('PUT');
                     HttpRequestMessage.SetRequestUri(Url);
                     AddContent(HttpContent);
-                    while RetryCount < MaxRetries do begin
-                        HttpClient.Put(Url, HttpContent, HttpResponseMessage);
-                        if HttpResponseMessage.IsSuccessStatusCode() then
-                            exit
-                        else begin
-                            RetryCount += 1;
-                            if RetryCount >= MaxRetries then
-                                exit;
-                            Sleep(5000);
-                        end;
-                    end;
+                    HttpClient.Put(Url, HttpContent, HttpResponseMessage);
                 end;
             "ADLSE Http Method"::Delete:
                 HttpClient.Delete(Url, HttpResponseMessage);
@@ -157,33 +143,13 @@ codeunit 82563 "ADLSE Http"
                     HttpRequestMessage.SetRequestUri(Url);
                     AddContent(HttpContent);
                     HttpRequestMessage.Content(HttpContent);
-                    while RetryCount < MaxRetries do begin
-                        HttpClient.Send(HttpRequestMessage, HttpResponseMessage);
-                        if HttpResponseMessage.IsSuccessStatusCode() then
-                            exit
-                        else begin
-                            RetryCount += 1;
-                            if RetryCount >= MaxRetries then
-                                exit;
-                            Sleep(5000);
-                        end;
-                    end;
+                    HttpClient.Send(HttpRequestMessage, HttpResponseMessage);
                 end;
             "ADLSE Http Method"::Head:
                 begin
                     HttpRequestMessage.Method('HEAD');
                     HttpRequestMessage.SetRequestUri(Url);
-                    while RetryCount < MaxRetries do begin
-                        HttpClient.Send(HttpRequestMessage, HttpResponseMessage);
-                        if HttpResponseMessage.IsSuccessStatusCode() then
-                            exit
-                        else begin
-                            RetryCount += 1;
-                            if RetryCount >= MaxRetries then
-                                exit;
-                            Sleep(5000);
-                        end;
-                    end;
+                    HttpClient.Send(HttpRequestMessage, HttpResponseMessage);
                 end;
             else
                 Error(UnsupportedMethodErr, HttpMethod);
diff --git a/businessCentral/app/src/Setup.Page.al b/businessCentral/app/src/Setup.Page.al
index 9980ac2..ce4261e 100644
--- a/businessCentral/app/src/Setup.Page.al
+++ b/businessCentral/app/src/Setup.Page.al
@@ -153,10 +153,6 @@
                     ToolTip = 'Specifies if the table will be deleted if a reset of the table is done.';
                     Editable = not AzureDataLake;
                 }
-                field("Maximum retries"; Rec."Maximum Retries")
-                {
-                    ToolTip = 'Specifies the maximum number of retries of a request. The maximum value is 10.';
-                }
             }
         }
         part(Tables; "ADLSE Setup Tables")

From e9f0b9deed88d7860cf91f5832c134d065dd5e35 Mon Sep 17 00:00:00 2001
From: Bertverbeek4PS
Date: Thu, 7 Mar 2024 08:19:17 +0000
Subject: [PATCH 23/24] Bump version number to 2.21

---
 .AL-Go/settings.json          | 2 +-
 businessCentral/app/app.json  | 2 +-
 businessCentral/test/app.json | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.AL-Go/settings.json b/.AL-Go/settings.json
index 921a34f..a6f9e60 100644
--- a/.AL-Go/settings.json
+++ b/.AL-Go/settings.json
@@ -7,5 +7,5 @@
     "businessCentral\\test"
   ],
   "bcptTestFolders": [],
-  "repoVersion": "2.20"
+  "repoVersion": "2.21"
 }
diff --git a/businessCentral/app/app.json b/businessCentral/app/app.json
index dc126e3..bc83e85 100644
--- a/businessCentral/app/app.json
+++ b/businessCentral/app/app.json
@@ -4,7 +4,7 @@
   "publisher": "The bc2adls team",
   "brief": "Sync data from Business Central to the Azure storage",
   "description": "Exports data in chosen tables to the Azure Data Lake and keeps it in sync by incremental updates. Before you use this tool, please read the SUPPORT.md file at https://github.com/microsoft/bc2adls.",
-  "version": "2.20.0.0",
+  "version": "2.21.0.0",
   "privacyStatement": "https://go.microsoft.com/fwlink/?LinkId=724009",
   "EULA": "https://go.microsoft.com/fwlink/?linkid=2009120",
   "help": "https://go.microsoft.com/fwlink/?LinkId=724011",
diff --git a/businessCentral/test/app.json b/businessCentral/test/app.json
index 377a178..1a5f444 100644
--- a/businessCentral/test/app.json
+++ b/businessCentral/test/app.json
@@ -2,7 +2,7 @@
   "id": "03486619-1622-4261-ae0e-b366b3c96e3c",
   "name": "Azure Data Lake Storage Export Tests",
   "publisher": "The bc2adls team",
-  "version": "2.20.0.0",
+  "version": "2.21.0.0",
   "brief": "Tests for Azure Data Lake Storage Export",
   "description": "Tests for Azure Data Lake Storage Export",
   "privacyStatement": "",

From a18ef56407880119a256eccc064103fd50be4bd2 Mon Sep 17 00:00:00 2001
From: Bert Verbeek
Date: Thu, 7 Mar 2024 16:00:02 +0100
Subject: [PATCH 24/24] Handle a missing reset folder and fix typos in the CopyBusinessCentral notebook

---
 fabric/CopyBusinessCentral.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/fabric/CopyBusinessCentral.ipynb b/fabric/CopyBusinessCentral.ipynb
index 821248b..46662bc 100644
--- a/fabric/CopyBusinessCentral.ipynb
+++ b/fabric/CopyBusinessCentral.ipynb
@@ -1 +1 @@
-{"cells":[{"cell_type":"markdown","id":"ae53e9bf-8787-4d07-b709-d896fd16cc5f","metadata":{"editable":false,"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"source":["## Business Central merge data notebook\n","In this part the files in the delta folder will be merge with the Lakehouse table.\n","- It iterates first on the folders to append to the existing table.\n","- After that is will remove all duplicates by sorting the table. \n","- At last it will remove all deleted records inside the table that are deleted in Business Central\n","\n","Please change the parameters in the first part."]},{"cell_type":"code","execution_count":null,"id":"34dc5721-e317-4dc0-88ef-2c6bafb494da","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.6812441Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:06.8530455Z\",\"execution_finish_time\":\"2023-08-15T09:15:07.1828235Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","# settings\n","spark.conf.set(\"sprk.sql.parquet.vorder.enabled\",\"true\")\n","spark.conf.set(\"spark.microsoft.delta.optimizewrite.enabled\",\"true\")\n","spark.conf.set(\"spark.sql.parquet.filterPushdown\", \"true\")\n","spark.conf.set(\"spark.sql.parquet.mergeSchema\", \"false\")\n","spark.conf.set(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\")\n","spark.conf.set(\"spark.sql.delta.commitProtocol.enabled\", \"true\")\n","\n","# file paths\n","folder_path_spark = 'Files/deltas/' # this is mostly the default\n","folder_path_json = '/lakehouse/default/Files/' # this is mostly the default\n","folder_path_reset = '/lakehouse/default/Files/reset/' # this is mostly the default\n","folder_path = '/lakehouse/default/Files/deltas/' # this is mostly the default\n","\n","# parameters\n","workspace = 'businessCentral' #can also be a GUID\n","Lakehouse = 'businessCentral'; #can also be a GUID\n","Remove_delta = True; #will remove the delta files if everything is processed\n","Drop_table_if_mismatch = False; #option to drop the table if json file has different columns then in the table\n","no_Partition = 258 #how many partition is used in the dataframe, a good starting point might be 2-4 partitions per CPU core in your Spark cluster"]},{"cell_type":"code","execution_count":null,"id":"33ddc3d7","metadata":{},"outputs":[],"source":["%%pyspark\n","import os\n","import json\n","from pyspark.sql.types import *\n","\n","if Drop_table_if_mismatch:\n","\n"," def count_keys(obj): \n"," if isinstance(obj, dict): \n"," return len(obj) + sum(count_keys(v) for v in obj.values()) \n"," if isinstance(obj, list): \n"," return sum(count_keys(v) for v in obj) \n"," return 0 \n","\n"," for filename in os.listdir(folder_path_json):\n"," if \"manifest\" not in filename: # exclude the manifest files\n"," if filename.endswith(\".cdm.json\"):\n"," table_name = filename.replace(\"-\",\"\")\n"," table_name = table_name.replace(\".cdm.json\",\"\")\n","\n"," if table_name in [t.name for t in spark.catalog.listTables()]:\n"," #count number of columns in excisting table\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," df = spark.sql(SQL_Query)\n"," num_cols_table = len(df.columns) \n","\n"," #count number of columns in json file \n"," f = open(folder_path_json + filename)\n"," schema = json.load(f)\n"," has_attributes = schema[\"definitions\"][0][\"hasAttributes\"] \n"," num_names = len(has_attributes)\n","\n"," if num_cols_table != num_names:\n"," df = spark.sql(\"DROP TABLE IF EXISTS\"+ Lakehouse + \".\" + table_name)"]},{"cell_type":"code","execution_count":null,"id":"5669531f","metadata":{},"outputs":[],"source":["%%pyspark\n","import os\n","import glob\n","from pyspark.sql.types import *\n","\n","for filename in os.listdir(folder_path_reset):\n"," # Remove the table\n"," table_name = filename.replace(\"-\",\"\")\n"," table_name = table_name.replace(\".txt\",\"\")\n","\n"," df = spark.sql(\"DROP TABLE IF EXISTS \"+ Lakehouse + \".\" + table_name)\n","\n"," try: \n"," os.remove(folder_path_reset + '/' + filename) \n"," except OSError as e: # this would catch any error when trying to delete the file \n"," print(f\"Error: {filename} : {e.strerror}\")"]},{"cell_type":"code","execution_count":null,"id":"0594c099-6512-4777-82e2-9a3a058512fe","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.7249665Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:07.7601315Z\",\"execution_finish_time\":\"2023-08-15T09:15:18.128035Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","collapsed":false,"editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","import json\n","import os\n","import glob\n","from pyspark.sql.types import *\n","from pyspark.sql.utils import AnalysisException\n","from pyspark.sql.functions import col\n","from pyspark.sql.functions import desc\n","file_list = []\n","\n","for entry in os.scandir(folder_path):\n"," if entry.is_dir():\n","\n"," for filename in glob.glob(folder_path + entry.name + '/*'): \n"," table_name = entry.name.replace(\"-\",\"\")\n"," ContainsCompany = False\n"," df_new = spark.read.option(\"minPartitions\", no_Partition).format(\"csv\").option(\"header\",\"true\").load(folder_path_spark + entry.name +\"/*\") \n"," file_list.append(filename) #collect the imported filed in a list for deletion later on\n","\n"," f = open(folder_path_json + entry.name +\".cdm.json\")\n"," schema = json.load(f)\n"," # Parse the schema to get column names and data types\n"," column_names = [attr[\"name\"] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," if '$Company' in column_names:\n"," ContainsCompany = True\n"," column_types = [attr['dataFormat'] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," for col_name, col_type in zip(column_names, column_types):\n"," if col_type == \"String\":\n"," col_type = \"string\"\n"," if col_type == \"Guid\":\n"," col_type = \"string\"\n"," if col_type == \"Code\":\n"," col_type = \"object\"\n"," if col_type == \"Option\":\n"," col_type = \"string\"\n"," if col_type == \"Date\":\n"," col_type = \"date\"\n"," if col_type == \"Time\":\n"," col_type = \"string\"\n"," if col_type == \"DateTime\":\n"," col_type = \"date\"\n"," if col_type == \"Duration\":\n"," col_type = \"timedelta\"\n"," if col_type == \"Decimal\":\n"," col_type = \"float\"\n"," if col_type == \"Boolean\":\n"," col_type = \"boolean\"\n"," if col_type == \"Integer\":\n"," col_type = \"int\"\n"," if col_type == \"Int64\":\n"," col_type = \"int\"\n"," if col_type == \"Int32\":\n"," col_type = \"int\"\n","\n"," df_new = df_new.withColumn(col_name, df_new[col_name].cast(col_type))\n","\n","\n"," #check if the table excists\n"," if table_name in [t.name for t in spark.catalog.listTables()]: \n"," #read the old data into a new dataframe and union with the new dataframe\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," #print(SQL_Query)\n"," df_old = spark.sql(SQL_Query)\n"," df_new = df_new.union(df_old).repartition(no_Partition)\n","\n"," #delete all old records\n"," df_deletes = df_new.filter(df_new['SystemCreatedAt-2000000001'].isNull())\n"," if ContainsCompany:\n"," df_new = df_new.join(df_deletes, ['$Company','systemId-2000000000'], 'leftanti')\n"," else:\n"," df_new = df_new.join(df_deletes, ['systemId-2000000000'], 'leftanti')\n"," \n"," # remove duplicates by filtering on systemID and systemModifiedAt fields\n"," if ContainsCompany:\n"," df_new = df_new.orderBy('$Company','systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['$Company','systemId-2000000000'])\n"," else:\n"," df_new = df_new.orderBy('systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['systemId-2000000000'])\n"," \n"," #overwrite the dataframe in the new table\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name) \n"," else: \n"," #table isn't there so just insert it\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name)\n","\n"," #delete the files\n"," if Remove_delta:\n"," for filename in file_list: \n"," try: \n"," os.remove(filename) \n"," except OSError as e: # this would catch any error when trying to delete the file \n"," print(f\"Error: {filename} : {e.strerror}\")\n"," file_list = [] # clear the list"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"host":{"synapse_widget":{"state":{},"token":"a69b4b72-86b0-4373-b695-ef01cd53bbb1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","known_lakehouses":"[{\"id\":\"9fbacb3e-d0df-43a4-814b-abe4cb623a81\"}]"}}},"language":"python","ms_spell_check":{"ms_spell_check_language":"en"}},"notebook_environment":{},"nteract":{"version":"nteract-front-end@1.0.0"},"save_output":true,"spark_compute":{"compute_id":"/trident/default","session_options":{"conf":{},"enableDebugMode":false}},"synapse_widget":{"state":{},"version":"0.1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","default_lakehouse_name":"businessCentral","default_lakehouse_workspace_id":"21a92229-a0fb-4256-86bd-4b847b8006ed","known_lakehouses":[{"id":"9fbacb3e-d0df-43a4-814b-abe4cb623a81"}]}},"widgets":{}},"nbformat":4,"nbformat_minor":5}
+{"cells":[{"cell_type":"markdown","id":"ae53e9bf-8787-4d07-b709-d896fd16cc5f","metadata":{"editable":false,"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"source":["## Business Central merge data notebook\n","In this part the files in the delta folder will be merged with the Lakehouse table.\n","- It iterates first on the folders to append to the existing table.\n","- After that it will remove all duplicates by sorting the table. \n","- At last it will remove all records from the table that have been deleted in Business Central\n","\n","Please change the parameters in the first part."]},{"cell_type":"code","execution_count":null,"id":"34dc5721-e317-4dc0-88ef-2c6bafb494da","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.6812441Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:06.8530455Z\",\"execution_finish_time\":\"2023-08-15T09:15:07.1828235Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","# settings\n","spark.conf.set(\"spark.sql.parquet.vorder.enabled\",\"true\")\n","spark.conf.set(\"spark.microsoft.delta.optimizewrite.enabled\",\"true\")\n","spark.conf.set(\"spark.sql.parquet.filterPushdown\", \"true\")\n","spark.conf.set(\"spark.sql.parquet.mergeSchema\", \"false\")\n","spark.conf.set(\"spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version\", \"2\")\n","spark.conf.set(\"spark.sql.delta.commitProtocol.enabled\", \"true\")\n","\n","# file paths\n","folder_path_spark = 'Files/deltas/' # this is mostly the default\n","folder_path_json = '/lakehouse/default/Files/' # this is mostly the default\n","folder_path_reset = '/lakehouse/default/Files/reset/' # this is mostly the default\n","folder_path = '/lakehouse/default/Files/deltas/' # this is mostly the default\n","\n","# parameters\n","workspace = 'businessCentral' #can also be a GUID\n","Lakehouse = 'businessCentral'; #can also be a GUID\n","Remove_delta = True; #will remove the delta files if everything is processed\n","Drop_table_if_mismatch = False; #option to drop the table if the json file has different columns than the table\n","no_Partition = 258 #how many partitions are used in the dataframe, a good starting point might be 2-4 partitions per CPU core in your Spark cluster"]},{"cell_type":"code","execution_count":null,"id":"33ddc3d7","metadata":{},"outputs":[],"source":["%%pyspark\n","import os\n","import json\n","from pyspark.sql.types import *\n","\n","if Drop_table_if_mismatch:\n","\n"," def count_keys(obj): \n"," if isinstance(obj, dict): \n"," return len(obj) + sum(count_keys(v) for v in obj.values()) \n"," if isinstance(obj, list): \n"," return sum(count_keys(v) for v in obj) \n"," return 0 \n","\n"," for filename in os.listdir(folder_path_json):\n"," if \"manifest\" not in filename: # exclude the manifest files\n"," if filename.endswith(\".cdm.json\"):\n"," table_name = filename.replace(\"-\",\"\")\n"," table_name = table_name.replace(\".cdm.json\",\"\")\n","\n"," if table_name in [t.name for t in spark.catalog.listTables()]:\n"," #count number of columns in existing table\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," df = spark.sql(SQL_Query)\n"," num_cols_table = len(df.columns) \n","\n"," #count number of columns in json file \n"," f = open(folder_path_json + filename)\n"," schema = json.load(f)\n"," has_attributes = schema[\"definitions\"][0][\"hasAttributes\"] \n"," num_names = len(has_attributes)\n","\n"," if num_cols_table != num_names:\n"," df = spark.sql(\"DROP TABLE IF EXISTS \"+ Lakehouse + \".\" + table_name)"]},{"cell_type":"code","execution_count":null,"id":"5669531f","metadata":{},"outputs":[],"source":["%%pyspark\n","import os\n","import glob\n","from pyspark.sql.types import *\n","\n","if os.path.exists(folder_path_reset):\n"," for filename in os.listdir(folder_path_reset):\n"," # Remove the table\n"," table_name = filename.replace(\"-\",\"\")\n"," table_name = table_name.replace(\".txt\",\"\")\n","\n"," df = spark.sql(\"DROP TABLE IF EXISTS \"+ Lakehouse + \".\" + table_name)\n","\n"," try: \n"," os.remove(folder_path_reset + '/' + filename) \n"," except OSError as e: # this would catch any error when trying to delete the file \n"," print(f\"Error: {filename} : {e.strerror}\")"]},{"cell_type":"code","execution_count":null,"id":"0594c099-6512-4777-82e2-9a3a058512fe","metadata":{"cellStatus":"{\"MOD Administrator\":{\"queued_time\":\"2023-08-15T09:15:05.7249665Z\",\"session_start_time\":null,\"execution_start_time\":\"2023-08-15T09:15:07.7601315Z\",\"execution_finish_time\":\"2023-08-15T09:15:18.128035Z\",\"state\":\"finished\",\"livy_statement_state\":\"available\"}}","collapsed":false,"editable":false,"jupyter":{"outputs_hidden":false,"source_hidden":false},"microsoft":{"language":"python"},"nteract":{"transient":{"deleting":false}},"run_control":{"frozen":false}},"outputs":[],"source":["%%pyspark\n","import json\n","import os\n","import glob\n","from pyspark.sql.types import *\n","from pyspark.sql.utils import AnalysisException\n","from pyspark.sql.functions import col\n","from pyspark.sql.functions import desc\n","file_list = []\n","\n","for entry in os.scandir(folder_path):\n"," if entry.is_dir():\n","\n"," for filename in glob.glob(folder_path + entry.name + '/*'): \n"," table_name = entry.name.replace(\"-\",\"\")\n"," ContainsCompany = False\n"," df_new = spark.read.option(\"minPartitions\", no_Partition).format(\"csv\").option(\"header\",\"true\").load(folder_path_spark + entry.name +\"/*\") \n"," file_list.append(filename) #collect the imported files in a list for deletion later on\n","\n"," f = open(folder_path_json + entry.name +\".cdm.json\")\n"," schema = json.load(f)\n"," # Parse the schema to get column names and data types\n"," column_names = [attr[\"name\"] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," if '$Company' in column_names:\n"," ContainsCompany = True\n"," column_types = [attr['dataFormat'] for attr in schema[\"definitions\"][0][\"hasAttributes\"]] \n"," for col_name, col_type in zip(column_names, column_types):\n"," if col_type == \"String\":\n"," col_type = \"string\"\n"," if col_type == \"Guid\":\n"," col_type = \"string\"\n"," if col_type == \"Code\":\n"," col_type = \"object\"\n"," if col_type == \"Option\":\n"," col_type = \"string\"\n"," if col_type == \"Date\":\n"," col_type = \"date\"\n"," if col_type == \"Time\":\n"," col_type = \"string\"\n"," if col_type == \"DateTime\":\n"," col_type = \"date\"\n"," if col_type == \"Duration\":\n"," col_type = \"timedelta\"\n"," if col_type == \"Decimal\":\n"," col_type = \"float\"\n"," if col_type == \"Boolean\":\n"," col_type = \"boolean\"\n"," if col_type == \"Integer\":\n"," col_type = \"int\"\n"," if col_type == \"Int64\":\n"," col_type = \"int\"\n"," if col_type == \"Int32\":\n"," col_type = \"int\"\n","\n"," df_new = df_new.withColumn(col_name, df_new[col_name].cast(col_type))\n","\n","\n"," #check if the table exists\n"," if table_name in [t.name for t in spark.catalog.listTables()]: \n"," #read the old data into a new dataframe and union with the new dataframe\n"," SQL_Query = \"SELECT * FROM \" + Lakehouse +\".\"+table_name; \n"," #print(SQL_Query)\n"," df_old = spark.sql(SQL_Query)\n"," df_new = df_new.union(df_old).repartition(no_Partition)\n","\n"," #remove records that were deleted in Business Central\n"," df_deletes = df_new.filter(df_new['SystemCreatedAt-2000000001'].isNull())\n"," if ContainsCompany:\n"," df_new = df_new.join(df_deletes, ['$Company','systemId-2000000000'], 'leftanti')\n"," else:\n"," df_new = df_new.join(df_deletes, ['systemId-2000000000'], 'leftanti')\n"," \n"," # remove duplicates by filtering on systemID and systemModifiedAt fields\n"," if ContainsCompany:\n"," df_new = df_new.orderBy('$Company','systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['$Company','systemId-2000000000'])\n"," else:\n"," df_new = df_new.orderBy('systemId-2000000000',desc('SystemModifiedAt-2000000003'))\n"," df_new = df_new.dropDuplicates(['systemId-2000000000'])\n"," \n"," #overwrite the dataframe in the new table\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name) \n"," else: \n"," #table isn't there so just insert it\n"," df_new.write.mode(\"overwrite\").format(\"delta\").save(\"Tables/\" + table_name)\n","\n"," #delete the files\n"," if Remove_delta:\n"," for filename in file_list: \n"," try: \n"," os.remove(filename) \n"," except OSError as e: # this would catch any error when trying to delete the file \n"," print(f\"Error: {filename} : {e.strerror}\")\n"," file_list = [] # clear the list"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"host":{"synapse_widget":{"state":{},"token":"a69b4b72-86b0-4373-b695-ef01cd53bbb1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","known_lakehouses":"[{\"id\":\"9fbacb3e-d0df-43a4-814b-abe4cb623a81\"}]"}}},"language":"python","ms_spell_check":{"ms_spell_check_language":"en"}},"notebook_environment":{},"nteract":{"version":"nteract-front-end@1.0.0"},"save_output":true,"spark_compute":{"compute_id":"/trident/default","session_options":{"conf":{},"enableDebugMode":false}},"synapse_widget":{"state":{},"version":"0.1"},"trident":{"lakehouse":{"default_lakehouse":"9fbacb3e-d0df-43a4-814b-abe4cb623a81","default_lakehouse_name":"businessCentral","default_lakehouse_workspace_id":"21a92229-a0fb-4256-86bd-4b847b8006ed","known_lakehouses":[{"id":"9fbacb3e-d0df-43a4-814b-abe4cb623a81"}]}},"widgets":{}},"nbformat":4,"nbformat_minor":5}