diff --git a/.github/workflows/updateChangelogs.yaml b/.github/workflows/updateChangelogs.yaml
index d9d9f1b63..72b3e1e37 100644
--- a/.github/workflows/updateChangelogs.yaml
+++ b/.github/workflows/updateChangelogs.yaml
@@ -50,11 +50,11 @@ jobs:
       - name: Commit and Push Changes
         run: |
-          if [ -n "$(git status --porcelain)" ]; then
-            echo "Updating with new CHANGELOG.md";
-            git add CHANGELOG.md
-            git commit -m "Updated main CHANGELOG.md file with latest merged release."
+          if [ -n "$(git status --porcelain changelog*)" ]; then
+            echo "Updating with new changelog files";
+            git add changelog*
+            git commit -m "Updated changelog files with latest merged release."
             git push
           else
-            echo "No changes made to CHANGELOG.md";
+            echo "No changes made to changelog files";
           fi
 
diff --git a/CHANGELOG.md b/changelog.md
similarity index 100%
rename from CHANGELOG.md
rename to changelog.md
diff --git a/changelog/v2.11.2/changelog.md b/changelog/v2.11.2/changelog.md
index 484740a23..822909604 100644
--- a/changelog/v2.11.2/changelog.md
+++ b/changelog/v2.11.2/changelog.md
@@ -20,9 +20,12 @@
 
 ### NEW HARDWARE
 
+> NOTE:
+>> If any new hardware was added to current version, it will be listed here.
+
 Support added for following hardware:
 
-+ [Mikromedia 3 for PIC32MZ RESISTIVE](https://www.mikroe.com/mikromedia-3-for-pic32mz-resistive)
++ **[2024-08-23](./new_hw/2024-08-23.md)**
 
 ---
 
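Reviewer note: `git status --porcelain <pathspec>` prints nothing when the matched paths are clean, so with the `changelog*` pathspec the workflow now commits only when changelog files actually changed. A minimal sketch of the same dirty-check in Python (hypothetical helper, assuming `git` is on `PATH`):

```python
import subprocess

def changelog_files_dirty(repo_dir: str) -> bool:
    # `--porcelain` yields stable, script-friendly output; git interprets
    # the literal `changelog*` as a pathspec matching both changelog.md
    # and the changelog/ tree.
    result = subprocess.run(
        ["git", "status", "--porcelain", "changelog*"],
        cwd=repo_dir, capture_output=True, text=True, check=True,
    )
    return bool(result.stdout.strip())
```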
diff --git a/changelog/v2.11.2/new_hw/2024-08-23.md b/changelog/v2.11.2/new_hw/2024-08-23.md
new file mode 100644
index 000000000..d9dd10c5a
--- /dev/null
+++ b/changelog/v2.11.2/new_hw/2024-08-23.md
@@ -0,0 +1,29 @@
+<p align="center">
+  <img src="…" alt="MikroElektronika"/>
+</p>
+
+---
+
+**[BACK TO PREVIOUS FILE](../changelog.md)**
+
+---
+
+# `2024-08-23`
+
+## Changes
+
+- [`2024-08-23`](#2024-08-23)
+  - [Changes](#changes)
+    - [NEW HARDWARE](#new-hardware)
+
+### NEW HARDWARE
+
+Support added for following hardware:
+
++ [Mikromedia 3 for PIC32MZ RESISTIVE](https://www.mikroe.com/mikromedia-3-for-pic32mz-resistive)
+
+---
+
+**[BACK TO PREVIOUS FILE](../changelog.md)**
+
+---
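This dated entry follows `templates/new_hw.md`, added at the bottom of this patch. For illustration, a sketch of how a pipeline might stamp out such a file from the template (hypothetical helper; this patch itself does not include this automation):

```python
import os
from datetime import date

def create_new_hw_entry(repo_root: str, sdk_version: str, day: date) -> str:
    # Hypothetical helper: instantiate templates/new_hw.md for a given date.
    with open(os.path.join(repo_root, 'templates/new_hw.md'), 'r') as f:
        content = f.read().replace('DATE', day.isoformat())
    # (A real implementation would also fix the lowercase '#date' ToC anchor.)
    out_dir = os.path.join(repo_root, 'changelog', sdk_version, 'new_hw')
    os.makedirs(out_dir, exist_ok=True)
    out_path = os.path.join(out_dir, f'{day.isoformat()}.md')
    with open(out_path, 'w') as f:
        f.write(content)
    return out_path
```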
diff --git a/scripts/index.py b/scripts/index.py
index 6b9909334..7d79b4094 100644
--- a/scripts/index.py
+++ b/scripts/index.py
@@ -55,12 +55,39 @@ def fetch_json_data(download_link, token):
         print(f"Error fetching JSON data: {e}")
         return None, str(e)
 
-# Function to find an item by name
-def find_item_by_name(items, name):
-    for item in items:
-        if item['name'] == name:
-            return item
-    return None
+def check_from_index(es: Elasticsearch, index_name, asset):
+    # Search query to use
+    query_search = {
+        "size": 5000,
+        "query": {
+            "match_all": {}
+        }
+    }
+
+    # Search the base with provided query
+    num_of_retries = 1
+    while num_of_retries <= 10:
+        try:
+            response = es.search(index=index_name, body=query_search)
+            if not response['timed_out']:
+                break
+        except:
+            print("Executing search query - retry number %i" % num_of_retries)
+            num_of_retries += 1
+
+    version = '1.0.0'
+    for eachHit in response['hits']['hits']:
+        if not 'name' in eachHit['_source']:
+            continue ## TODO - Check newly created bare metal package (is it created correctly)
+        name = eachHit['_source']['name']
+        if name == asset:
+            version = eachHit['_source']['version']
+
+    return version
+
+def increment_version(version):
+    major, minor, patch = map(int, version.split('.'))
+    return f"{major}.{minor}.{patch + 1}"
 
 # Function to index release details into Elasticsearch
 def index_release_to_elasticsearch(es : Elasticsearch, index_name, release_details, token):
@@ -73,20 +100,23 @@ def index_release_to_elasticsearch(es : Elasticsearch, index_name, release_detai
         metadata_download_url = metadata_asset['url']
         metadata_content.append(fetch_json_data(metadata_download_url, token)[0])
 
-    version = None
-    for asset in release_details[0].get('assets', []):
-        if 'mikrosdk.7z' == asset['name']:
-            # Download the current mikroSDK asset in order to read the version
-            support.extract_archive_from_url(
-                asset['url'],
-                os.path.join(os.path.dirname(__file__), 'tmp'),
-                token
-            )
-
-            # Then fetch version from manifest file
-            version = support.fetch_version_from_asset(os.path.join(os.path.dirname(__file__), 'tmp'))
-            break
+    ## 0 is new one being indexed, 1 in previously indexed release
+    if 'mikrosdk' in metadata_content[0]:
+        version = metadata_content[0]['mikrosdk']['version']
+    else:
+        for asset in release_details[0].get('assets', []):
+            if 'mikrosdk.7z' == asset['name']:
+                # Download the current mikroSDK asset in order to read the version
+                support.extract_archive_from_url(
+                    asset['url'],
+                    os.path.join(os.path.dirname(__file__), 'tmp'),
+                    token
+                )
+
+                # Then fetch version from manifest file
+                version = support.fetch_version_from_asset(os.path.join(os.path.dirname(__file__), 'tmp'))
+                break
 
     for asset in release_details[0].get('assets', []):
         doc = None
         name_without_extension = os.path.splitext(os.path.basename(asset['name']))[0]
@@ -137,6 +167,35 @@ def index_release_to_elasticsearch(es : Elasticsearch, index_name, release_detai
                 "install_location" : "%APPLICATION_DATA_DIR%/resources/images",
                 "package_changed": package_changed
             }
+        elif asset['name'].startswith('board') or asset['name'].startswith('mikromedia'):
+            board_version_new = '1.0.0'
+            board_version_previous = '0.0.0'
+            if 'packages' in metadata_content[1]:
+                if name_without_extension in metadata_content[1]['packages']:
+                    if 'hash' in metadata_content[1]['packages'][name_without_extension]:
+                        board_version_previous = check_from_index(es, index_name, asset['name'])
+                        board_version_new = board_version_previous
+                        if metadata_content[0]['packages'][name_without_extension]['hash'] != metadata_content[1]['packages'][name_without_extension]['hash']:
+                            board_version_new = increment_version(board_version_previous)
+            for each_package in metadata_content[0]['packages']:
+                if metadata_content[0]['packages'][each_package]['package_name'] == name_without_extension:
+                    package_name = metadata_content[0]['packages'][each_package]['display_name']
+                    break
+            doc = {
+                'name': metadata_content[0]['packages'][package_name]['package_name'],
+                'display_name': metadata_content[0]['packages'][package_name]['display_name'],
+                'author': 'MIKROE',
+                'hidden': False,
+                "icon": metadata_content[0]['packages'][package_name]['icon'],
+                'type': metadata_content[0]['packages'][package_name]['type'],
+                'version': board_version_new,
+                'created_at' : asset['created_at'],
+                'updated_at' : asset['updated_at'],
+                'category': metadata_content[0]['packages'][package_name]['category'],
+                'download_link': asset['url'], # Adjust as needed for actual URL
+                "install_location" : metadata_content[0]['packages'][package_name]['install_location'],
+                'package_changed': board_version_previous != board_version_new
+            }
 
     # Index the document
     if doc:
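A note on the versioning scheme introduced here: a board asset starts at `1.0.0`; on later releases `check_from_index` recovers the version currently stored in Elasticsearch, and the patch digit is bumped only when the package hash recorded in the release metadata changes, which is what drives `package_changed`. A self-contained sketch of that decision (`increment_version` reproduced from the diff above; `resolve_board_version` and the test values are illustrative):

```python
def increment_version(version):
    major, minor, patch = map(int, version.split('.'))
    return f"{major}.{minor}.{patch + 1}"

def resolve_board_version(indexed_version, previous_hash, new_hash):
    # Unchanged content keeps the already-indexed version; changed content
    # bumps the patch digit so the package is flagged as updated.
    if previous_hash == new_hash:
        return indexed_version
    return increment_version(indexed_version)

assert resolve_board_version('1.0.3', 'md5aaa', 'md5aaa') == '1.0.3'
assert resolve_board_version('1.0.3', 'md5aaa', 'md5bbb') == '1.0.4'
```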
diff --git a/scripts/log_changes.py b/scripts/log_changes.py
index 3515d96bc..441407989 100644
--- a/scripts/log_changes.py
+++ b/scripts/log_changes.py
@@ -1,16 +1,27 @@
 import os, re
 from packaging import version
+from datetime import datetime
 
 import support as utility
 
+# Function to extract date from filename
+def extract_date(filename):
+    # Extract the date part (assuming format 'YYYY-MM-DD')
+    date_str = filename.split('.')[0]
+    # Convert to a datetime object for proper sorting
+    return datetime.strptime(date_str, '%Y-%m-%d')
+
+changelog_root = os.path.join(os.getcwd(), 'changelog')
+all_changelog_dirs = os.listdir(changelog_root)
+
 ## Get all currently present changelog versions
 all_versions = sorted(
-    [x for x in os.listdir(os.path.join(os.getcwd(), 'changelog'))],
+    [x for x in all_changelog_dirs],
     key=lambda x: version.parse(x),
     reverse=True
 )
 
 ## Get file content first
-with open(os.path.join(os.getcwd(),'CHANGELOG.md'), 'r') as main_changelog_file:
+with open(os.path.join(os.getcwd(),'changelog.md'), 'r') as main_changelog_file:
     main_changelog = main_changelog_file.readlines()
     main_changelog_file.close()
@@ -22,10 +33,40 @@
     ## Create new links
     array_of_links.append(f'+ **[{each_version}](./changelog/{each_version}/changelog.md)**')
 
-## Then write the new main CHANGELOG.md content
-with open(os.path.join(os.getcwd(),'CHANGELOG.md'), 'w') as main_changelog_file:
+## Then write the new main changelog.md content
+with open(os.path.join(os.getcwd(),'changelog.md'), 'w') as main_changelog_file:
     main_changelog_file.writelines(''.join(main_changelog).replace('**VERSIONS:**', '\n'.join(array_of_links)))
     main_changelog_file.close()
 
 ## And remove any occurrences of more than 1 sequential empty row
-utility.filter_multiple_empty_rows(os.path.join(os.getcwd(),'CHANGELOG.md'))
+utility.filter_multiple_empty_rows(os.path.join(os.getcwd(),'changelog.md'))
+
+## Update CHANGELOG files with specific date releases
+array_of_links = ['Support added for following hardware:\n']
+for each_version in all_versions:
+    ## Get file content first
+    with open(os.path.join(changelog_root, each_version, 'changelog.md'), 'r') as sub_changelog_file:
+        sub_changelog = sub_changelog_file.readlines()
+        sub_changelog_file.close()
+
+    ## Remove lines that contain links
+    sub_changelog = [line for line in sub_changelog if not re.match(r'^\+ \*\*\[\d{4}-\d{2}-\d{2}\]\(\./new_hw/\d{4}-\d{2}-\d{2}\.md\)\*\*$', line)]
+
+    ## Get file list
+    current_files = os.listdir(os.path.join(changelog_root, each_version))
+    if len(current_files) > 1:
+        print('Updated changelog file at: %s' % os.path.join(changelog_root, each_version))
+        if 'new_hw' in current_files:
+            ## Sort the files based on date (newest go to top of the file)
+            all_sub_files = sorted(os.listdir(os.path.join(changelog_root, each_version, 'new_hw')), key=extract_date)
+            for each_sub_file in all_sub_files:
+                ## Create new links
+                array_of_links.append(f'+ **[{each_sub_file.split('.')[0]}](./new_hw/{each_sub_file})**')
+
+            ## Then write the new sub changelog.md content
+            with open(os.path.join(changelog_root, each_version, 'changelog.md'), 'w') as sub_changelog_file:
+                sub_changelog_file.writelines(''.join(sub_changelog).replace('Support added for following hardware:', '\n'.join(array_of_links)))
+                sub_changelog_file.close()
+
+            ## And remove any occurrences of more than 1 sequential empty row
+            utility.filter_multiple_empty_rows(os.path.join(changelog_root, each_version, 'changelog.md'))
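Two behaviors here are easy to miss: the list comprehension strips previously generated date links, so the script can rerun without duplicating entries, and `extract_date` sorts on parsed dates rather than raw strings. As written, `sorted(..., key=extract_date)` is ascending (oldest first); despite the "newest go to top" comment, newest-first ordering would need `reverse=True`. A quick runnable check of both pieces:

```python
import re
from datetime import datetime

def extract_date(filename):
    # '2024-08-23.md' -> datetime(2024, 8, 23)
    return datetime.strptime(filename.split('.')[0], '%Y-%m-%d')

# The exact line format the removal regex in log_changes.py matches.
link_pattern = re.compile(
    r'^\+ \*\*\[\d{4}-\d{2}-\d{2}\]\(\./new_hw/\d{4}-\d{2}-\d{2}\.md\)\*\*$'
)

files = ['2024-09-01.md', '2024-08-23.md']
print(sorted(files, key=extract_date))  # ['2024-08-23.md', '2024-09-01.md']
print(bool(link_pattern.match('+ **[2024-08-23](./new_hw/2024-08-23.md)**')))  # True
```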
diff --git a/scripts/package.py b/scripts/package.py
index 82ee815ad..2aa1a341a 100644
--- a/scripts/package.py
+++ b/scripts/package.py
@@ -1,9 +1,33 @@
 import os, re, py7zr, \
     requests, argparse, \
-    json, hashlib
+    json, hashlib, shutil, \
+    sqlite3
 
 import support as support
 
+def functionRegex(value, pattern):
+    reg = re.compile(r"\b" + value + r"\b")
+    return reg.search(pattern) is not None
+
+def read_data_from_db(db, sql_query):
+    ## Open the database / connect to it
+    con = sqlite3.connect(db)
+    cur = con.cursor()
+
+    ## Create the REGEXP function to be used in DB
+    con.create_function("REGEXP", 2, functionRegex)
+
+    ## Execute the desired query
+    results = cur.execute(sql_query).fetchall()
+    # results = cur.fetchall()
+
+    ## Close the connection
+    cur.close()
+    con.close()
+
+    ## Return query results
+    return len(results), results
+
 def find_manifest_folder(base_dir):
     """Find the folder containing 'manifest.json'."""
     for root, dirs, files in os.walk(base_dir):
@@ -15,9 +39,12 @@
 def create_7z_archive(version, source_folder, archive_path):
     """Create a .7z archive from a source folder with a specific folder structure, excluding the .github folder."""
     with py7zr.SevenZipFile(archive_path, 'w') as archive:
         for root, dirs, files in os.walk(source_folder):
-            if re.search(r'(\.git)|(scripts)|(templates)|(changelog)|(resources)', os.path.relpath(root, source_folder)):
-                continue
+            if re.search(r'(\.git)|(\.vscode)|(scripts)|(templates)|(changelog)|(resources)|(bsp/board/include/(boards|shields))', os.path.relpath(root, source_folder)):
+                if not 'board_generic' in os.path.relpath(root, source_folder):
+                    continue
             for file in files:
+                if re.search(r'\.git', file):
+                    continue
                 file_path = os.path.join(root, file)
                 # Exclude the archive itself
                 if file_path == archive_path:
@@ -31,13 +58,50 @@ def create_custom_archive(source_folder, archive_path):
         archive.writeall('./')
 
 def upload_asset_to_release(repo, release_id, asset_path, token):
-    """Upload an asset to a specific GitHub release."""
+    """Upload an asset to a specific GitHub release. If the asset exists, delete it first."""
+    asset_name = os.path.basename(asset_path)
+    url = f'https://api.github.com/repos/{repo}/releases/{release_id}/assets'
+    headers = {
+        'Authorization': f'token {token}'
+    }
+
+    # Handle pagination to get all assets
+    page = 1
+    asset_deleted = False
+    while True:
+        if asset_deleted:
+            break
+        url = f'https://api.github.com/repos/{repo}/releases/{release_id}/assets?page={page}&per_page=30'
+        response = requests.get(url, headers=headers)
+        response.raise_for_status()
+        assets = response.json()
+
+        # If no more assets, break out of loop
+        if not assets:
+            break
+
+        # Check if the asset already exists
+        for asset in assets:
+            if asset['name'] == asset_name:
+                # If the asset exists, delete it
+                delete_url = asset['url']
+                print(f'Deleting existing asset: {asset_name}')
+                delete_response = requests.delete(delete_url, headers=headers)
+                delete_response.raise_for_status()
+                print(f'Asset deleted: {asset_name}')
+                asset_deleted = True
+                break
+
+        page += 1
+
+    # Upload the new asset
     url = f'https://uploads.github.com/repos/{repo}/releases/{release_id}/assets?name={os.path.basename(asset_path)}'
     headers = {
         'Authorization': f'token {token}',
         'Content-Type': 'application/x-7z-compressed'
    }
     with open(asset_path, 'rb') as file:
+        print(f'Uploading new asset: {asset_name}')
         response = requests.post(url, headers=headers, data=file)
         response.raise_for_status()
         print(f'Uploaded asset: {os.path.basename(asset_path)} to release ID: {release_id}')
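`read_data_from_db` above leans on a `sqlite3` detail: SQLite ships no built-in `REGEXP` implementation, so the script registers one with `con.create_function`. SQLite evaluates `X REGEXP Y` as `REGEXP(Y, X)`, so the function's first argument is the right-hand pattern and the second is the column text, and the `\b` word boundaries keep a board name from matching a longer name that merely contains it. A runnable illustration (table contents made up):

```python
import re, sqlite3

def function_regex(value, pattern):
    # SQLite rewrites `x REGEXP y` as REGEXP(y, x): `value` is the right-hand
    # side (treated as the regex), `pattern` is the column text being searched.
    return re.compile(r"\b" + value + r"\b").search(pattern) is not None

con = sqlite3.connect(':memory:')
con.create_function("REGEXP", 2, function_regex)
con.execute('CREATE TABLE Boards (sdk_config TEXT)')
con.execute('INSERT INTO Boards VALUES (?)',
            ('{"_MSDK_BOARD_NAME_":"BOARD_FUSION_V8"}',))
rows = con.execute(
    'SELECT sdk_config FROM Boards WHERE sdk_config REGEXP "BOARD_FUSION_V8"'
).fetchall()
print(len(rows))  # 1 -- "BOARD_FUSION" alone would not match, thanks to \b
con.close()
```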
@@ -95,6 +159,166 @@ def hash_directory_contents(directory):
     combined_hash = hashlib.md5("".join(all_hashes).encode()).hexdigest()
     return combined_hash
 
+def extract_board_info(each_path, file_content):
+    # Regex to match the board name
+    board_name_match = re.search(r'if\(\${_MSDK_BOARD_NAME_} STREQUAL "(.*?)"\)', file_content)
+    # Regex to match the SHIELD value
+    shield_value_match = re.search(r'set\(SHIELD (TRUE|FALSE)\)', file_content)
+
+    if board_name_match:
+        board_name = board_name_match.group(1)
+        if shield_value_match:
+            shield_value = shield_value_match.group(1)
+        else:
+            shield_value = False
+        return board_name, (True if 'TRUE' == shield_value else False)
+    else:
+        print("Some values not extracted for %s." % each_path)
+
+    return None, None
+
+def check_database_for_shield(db, board):
+    db_check = read_data_from_db(
+        db, 'SELECT sdk_config FROM Boards WHERE sdk_config REGEXP ' + f'"{board}"'
+    )
+
+    if db_check[0]:
+        json_object = json.loads(db_check[1][0][0])
+        if '_MSDK_SHIELD_' in json_object:
+            return json_object['_MSDK_SHIELD_']
+        else:
+            db_check = read_data_from_db(
+                db, f'SELECT display FROM Boards WHERE sdk_config REGEXP ' + f'"{board}"'
+            )
+            db_check = read_data_from_db(
+                db, f'SELECT sdk_config FROM Displays WHERE uid == "{db_check[1][0][0]}"'
+            )
+            json_object = json.loads(db_check[1][0][0])
+            if '_MSDK_SHIELD_' in json_object:
+                return json_object['_MSDK_SHIELD_']
+
+    return None
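`check_database_for_shield` reads the board's `sdk_config` JSON blob out of the NECTO database (`necto_db.db` in the extracted `database.7z`), falling back to the board's display definition when the shield key is absent on the board row. A minimal sketch of the data shape it expects, with made-up values:

```python
import json

# Hypothetical sdk_config blob as stored in the Boards table.
sdk_config = json.loads(
    '{"_MSDK_BOARD_NAME_": "BOARD_EXAMPLE", "_MSDK_SHIELD_": "shield_example"}'
)

# The same lookup the helper performs once the row is fetched.
shield = sdk_config.get('_MSDK_SHIELD_')  # 'shield_example' here, else None
print(shield)
```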
+def package_board_files(repo_root, files_root_dir, path_list, sdk_version):
+    asset_type = files_root_dir.split(os.sep)[-1]
+    os.makedirs(os.path.join(repo_root, f'tmp/assets/{asset_type}'), exist_ok=True)
+
+    support.extract_archive_from_url(
+        'https://github.com/MikroElektronika/core_packages/releases/latest/download/database.7z',
+        os.path.join(repo_root, 'tmp/db')
+    )
+
+    archive_list = {}
+    for each_path in path_list:
+        # Do not generate for generic boards
+        if 'generic' in each_path:
+            continue
+
+        all_files_on_path = os.listdir(os.path.join(files_root_dir, each_path))
+        shutil.copytree(
+            os.path.join(files_root_dir, each_path),
+            os.path.join(repo_root, f'tmp/assets/{asset_type}/bsp/board/include/boards/{each_path}'),
+            dirs_exist_ok=True
+        )
+
+        with open(os.path.join(repo_root, f'tmp/assets/{asset_type}/bsp/board/include/boards/{each_path}/board.cmake'), 'r') as file:
+            board_name, has_shield = extract_board_info(each_path, file.read())
+            file.close()
+
+        if has_shield:
+            # Check the database for the shield first
+            shield_path = check_database_for_shield(
+                os.path.join(repo_root, 'tmp/db/necto_db.db'),
+                board_name
+            )
+            # If not found in database, search resource files next
+            if not shield_path:
+                board_query = json.loads(json.load(open(os.path.join(repo_root, f'resources/queries/boards/{each_path}/Boards.json'), 'r'))['sdk_config'])
+                if '_MSDK_SHIELD_' in board_query:
+                    shield_path = board_query['_MSDK_SHIELD_']
+            # Finally, if shield found, copy it to package as well
+            if shield_path:
+                shutil.copytree(
+                    os.path.join(repo_root, f'bsp/board/include/shields/{shield_path}'),
+                    os.path.join(repo_root, f'tmp/assets/{asset_type}/bsp/board/include/shields/{shield_path}'),
+                    dirs_exist_ok=True
+                )
+
+        display_name = None
+        display_names = read_data_from_db(
+            os.path.join(repo_root, 'tmp/db/necto_db.db'),
+            'SELECT name FROM Boards WHERE sdk_config REGEXP ' + f'"{board_name}"'
+        )
+
+        if not display_names[0]:
+            display_name = json.load(open(os.path.join(repo_root, f'resources/queries/boards/{each_path}/Boards.json'), 'r'))['name']
+
+        icon = None
+        icon_root = f'https://raw.githubusercontent.com/MikroElektronika/mikrosdk_v2/mikroSDK-{sdk_version}/resources/'
+        icon = read_data_from_db(
+            os.path.join(repo_root, 'tmp/db/necto_db.db'),
+            'SELECT icon FROM Boards WHERE sdk_config REGEXP ' + f'"{board_name}"'
+        )
+
+        if not icon[0]:
+            icon = icon_root + json.load(open(os.path.join(repo_root, f'resources/queries/boards/{each_path}/Boards.json'), 'r'))['icon']
+        else:
+            leaf = icon[1][0][0].split('/')[-1]
+            if not leaf.startswith('board'):
+                icon = icon_root + icon[1][0][0].replace(leaf, f'board-{leaf}')
+            else:
+                icon = icon_root + icon[1][0][0]
+
+        create_custom_archive(
+            os.path.join(repo_root, f'tmp/assets/{asset_type}/bsp'),
+            os.path.join(repo_root, f'tmp/assets/{asset_type}/{each_path}.7z')
+        )
+        os.chdir(repo_root)
+
+        if display_name:
+            query_file = '\'{"package":"' + each_path + '"}\''
+            archive_list.update(
+                {
+                    display_name:
+                    {
+                        "name": board_name,
+                        "display_name": display_name,
+                        "type": "board",
+                        "icon": icon,
+                        "package_name": each_path,
+                        "hash": hash_directory_contents(os.path.join(repo_root, f'tmp/assets/{asset_type}/bsp')),
+                        "category": "Board Package",
+                        "package_rel_path": f'tmp/assets/{asset_type}/{each_path}.7z',
+                        "install_location": f"%APPLICATION_DATA_DIR%/packages/sdk/mikroSDK_v2/src/bsp",
+                        "db_query": f'UPDATE Boards SET installer_package = {query_file} WHERE name = \"{display_name}\"'
+                    }
+                }
+            )
+        else:
+            for each_display_name in display_names[1]:
+                query_file = '\'{"package":"' + each_path + '"}\''
+                archive_list.update(
+                    {
+                        each_display_name[0]:
+                        {
+                            "name": board_name,
+                            "display_name": each_display_name[0],
+                            "type": "board",
+                            "icon": icon,
+                            "package_name": each_path,
+                            "hash": hash_directory_contents(os.path.join(repo_root, f'tmp/assets/{asset_type}/bsp')),
+                            "category": "Board Package",
+                            "package_rel_path": f'tmp/assets/{asset_type}/{each_path}.7z',
+                            "install_location": f"%APPLICATION_DATA_DIR%/packages/sdk/mikroSDK_v2/src/bsp",
+                            "db_query": f'UPDATE Boards SET installer_package = {query_file} WHERE name = \"{each_display_name[0]}\"'
+                        }
+                    }
+                )
+
+    shutil.rmtree(os.path.join(repo_root, f'tmp/assets/{asset_type}/bsp'))
+
+    return archive_list
+
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description="Upload directories as release assets.")
     parser.add_argument("token", help="GitHub Token")
@@ -123,6 +347,7 @@ def hash_directory_contents(directory):
         print('Creating archive: %s' % archive_path)
         create_7z_archive(version, repo_dir, archive_path)
         print('Archive created successfully: %s' % archive_path)
+        metadata_content['mikrosdk'] = {'version': version}
         upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token)
         print('Asset "%s" uploaded successfully to release ID: %s' % ('mikrosdk', release_id))
 
@@ -155,7 +380,27 @@ def hash_directory_contents(directory):
         upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token)
         print('Asset "%s" uploaded successfully to release ID: %s' % ('queries', release_id))
 
-        # BSP asset
+        # Package all boards as separate packages
+        packages = package_board_files(
+            repo_dir,
+            os.path.join(os.getcwd(), 'bsp/board/include/boards'),
+            os.listdir(os.path.join(os.getcwd(), 'bsp/board/include/boards')),
+            args.tag_name.replace('mikroSDK-', '')
+        )
+
+        # Update the metadata with package details
+        metadata_content.update(
+            {
+                "packages": packages
+            }
+        )
+
+        # Upload all the board packages
+        for each_package in packages:
+            current_package_data = packages[each_package]
+            upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{current_package_data['package_rel_path']}'), args.token)
+
+        # BSP asset for internal MIKROE tools
         archive_path = os.path.join(repo_dir, 'bsps.7z')
         print('Creating archive: %s' % archive_path)
         zip_bsp_related_files(archive_path, repo_dir)
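The net effect on the release's `metadata.json` is one new top-level key per concern: `mikrosdk.version`, so `index.py` no longer has to download and unpack `mikrosdk.7z` just to learn the version, and `packages`, keyed by display name and consumed by the new board branch in `index.py`. A sketch of the resulting shape, with illustrative values only:

```python
# Illustrative metadata.json content after this change (all values made up).
metadata_content = {
    "mikrosdk": {"version": "2.11.2"},
    "packages": {
        "Mikromedia 3 for PIC32MZ Resistive": {
            "name": "MIKROMEDIA3_PIC32MZ_RESISTIVE",
            "display_name": "Mikromedia 3 for PIC32MZ Resistive",
            "type": "board",
            "package_name": "mikromedia_3_pic32mz_resistive",
            "hash": "d41d8cd98f00b204e9800998ecf8427e",
            "category": "Board Package",
            "install_location": "%APPLICATION_DATA_DIR%/packages/sdk/mikroSDK_v2/src/bsp",
        },
    },
}
```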
diff --git a/scripts/support.py b/scripts/support.py
index 754e21952..105978864 100644
--- a/scripts/support.py
+++ b/scripts/support.py
@@ -36,7 +36,7 @@ def determine_archive_type(byte_stream):
     else:
         return '7z'
 
-def extract_archive_from_url(url, destination, token):
+def extract_archive_from_url(url, destination, token=None):
     """
     Extract the contents of an archive (7z or zip) from a URL directly in memory,
     without downloading the file.
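The `token=None` default matches the two calling conventions now present in this patch: `package_board_files` fetches the public `database.7z` release asset with no credentials, while `index.py` keeps passing a token for API-hosted assets. A usage sketch (destination paths illustrative; presumably the unchanged function body only attaches authorization when a token is given, which this diff does not show):

```python
import support

# Public release asset: no GitHub token required (new default).
support.extract_archive_from_url(
    'https://github.com/MikroElektronika/core_packages/releases/latest/download/database.7z',
    '/tmp/db'
)

# API-hosted asset: still pass the token explicitly, as index.py does.
# support.extract_archive_from_url(asset_url, '/tmp/asset', token=github_token)
```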
diff --git a/templates/new_hw.md b/templates/new_hw.md
new file mode 100644
index 000000000..237245de0
--- /dev/null
+++ b/templates/new_hw.md
@@ -0,0 +1,29 @@
+<p align="center">
+  <img src="…" alt="MikroElektronika"/>
+</p>
+
+---
+
+**[BACK TO PREVIOUS FILE](../changelog.md)**
+
+---
+
+# `DATE`
+
+## Changes
+
+- [`DATE`](#date)
+  - [Changes](#changes)
+    - [NEW HARDWARE](#new-hardware)
+
+### NEW HARDWARE
+
+Support added for following hardware:
+
++ ADD HW HERE
+
+---
+
+**[BACK TO PREVIOUS FILE](../changelog.md)**
+
+---