new pipeline for codegen (#3375)
* new pipeline for codegen

* bugfix: taskResult is required; check existence before preparing source code

* simplify conditional checks

* delete unnecessary variable

* separate the artifacts from one combined directory into three parts

* move mkdir out of the loop since it always uses the same path

* artifacts may not be generated, so even if a task failed the pipeline should continue with the next task

* use PR number to check out the swagger repo

* [bugfix] fix missing parameter

* delete test code

* pipeline supports using a swagger PR to check out the spec repo

* try to upload log files to the artifact even if SDK generation failed

* debug for pipeline

* build dpg

* codegen pipeline supports Java DPG

* add release pipeline name and build type to the result

* use the azure-sdk account to create the SDK PR

* support adding an extra cmd for docker

* even if the generate and build stage failed, try to upload source code and artifacts

* don't exit 1 even if the generate cmd returns 1

* use one token for all git commands

* set SDK URL via a variable

* use env variable to control whether to show debug info

* remove hard-coded branch

* roll back for debug

* test

* set basedir for git wrapper

* lint fix

* update JS CI test repo commit ID

* lint fix
zzvswxy authored Aug 10, 2022
1 parent a864916 commit 5099fa9
Showing 19 changed files with 683 additions and 402 deletions.
@@ -1,19 +1,35 @@
#!/usr/bin/env node
import {
CompletedEvent,
GenerateAndBuildOutput,
generateTotalResult,
getGenerateAndBuildOutput,
getTaskResults,
InProgressEvent,
logger,
PipelineRunEvent,
requireJsonc,
SDK,
TaskResultStatus
SDKPipelineStatus,
TaskResult,
Trigger
} from '@azure-tools/sdk-generation-lib';
import * as fs from 'fs';
import * as path from 'path';

import { PrepareArtifactFilesInput, prepareArtifactFilesInput } from '../../cliSchema/prepareArtifactFilesCliConfig';
import {
PrepareArtifactFilesInput,
prepareArtifactFilesInput,
PrepareResultArtifactInput,
prepareResultArtifactInput
} from '../../cliSchema/prepareArtifactFilesCliConfig';
import { GitOperationWrapper } from '../../utils/GitOperationWrapper';

function copyFile(filePath: string, targetDir: string) {
if (!fs.existsSync(filePath)) {
logger.info(`${filePath} doesn't exist, skipped it.`);
return;
}
const fileDir = path.dirname(filePath);
fs.mkdirSync(`${targetDir}/${fileDir}`, { recursive: true });
fs.copyFileSync(`${filePath}`, `${targetDir}/${filePath}`);
@@ -24,13 +40,7 @@ async function prepareSourceCode(
language: string,
artifactDir: string
) {
const gitOperationWrapper: GitOperationWrapper = new GitOperationWrapper();
for (const p of generateAndBuildOutput.packages) {
const result = p.result;
if (result === TaskResultStatus.Failure) {
logger.warn(`Build ${p.packageName} failed, skipped it`);
continue;
}
const packageName = p.packageName;
const packagePaths = p.path;

@@ -41,40 +51,41 @@
}

if (fs.lstatSync(packagePath).isDirectory()) {
const gitOperationWrapper: GitOperationWrapper = new GitOperationWrapper(packagePath);

for (const filePath of await gitOperationWrapper.getFileListInPackageFolder(packagePath)) {
copyFile(`${path.join(packagePath, filePath)}`, `${artifactDir}/${language}/${packageName}`);
copyFile(
`${path.join(packagePath, filePath)}`,
`${artifactDir}/${language}/sourceCode/${packageName}`
);
}
} else {
copyFile(packagePath, `${artifactDir}/${language}/${packageName}`);
copyFile(packagePath, `${artifactDir}/${language}/sourceCode/${packageName}`);
}
}
}
}

async function prepareArtifacts(generateAndBuildOutput: GenerateAndBuildOutput, language: string, artifactDir: string) {
for (const p of generateAndBuildOutput.packages) {
const result = p.result;
if (result === TaskResultStatus.Failure) {
logger.warn(`Build ${p.packageName} failed, skipped it`);
continue;
}
const artifacts = p.artifacts;
if (!artifacts) {
// artifacts is optional
continue;
}

fs.mkdirSync(`${artifactDir}/${language}/artifact`, { recursive: true });
for (const artifact of artifacts) {
const artifactName = path.basename(artifact);
fs.mkdirSync(`${artifactDir}/${language}`, { recursive: true });
fs.copyFileSync(`${artifact}`, `${artifactDir}/${language}/${artifactName}`);
fs.copyFileSync(`${artifact}`, `${artifactDir}/${language}/artifact/${artifactName}`);
}
}
}
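// Illustrative sketch of the resulting layout (directory names taken only from the paths
// used in prepareSourceCode, prepareArtifacts and prepareResult below; values are placeholders):
//   <artifactDir>/<language>/sourceCode/<packageName>/...  generated source files
//   <artifactDir>/<language>/artifact/<artifactName>       built package artifacts
//   <artifactDir>/<pipelineStatus>/result.json             pipeline result event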

function validateInput(config: PrepareArtifactFilesInput) {
if (!fs.existsSync(config.generateAndBuildOutputFile)) {
throw new Error(`generateAndBuildOutputFile:${config.generateAndBuildOutputFile} isn's exist!`);
logger.error(`generateAndBuildOutputFile:${config.generateAndBuildOutputFile} doesn't exist!`);
process.exit(0);
}
if (!fs.existsSync(config.artifactDir)) {
throw new Error(`Invalid artifactDir:${config.artifactDir}!`);
@@ -84,7 +95,7 @@ function validateInput(config: PrepareArtifactFilesInput) {
}
}

async function main() {
async function prepareSourceCodeAndArtifacts() {
prepareArtifactFilesInput.validate();
const config: PrepareArtifactFilesInput = prepareArtifactFilesInput.getProperties();

@@ -97,6 +108,96 @@ async function main() {
await prepareArtifacts(generateAndBuildOutput, config.language, config.artifactDir);
}

function validateResultInput(config: PrepareResultArtifactInput) {
if (!fs.existsSync(config.artifactDir)) {
throw new Error(`Invalid artifactDir:${config.artifactDir}!`);
}
}

function getTrigger(config: PrepareResultArtifactInput): Trigger {
let trigger: Trigger;
try {
trigger = JSON.parse(config.trigger);
} catch (error) {
logger.error(`Wrong json format:` + config.trigger);
throw new Error(error);
}

return trigger;
}

function prepareResult(pipelineStatus: SDKPipelineStatus) {
prepareResultArtifactInput.validate();
const config: PrepareResultArtifactInput = prepareResultArtifactInput.getProperties();

validateResultInput(config);
const trigger: Trigger = getTrigger(config);
let event: PipelineRunEvent = undefined;

switch (pipelineStatus) {
case 'in_progress':
event = {
status: 'in_progress',
trigger: trigger,
pipelineBuildId: config.pipelineBuildId
} as InProgressEvent;
break;
case 'completed':
if (!config.resultsPath || !config.logPath) {
throw new Error(`Invalid completed event parameter!`);
}

const taskResults: TaskResult[] = getTaskResults(config.resultsPath);
const taskTotalResult: TaskResult = generateTotalResult(taskResults, config.pipelineBuildId);
event = {
status: 'completed',
trigger: trigger,
pipelineBuildId: config.pipelineBuildId,
logPath: config.logPath,
result: taskTotalResult
} as CompletedEvent;
break;
default:
throw new Error(`Unsupported status: ` + (pipelineStatus as string));
}

fs.writeFileSync(config.artifactDir + `/` + pipelineStatus + `/result.json`, JSON.stringify(event, null, 2), {
encoding: 'utf-8'
});
}
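// Illustrative example only (placeholder values): for pipelineStatus 'completed',
// the file written to `${artifactDir}/completed/result.json` has roughly this shape:
// {
//   "status": "completed",
//   "trigger": { ... },              // parsed from the --trigger argument
//   "pipelineBuildId": "<buildId>",
//   "logPath": "<logPath>",
//   "result": { ... }                // total result aggregated from the --resultsPath files
// }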

async function main() {
const args = parseArgs(process.argv);
const pipelineStatus = args['pipelineStatus'];

if (pipelineStatus === 'completed') {
prepareResult(pipelineStatus);
await prepareSourceCodeAndArtifacts();
} else if (pipelineStatus === 'in_progress') {
prepareResult(pipelineStatus);
} else {
throw new Error(`Unknown pipelineStatus:${pipelineStatus}!`);
}
}

/**
* Parse a list of command line arguments.
* @param argv List of cli args(process.argv)
*/
const flagRegex = /^--([^=:]+)([=:](.+))?$/;
export function parseArgs(argv: string[]) {
const result: any = {};
for (const arg of argv) {
const match = flagRegex.exec(arg);
if (match) {
const key = match[1];
const rawValue = match[3];
result[key] = rawValue;
}
}
return result;
}
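// Usage sketch (hypothetical values and entry-point name): parseArgs only picks up
// `--key=value` or `--key:value` flags and ignores other argv entries, so an invocation like
//   node prepareArtifactFiles.js --pipelineStatus=completed --buildId=42
// yields { pipelineStatus: 'completed', buildId: '42' }; a bare flag such as `--debug`
// is recorded with an undefined value because the value group in flagRegex is optional.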

main().catch((e) => {
logger.error(`${e.message}
${e.stack}`);
@@ -5,6 +5,7 @@ import {
CodeGeneration,
CompletedEvent,
generateTotalResult,
getTaskResults,
InProgressEvent,
logger,
MongoConnectContext,
@@ -123,19 +124,6 @@ function getTrigger(config: ResultPublisherEventHubInput): Trigger {
return trigger;
}

function getTaskResults(taskResultsPath: string): TaskResult[] {
const taskResultsPathArray = JSON.parse(taskResultsPath);
const taskResults: TaskResult[] = [];
for (const taskResultPath of taskResultsPathArray) {
if (fs.existsSync(taskResultPath)) {
taskResults.push(requireJsonc(taskResultPath));
} else {
logger.warn(`${taskResultPath} isn't exist, skip.`);
}
}
return taskResults;
}

async function publishEventhub(pipelineStatus: SDKPipelineStatus) {
resultPublisherEventHubInput.validate();
const config: ResultPublisherEventHubInput = resultPublisherEventHubInput.getProperties();
@@ -38,7 +38,7 @@ export const generateResultCliInput = convict<GenerateResultCliInput>({
exeResult: {
default: null,
nullable: true,
format: ['success', 'failure'],
format: ['succeeded', 'failed'],
arg: 'exeResult'
},
taskOutputPath: {
@@ -26,3 +26,43 @@ export const prepareArtifactFilesInput = convict<PrepareArtifactFilesInput>({
arg: 'language'
}
});

export class PrepareResultArtifactInput {
pipelineBuildId: string;
trigger: string;
artifactDir: string;
logPath?: string;
resultsPath?: string;
}

export const prepareResultArtifactInput = convict<PrepareResultArtifactInput>({
pipelineBuildId: {
default: null,
format: assertNullOrEmpty,
arg: 'buildId'
},
trigger: {
default: null,
format: assertNullOrEmpty,
arg: 'trigger'
},
artifactDir: {
doc: 'The dir to publish artifact',
default: null,
format: assertNullOrEmpty,
arg: 'artifactDir'
},
logPath: {
default: null,
nullable: true,
format: String,
arg: 'logPath'
},
resultsPath: {
doc: 'task result files array',
default: null,
nullable: true,
format: String,
arg: 'resultsPath'
}
});
@@ -6,7 +6,7 @@ import * as process from 'process';

const repoCommitId = {
'azure-rest-api-specs': '0baca05c851c1749e92beb0d2134cd958827dd54',
'azure-sdk-for-js': '67946c5b0ce135f58ecfeab1443e5be52604908e',
'azure-sdk-for-js': '57382229a700e0e6f607d6ac0811379a6254f3d9',
'azure-sdk-for-java': '307df24267304fbf3947025bef7eaf9698410de8',
'azure-sdk-for-python': '53f66170cc47739204cedfe0a46989290c047c98',
'azure-sdk-for-go': '241bdb849ce431e1a5e398a5649cde93149ee374',
@@ -33,20 +33,25 @@ async function prepareRepo(currentPath: string, repoName: string) {
stdio: 'inherit'
});

if (!!repoCommitId[repoName] && execSync(`git rev-parse HEAD`, {
encoding: 'utf-8',
cwd: path.join(tmpFolder, repoName)
}).trim() !== repoCommitId[repoName]) {
if (
!!repoCommitId[repoName] &&
execSync(`git rev-parse HEAD`, {
encoding: 'utf-8',
cwd: path.join(tmpFolder, repoName)
}).trim() !== repoCommitId[repoName]
) {
execSync(`git checkout ${repoCommitId[repoName]}`, {
cwd: path.join(tmpFolder, repoName),
stdio: 'inherit'
});
}

if (execSync(`git rev-parse --abbrev-ref HEAD`, {
encoding: 'utf-8',
cwd: path.join(tmpFolder, repoName)
}).trim() !== integrationBranch) {
if (
execSync(`git rev-parse --abbrev-ref HEAD`, {
encoding: 'utf-8',
cwd: path.join(tmpFolder, repoName)
}).trim() !== integrationBranch
) {
execSync(`git switch -c ${integrationBranch}`, {
cwd: path.join(tmpFolder, repoName),
stdio: 'inherit'
@@ -57,9 +62,15 @@
async function runDocker(currentPath: string, sdkRepoName: string, dockerImage: string) {
const tmpFolder = path.join(currentPath, 'tmp');
// eslint-disable-next-line max-len
execSync(`docker run -v ${path.join(tmpFolder, 'azure-rest-api-specs')}:/spec-repo -v ${path.join(tmpFolder, sdkRepoName)}:/sdk-repo ${dockerImage} --readme=specification/agrifood/resource-manager/readme.md`, {
stdio: 'inherit'
});
execSync(
`docker run -v ${path.join(tmpFolder, 'azure-rest-api-specs')}:/spec-repo -v ${path.join(
tmpFolder,
sdkRepoName
)}:/sdk-repo ${dockerImage} --readme=specification/agrifood/resource-manager/readme.md`,
{
stdio: 'inherit'
}
);
}

async function buildDockImage(rushCwd: string, dockerCwd: string) {
@@ -80,7 +91,9 @@ export async function main(options: any) {
options['docker-image'] = defaultImageName;
}
if (!options['sdk-repo']) {
options['sdk-repo'] = Object.keys(repoCommitId).filter((ele) => ele !== 'azure-rest-api-specs').join(',');
options['sdk-repo'] = Object.keys(repoCommitId)
.filter((ele) => ele !== 'azure-rest-api-specs')
.join(',');
}
await prepareRepo(currentPath, 'azure-rest-api-specs');
for (const sdkRepo of options['sdk-repo'].split(',')) {
@@ -42,7 +42,8 @@ export function parseGenerateLog(
pipelineBuildId: string,
taskName: string,
logfile: string,
logFilter: LogFilter
logFilter: LogFilter,
taskExeResult: TaskResultStatus
): TaskResultCommon {
let errorNum = 0;
let warnNum = 0;
@@ -86,7 +87,8 @@
pipelineBuildId: pipelineBuildId,
errorCount: errorNum,
warningCount: warnNum,
messages: messages
messages: messages,
result: taskExeResult
};

return result;
@@ -110,7 +112,7 @@ export function createTaskResult(
warningCount: 0
};
} else {
commonResult = parseGenerateLog(pipelineBuildId, taskname, logfile, logFilter);
commonResult = parseGenerateLog(pipelineBuildId, taskname, logfile, logFilter, taskExeResult);
}
if (taskname === AzureSDKTaskName.MockTest || taskname === AzureSDKTaskName.LiveTest) {
if (taskOutput === undefined) {
