I am trying to integrate CodePipeline with an on-premises Bitbucket Server using the steps in
https://github.com/aws-samples/aws-codepipeline-bitbucket-integration.
When I push code to the repository, I get the error below. I am not sure what I am still missing.
File downloaded successfully: <ref *2> IncomingMessage {
_events:
close: [Function: onclose],
error: [Function: onerror],
data: undefined,
end: [ [Function: responseOnEnd], [Function: onend] ],
readable: undefined,
finish: [Function: onfinish]
WARN Are you using a Stream of unknown length as the Body of a PutObject request? Consider using Upload instead from @aws-sdk/lib-storage.
WARN An error was encountered in a non-retryable streaming request.
INFO Exiting with error NotImplemented: A header you provided implies functionality that is not implemented
at throwDefaultError (/var/runtime/node_modules/@aws-sdk/node_modules/@smithy/smithy-client/dist-cjs/index.js:838:20)
at /var/runtime/node_modules/@aws-sdk/node_modules/@smithy/smithy-client/dist-cjs/index.js:847:5
at de_CommandError (/var/runtime/node_modules/@aws-sdk/client-s3/dist-cjs/index.js:4756:14)
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
at async /var/runtime/node_modules/@aws-sdk/node_modules/@smithy/middleware-serde/dist-cjs/index.js:35:20
at async /var/runtime/node_modules/@aws-sdk/middleware-signing/dist-cjs/index.js:225:18
at async /var/runtime/node_modules/@aws-sdk/node_modules/@smithy/middleware-retry/dist-cjs/index.js:320:38
at async /var/runtime/node_modules/@aws-sdk/middleware-flexible-checksums/dist-cjs/index.js:173:18
at async /var/runtime/node_modules/@aws-sdk/middleware-sdk-s3/dist-cjs/index.js:97:20
at async /var/runtime/node_modules/@aws-sdk/middleware-sdk-s3/dist-cjs/index.js:120:14 {
'$fault': 'client',
'$metadata':
httpStatusCode: 501,
requestId: 'G64RMGHEPWVDE',
extendedRequestId: 'FTZNy/uEqwgkrWH3XP/QIIiCIZAc5f+ZxCG3tFJp7QhFrCopRunG5ScmqfMxbpWus3o6cuXoC1M=',
cfId: undefined
Code: 'NotImplemented',
Header: 'Transfer-Encoding',
RequestId: 'G64RMGHYPWVDE',
HostId: 'FTZNy/uEqwgkrWH3XP/QIIiCIZAc5f3tFJp7QhFrCopRunG5ScmqfMxbpWus3o6cuXoC1M='
I have already updated the bucket policy and the IAM role.
Bucket policy:
{
    "Version": "2008-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Principal": {
                "AWS": "arn:aws:iam::xxxxxxxxx:role/lambda-executionrole"
            },
            "Action": [
                "s3:Get*",
                "s3:List*",
                "s3:Put*",
                "s3:Delete*"
            ],
            "Resource": [
                "arn:aws:s3:::bucketname",
                "arn:aws:s3:::bucketname/*"
            ]
        }
    ]
}
The Lambda execution role has S3 full access and the AWSLambdaVPCAccessExecutionRole managed policy attached, plus a custom policy with the following permissions:
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Action": [
                "s3:GetBucket*",
                "s3:ListBucket*"
            ],
            "Resource": [
                "arn:aws:s3:::bucketname"
            ],
            "Effect": "Allow",
            "Sid": "S3BucketPolicy"
        },
        {
            "Action": [
                "s3:AbortMultipartUpload",
                "s3:GetObject*",
                "s3:PutObject*",
                "s3:DeleteObject*",
                "s3:RestoreObject",
                "s3:ListMultipartUploadParts"
            ],
            "Resource": [
                "arn:aws:s3:::bucketname/*"
            ],
            "Effect": "Allow"
        },
        {
            "Action": [
                "kms:decrypt"
            ],
            "Resource": [
                "arn:aws:kms:us-east-1:xxxxxxxxx:key/xxxxxxxxxxxxx9"
            ],
            "Effect": "Allow"
        }
    ]
}
Here is my code:
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
import axios from "axios";
import crypto from 'node:crypto';

// Initialize S3 client
const s3Client = new S3Client({ region: process.env.AWS_REGION });

/**
 * Convert an object's keys to lowercase
 * @param {object} inputObject - the object whose keys will be converted to lowercase
 * @returns {object} - a new object with all keys in lowercase
 */
function normalizeObject(inputObject) {
    console.log('info', '>>> normalizeObject()');
    const requestKeys = Object.keys(inputObject);
    let outputObject = {};
    for (let i = 0; i < requestKeys.length; i++) {
        outputObject[requestKeys[i].toLowerCase()] = inputObject[requestKeys[i]];
    }
    console.log('info', '<<< normalizeObject()');
    return outputObject;
}

/**
 * Download the repository content as a zip file
 * @param {object} repoConfig - an object containing the repository configuration
 * @param {object} proxy - an object containing the web proxy configuration
 * @returns {stream} - a stream containing the zipped repository
 */
async function downloadFile(repoConfig, proxy) {
    console.log('info', '>>> downloadFile()');
    console.log(`proxy: ${JSON.stringify(proxy)}`);
    const params = {
        proxy,
        method: 'get',
        baseURL: repoConfig.serverUrl,
        url: `/rest/api/latest/projects/${repoConfig.projectName}/repos/${repoConfig.repoName}/archive?at=refs/heads/${repoConfig.branch}&format=zip`,
        responseType: 'stream',
        headers: {
            Authorization: `Bearer ${repoConfig.token}`
        }
    };
    console.log('Request params:', JSON.stringify(params, null, 2));
    try {
        const resp = await axios.request(params);
        console.log('info', '<<< downloadFile()');
        return resp.data;
    }
    catch (err) {
        console.log('error', err);
        throw new Error(err);
    }
}

/**
 * Check the Bitbucket Server webhook signature
 * @param {string} signingSecret - the signing secret configured for the Bitbucket Server webhook
 * @param {string} signature - the signature applied by Bitbucket Server to the message
 * @param {object} body - the message body
 * @returns {boolean} - true if the signature is valid, false otherwise
 */
function checkSignature(signingSecret, signature, body) {
    console.log('info', '>>> checkSignature()');
    const hash = crypto.createHmac('sha256', signingSecret).update(body).digest('hex');
    const signatureHash = signature.split('=');
    if (signatureHash[1] === hash) {
        console.log('info', '<<< checkSignature()');
        return true;
    }
    console.log('info', '<<< checkSignature()');
    return false;
}

/**
 * Generate a response for API Gateway
 * @param {string} statusCode - the HTTP status code to return
 * @param {string} detail - the message detail to return
 * @returns {object} - the formatted response object
 */
function responseToApiGw(statusCode, detail) {
    if (!statusCode) {
        throw new TypeError('responseToApiGw() expects at least argument statusCode');
    }
    if (statusCode !== '200' && !detail) {
        throw new TypeError('responseToApiGw() expects at least arguments statusCode and detail');
    }
    let body = {};
    if (statusCode === '200' && detail) {
        body = {
            statusCode: statusCode,
            message: detail
        };
    } else if (statusCode === '200' && !detail) {
        body = {
            statusCode: statusCode
        };
    } else {
        body = {
            statusCode: statusCode,
            fault: detail
        };
    }
    let response = {
        statusCode: statusCode,
        body: JSON.stringify(body),
        headers: {
            'Content-Type': 'application/json',
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': 'POST, GET',
            'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept'
        }
    };
    return response;
}

export const handler = async (event) => {
    try {
        console.log(`Incoming event: ${JSON.stringify(event)}`);
        const eventBody = JSON.parse(event.body);
        console.log('eventBody');
        console.log(eventBody);
        // Normalize headers
        const normalizedHeaders = normalizeObject(event.headers);
        // Respond to test event
        if ('x-event-key' in normalizedHeaders && normalizedHeaders['x-event-key'] === 'diagnostics:ping') {
            return responseToApiGw(200, 'Webhook configured successfully');
        }
        // Validate message signature
        if (!(checkSignature(process.env.BITBUCKET_SECRET, normalizedHeaders['x-hub-signature'], event.body))) {
            console.log('Invalid webhook message signature');
            return responseToApiGw(401, 'Signature is not valid');
        }
        console.log('Signature validated successfully');
        if (!(eventBody.changes[0].ref.type === 'BRANCH')) {
            console.log('Invalid event type');
            throw new Error('Invalid event type');
        }
        const repoConfig = {
            serverUrl: process.env.BITBUCKET_SERVER_URL,
            projectName: eventBody.repository.project.key,
            repoName: eventBody.repository.name,
            branch: eventBody.changes[0].ref.displayId,
            token: process.env.BITBUCKET_TOKEN
        };
        let proxy;
        if (process.env.WEBPROXY_HOST && process.env.WEBPROXY_PORT) {
            proxy = {
                host: process.env.WEBPROXY_HOST,
                port: process.env.WEBPROXY_PORT
            };
        }
        // Download the repository package from Bitbucket Server
        const file = await downloadFile(repoConfig, proxy);
        console.log('File downloaded successfully:', file);
        // Create a PutObjectCommand with the zipped repository package key and file
        const uploadCommand = new PutObjectCommand({
            Bucket: process.env.S3BUCKET,
            Key: `${repoConfig.projectName}/${repoConfig.repoName}/${repoConfig.branch}.zip`,
            Body: file,
            ServerSideEncryption: "AES256"
        });
        // Upload the repository package to S3 bucket
        // await s3Client.send(uploadCommand).promise();
        const s3Upload = await s3Client.send(uploadCommand);
        console.log(s3Upload);
        console.log('Exiting successfully');
        return responseToApiGw(200, 'success');
    }
    catch (err) {
        console.log('Exiting with error', err);
        return responseToApiGw(500, 'Some weird thing happened');
    }
}
It worked. Add the code below and make sure you have the S3 permissions in both the bucket policy and the Lambda execution role. As the warning in the log points out, the axios response is a stream of unknown length, so the PutObject request would have to be sent with chunked Transfer-Encoding, which S3 rejects with NotImplemented; the Upload class from @aws-sdk/lib-storage buffers the stream into parts instead.
// Create an Upload with the zipped repository package key and file
const upload = new Upload({
    client: s3Client,
    params: {
        Bucket: process.env.S3BUCKET,
        Key: `${repoConfig.projectName}/${repoConfig.repoName}/${repoConfig.branch}.zip`,
        Body: file,
        ServerSideEncryption: "AES256"
    }
});
// Monitor the upload progress
upload.on("httpUploadProgress", (progress) => {
    console.log(progress);
});
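To tie the pieces together, here is a minimal sketch of the upload step written as a standalone helper. uploadRepoArchive is a hypothetical name, and the sketch assumes the same s3Client, repoConfig, and file variables as in the handler above, plus the Upload import that the snippet also needs:

import { Upload } from "@aws-sdk/lib-storage";

// Hypothetical helper: streams a repository archive to S3 with a managed upload,
// buffering the stream into parts so the total length does not need to be known up front.
async function uploadRepoArchive(s3Client, bucket, key, body) {
    const upload = new Upload({
        client: s3Client,
        params: {
            Bucket: bucket,
            Key: key,
            Body: body,
            ServerSideEncryption: "AES256"
        }
    });

    // Log progress as parts are uploaded
    upload.on("httpUploadProgress", (progress) => {
        console.log(progress);
    });

    // Resolves once the upload has completed
    return upload.done();
}

// Usage inside the handler, in place of the PutObjectCommand / s3Client.send() lines:
// const s3Upload = await uploadRepoArchive(
//     s3Client,
//     process.env.S3BUCKET,
//     `${repoConfig.projectName}/${repoConfig.repoName}/${repoConfig.branch}.zip`,
//     file
// );

Because Upload may perform a multipart upload under the hood, keeping the s3:AbortMultipartUpload and s3:ListMultipartUploadParts permissions already present in the custom policy is a good idea.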