Merge pull request #8 from NotCoffee418/dev
v1.2
NotCoffee418 authored Aug 25, 2023
2 parents 6ab1a72 + 81cb104 commit 8bc1250
Showing 7 changed files with 62,712 additions and 67,664 deletions.
4 changes: 4 additions & 0 deletions .husky/pre-commit
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+. "$(dirname -- "$0")/_/husky.sh"
+
+npx ncc build index.js -o release && git add release/index.js
2 changes: 1 addition & 1 deletion action.yml
@@ -10,7 +10,7 @@ runs:
   main: 'release/index.js'
 dependencies:
   - '@actions/core'
-  - 'aws-sdk'
+  - '@aws-sdk/client-s3'
   - 'archiver'
   - 'fs'
   - 'path'
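For context, this dependency swap tracks the move from the monolithic v2 'aws-sdk' package to the modular AWS SDK for JavaScript v3. A minimal sketch of the v3 setup the rewritten index.js relies on (note that index.js also imports '@aws-sdk/lib-storage' for managed uploads, which does not appear in this dependency list):

// Modular v3 imports used by the new index.js (see the index.js diff below).
const { S3Client } = require('@aws-sdk/client-s3')
const { Upload } = require('@aws-sdk/lib-storage')

// v3 nests the keys in a credentials object instead of passing
// accessKeyId/secretAccessKey at the top level as v2 did.
const s3 = new S3Client({
  region: 'eu-central-1',
  credentials: {
    accessKeyId: process.env.AWS_SECRET_ID,
    secretAccessKey: process.env.AWS_SECRET_KEY
  }
})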
2 changes: 1 addition & 1 deletion debug/debug.js
@@ -16,7 +16,7 @@ if (param.toLowerCase() === 'zip') {
   process.env.SOURCE_PATH = './debug/debugdata/dog1.jpg'
   process.env.DEST_FILE = 'debug-override/debug-dog.jpg'
 }
-process.env.ZIP_PATH = './debug/debugdata.zip';
+//process.env.ZIP_PATH = './debug/debugdata.zip';
 
 (async () => {
   // Pass all environment variables into the exec() function
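With the override commented out, the debug harness now exercises the new default ZIP_PATH from index.js. A small sketch of how that default resolves (same destructuring pattern as in the index.js diff below):

const os = require('os')
const path = require('path')

// Falls back to a file in the OS temp directory when ZIP_PATH is unset,
// mirroring the new default in index.js.
const { ZIP_PATH = path.join(os.tmpdir(), 'tmp.zip') } = process.env
console.log(ZIP_PATH) // e.g. /tmp/tmp.zip on a Linux runner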
88 changes: 57 additions & 31 deletions index.js
@@ -1,12 +1,15 @@
 const core = require('@actions/core')
 const path = require('path')
+const os = require('os')
 const fs = require('fs')
 const archiver = require('archiver')
-const AWS = require('aws-sdk')
+const { S3Client } = require('@aws-sdk/client-s3')
+const { Upload } = require('@aws-sdk/lib-storage')
 require('dotenv').config()
 
 async function main () {
   // Load data from environment variables
+  let cleanupFiles = []
   try {
     const {
       SOURCE_PATH = null,
@@ -16,7 +19,7 @@ async function main () {
       AWS_SECRET_KEY = null,
       AWS_REGION = 'eu-central-1',
       S3_ENDPOINT = null,
-      ZIP_PATH = './tmp.zip', // Temporary zip file. Will not be removed automatically
+      ZIP_PATH = path.join(os.tmpdir(), 'tmp.zip'),
       SOURCE_MODE = 'ZIP' // ZIP, FILE
     } = process.env
 
@@ -59,14 +62,24 @@ async function main () {
 
     // Compress directory if needed
     if (SOURCE_MODE === modes.ZIP) {
-      console.log(`Creating zip file of directory ${path.resolve(SOURCE_PATH)}`)
+      console.log(`Creating zip file of directory ${path.resolve(SOURCE_PATH)} at ${path.resolve(ZIP_PATH)}`)
       try {
+        cleanupFiles.push(ZIP_PATH)
         const archive = archiver('zip', { zlib: { level: 9 } })
         const stream = fs.createWriteStream(ZIP_PATH)
         await new Promise((resolve, reject) => {
           archive
             .directory(SOURCE_PATH, false)
-            .on('error', err => reject(err))
+            .on('error', err => {
+              console.error('Error inside archive:', err)
+              reject(err)
+            })
+            .on('warning', warning => {
+              console.warn('Warning:', warning)
+            })
+            .on('entry', entry => {
+              console.log('Archiving:', entry.name)
+            })
             .pipe(stream)
 
           stream.on('close', () => resolve())
@@ -77,54 +90,67 @@ async function main () {
         throw err
       }
     }
 
     // Init S3
-    console.log(`Initializing S3 upload to bucket "${BUCKET_NAME}"`)
+    console.log(`Initializing S3 upload to bucket "${BUCKET_NAME}"`);
     const s3Config = {
-      apiVersion: '2006-03-01',
-      accessKeyId: AWS_SECRET_ID,
-      secretAccessKey: AWS_SECRET_KEY,
-      region: AWS_REGION
-    }
+      apiVersion: "2006-03-01",
+      credentials: {
+        accessKeyId: AWS_SECRET_ID,
+        secretAccessKey: AWS_SECRET_KEY,
+      },
+      region: AWS_REGION,
+    };
     if (S3_ENDPOINT) {
-      s3Config.endpoint = S3_ENDPOINT
+      s3Config.endpoint = S3_ENDPOINT;
     }
-    const s3 = new AWS.S3(s3Config)
+    const s3 = new S3Client(s3Config);
 
     // Upload file
-    const fileToUpload = SOURCE_MODE === modes.ZIP ? ZIP_PATH : SOURCE_PATH
+    const fileToUpload = SOURCE_MODE === modes.ZIP ? ZIP_PATH : SOURCE_PATH;
     let readStream
     try {
-      readStream = fs.createReadStream(fileToUpload)
+      readStream = fs.createReadStream(fileToUpload);
     } catch (err) {
-      console.log(`Failed to read file "${fileToUpload}"`)
+      console.log(`Failed to read file "${fileToUpload}"`);
       throw err
     }
 
     const req = {
       Body: readStream,
       Bucket: BUCKET_NAME,
-      Key: DEST_FILE
+      Key: DEST_FILE,
     }
 
-    console.log(`Uploading zip to "${BUCKET_NAME}" as "${DEST_FILE}"`)
+    console.log(`Uploading zip to "${BUCKET_NAME}" as "${DEST_FILE}"`);
 
     // Use the managed upload feature of the SDK to upload the stream
-    const upload = new AWS.S3.ManagedUpload({
-      params: req,
-      service: s3
+    const upload = new Upload({
+      client: s3,
+      params: req
     })
 
-    upload.send((err, data) => {
-      if (err) {
-        console.log(`Failed upload to ${BUCKET_NAME}`)
-        throw Error(`S3 Upload error: ${err}`)
-      } else {
-        console.log(`Succesful upload to ${BUCKET_NAME}`)
-      }
-    })
+    try {
+      await upload.done();
+      console.log(`Succesful upload to ${BUCKET_NAME}`);
+    } catch (err) {
+      console.log(`Failed upload to ${BUCKET_NAME}`);
+      throw Error(`S3 Upload error: ${err}`);
+    }
   } catch (error) {
     core.setFailed(error.message)
+  } finally {
+    try {
+      //cleanup temp files
+      cleanupFiles.forEach(file => {
+        if (fs.existsSync(file)) {
+          fs.unlinkSync(file)
+        }
+      })
+    } catch (err) {
+      console.error('An error occurred while cleaning up')
+      console.error(err)
+    }
   }
 }
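Taken together, the index.js changes replace the v2 AWS.S3.ManagedUpload callback API with the awaitable Upload helper from '@aws-sdk/lib-storage', default the temporary zip to os.tmpdir(), and delete it in a finally block. A condensed, self-contained sketch of that flow (zipAndUpload is a hypothetical name, and the archive.finalize() call is assumed from archiver's usual API, since that part of the file falls outside the hunks shown above):

const os = require('os')
const path = require('path')
const fs = require('fs')
const archiver = require('archiver')
const { S3Client } = require('@aws-sdk/client-s3')
const { Upload } = require('@aws-sdk/lib-storage')

async function zipAndUpload (sourcePath, bucket, key) {
  const zipPath = path.join(os.tmpdir(), 'tmp.zip')
  try {
    // Zip the source directory, surfacing archiver errors and warnings
    // the same way the commit does.
    await new Promise((resolve, reject) => {
      const archive = archiver('zip', { zlib: { level: 9 } })
      const stream = fs.createWriteStream(zipPath)
      archive
        .directory(sourcePath, false)
        .on('error', reject)
        .on('warning', warning => console.warn('Warning:', warning))
        .pipe(stream)
      stream.on('close', () => resolve())
      archive.finalize() // assumed: flushes the archive so 'close' fires
    })

    // v3 managed upload: streams the zip and resolves when the upload completes.
    const s3 = new S3Client({ region: process.env.AWS_REGION || 'eu-central-1' })
    const upload = new Upload({
      client: s3,
      params: { Bucket: bucket, Key: key, Body: fs.createReadStream(zipPath) }
    })
    await upload.done()
  } finally {
    // Mirror the commit's cleanup: remove the temp zip even if the upload failed.
    if (fs.existsSync(zipPath)) fs.unlinkSync(zipPath)
  }
}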
