Skip to content

Commit

Permalink
feat!(uploadToS3): add useHash option
Browse files Browse the repository at this point in the history
BREAKING CHANGE: drop support for Node < 18
  • Loading branch information
jedwards1211 committed May 14, 2024
1 parent dd788d8 commit cfffe0e
Show file tree
Hide file tree
Showing 5 changed files with 89 additions and 35 deletions.
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,8 @@ Positionals:
Options:
--version Show version number [boolean]
--help Show help [boolean]
--hash compute hash and skip if already uploaded [boolean]
```

### Example Output
Expand Down Expand Up @@ -240,6 +242,11 @@ export async function uploadToS3(options: {
 * The S3 key to upload to. Defaults to `lambda/node/${packageName}/${filename}`
*/
Key?: string
/**
* If true, compute hash and append the hash to the filename; check if the S3 key
* already exists, and if so, skip upload.
*/
useHash?: boolean
}): Promise<{
/**
* The files that were packed (relative to packageDir)
Expand Down
6 changes: 3 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"description": "Great .zip packager and S3 uploader for AWS Lambda",
"sideEffects": false,
"bin": {
"pack-lambda": "./bin/index.js"
"pack-lambda": "./dist/bin/index.js"
},
"scripts": {
"prepack": "tc build && ln -sf ../node_modules dist/node_modules",
Expand Down Expand Up @@ -47,7 +47,7 @@
"@types/chai": "^4.3.5",
"@types/fs-extra": "^9.0.13",
"@types/mocha": "^10.0.1",
"@types/node": "^16.10.2",
"@types/node": "^18.0.0",
"@types/npm-package-arg": "^6.1.1",
"@types/npm-packlist": "^1.1.2",
"@types/pacote": "^11.1.1",
Expand Down Expand Up @@ -96,7 +96,7 @@
}
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"packageManager": "[email protected]"
}
32 changes: 16 additions & 16 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 8 additions & 1 deletion src/bin/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ type WriteZipOptions = {
type UploadOptions = {
bucket: string
key?: string
hash?: boolean
}

yargs(process.argv.slice(2))
Expand Down Expand Up @@ -48,15 +49,21 @@ yargs(process.argv.slice(2))
describe: 'S3 Bucket[/Key]',
demandOption: true,
})
.positional('key', { describe: 'S3 Key' }),
.positional('key', { describe: 'S3 Key' })
.option('hash', {
describe: 'compute hash and skip if already uploaded',
type: 'boolean',
}),
({
bucket,
key,
hash,
}: // eslint-disable-next-line @typescript-eslint/no-explicit-any
Arguments<UploadOptions>): Promise<any> =>
uploadToS3({
Bucket: bucket,
Key: key,
useHash: hash,
})
)
.demandCommand()
Expand Down
70 changes: 55 additions & 15 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,12 @@ import runScript from '@npmcli/run-script'
import fs from 'fs-extra'
import chalk from 'chalk'
import stream from 'stream'
import { pipeline } from 'stream/promises'
import Path from 'path'
import emitted from 'p-event'
import packlist from './packlist'
import crypto from 'crypto'
import { HeadObjectCommand } from '@aws-sdk/client-s3'

type CreateArchiveResult = {
archive: Archiver
Expand All @@ -18,8 +21,10 @@ type CreateArchiveResult = {

export async function createArchive({
packageDir,
includeTimestamp,
}: {
packageDir: string
includeTimestamp?: boolean
}): Promise<CreateArchiveResult> {
const packageJsonFile = Path.join(packageDir, 'package.json')
const rawPackageJson = await fs.readFile(packageJsonFile)
Expand All @@ -44,7 +49,8 @@ export async function createArchive({
})

const filename = `${manifest.name}-${manifest.version}${
manifest.version.endsWith('-development')
includeTimestamp === true ||
(includeTimestamp !== false && manifest.version.endsWith('-development'))
? `-${new Date().toISOString().replace(/\D/g, '')}`
: ''
}.zip`
Expand All @@ -59,6 +65,7 @@ export async function createArchive({
for (const [from, { target, mode }] of symlinks.entries()) {
archive.symlink(from, target, mode)
}
archive.finalize()
return { archive, files: [...files], bundled, filename, manifest }
}

Expand Down Expand Up @@ -123,7 +130,7 @@ export async function writeZip({
await fs.mkdirs(Path.dirname(filename))
const writeStream = fs.createWriteStream(filename)
archive.pipe(writeStream)
await Promise.all([emitted(writeStream, 'close'), archive.finalize()])
await emitted(writeStream, 'close')
// eslint-disable-next-line no-console
console.error(Path.relative(process.cwd(), filename))
}
Expand All @@ -143,31 +150,64 @@ export async function uploadToS3({
packageDir = process.cwd(),
Bucket: bucket,
Key: key,
useHash,
}: {
packageDir?: string
Bucket: string
Key?: string
useHash?: boolean
}): Promise<UploadToS3Result> {
const { archive, filename, files, bundled, manifest } = await createArchive({
const { S3Client } = await import('@aws-sdk/client-s3')
const client = new S3Client({})

const createArchiveResult = await createArchive({
packageDir,
includeTimestamp: !useHash,
})
const { archive, files, bundled, manifest } = createArchiveResult
let { filename } = createArchiveResult

const parts = bucket.replace(/^s3:\/\//, '').split(/\//)
const Bucket = parts[0]
const Key = key || parts[1] || `lambda/node/${manifest.name}/${filename}`
const { S3Client } = await import('@aws-sdk/client-s3')
const { Upload } = await import('@aws-sdk/lib-storage')
printDetails({ filename, files, bundled, manifest })
let Key = key || parts[1] || `lambda/node/${manifest.name}/${filename}`

const upload = new Upload({
client: new S3Client({}),
params: { Bucket, Key, Body: archive.pipe(new stream.PassThrough()) },
})
let alreadyExists = false

process.stderr.write(`Uploading to s3://${Bucket}/${Key}...`)
upload.on('httpUploadProgress', () => process.stderr.write('.'))
await Promise.all([upload.done(), archive.finalize()])
if (useHash) {
const hash = crypto.createHash('SHA-256')
for (const file of files.sort()) {
if (file.startsWith('node_modules')) continue
await pipeline(fs.createReadStream(Path.join(packageDir, file)), hash, {
end: false,
})
}
const digest = hash.digest('hex')
filename = filename.replace(/\.zip$/, `-${digest}.zip`)
Key = Key.replace(/\.zip$/, `-${digest}.zip`)
try {
await client.send(new HeadObjectCommand({ Bucket, Key }))
alreadyExists = true
console.error(`✅ Bundle already exists: s3://${Bucket}/${Key}\n`)
} catch (error) {
// ignore
}
}

if (!alreadyExists) {
const { Upload } = await import('@aws-sdk/lib-storage')
printDetails({ filename, files, bundled, manifest })

process.stderr.write(`done\n`)
const upload = new Upload({
client,
params: { Bucket, Key, Body: archive.pipe(new stream.PassThrough()) },
})

process.stderr.write(`🚀 Uploading to s3://${Bucket}/${Key}...`)
upload.on('httpUploadProgress', () => process.stderr.write('.'))
await upload.done()

process.stderr.write(`done\n`)
}

return { files, bundled, filename, manifest, Bucket, Key }
}

0 comments on commit cfffe0e

Please sign in to comment.