Uploads compiled assets to Amazon S3 after an Rspack build. This plugin is an Rspack port of the popular webpack-s3-plugin.
- âś… Uploads build assets to S3 automatically after compilation
- âś… Supports file inclusion/exclusion patterns
- âś… CloudFront invalidation support
- âś… Progress indicators during upload
- âś… CDN URL replacement (cdnizer integration)
- âś… Priority-based upload ordering
- âś… Dynamic upload options
- âś… Built specifically for Rspack
```bash
pnpm add -D rspack-s3-plugin
# or
npm install --save-dev rspack-s3-plugin
# or
yarn add -D rspack-s3-plugin
```

Requires:

- Node.js >= 14.0.0
- Rspack >= 0.1.0
Add the plugin to your Rspack configuration:

```js
const S3Plugin = require("rspack-s3-plugin");

module.exports = {
  // ... your rspack config
  plugins: [
    new S3Plugin({
      s3Options: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        region: "us-west-1",
      },
      s3UploadOptions: {
        Bucket: "my-bucket-name",
      },
    }),
  ],
};
```

Filter which files are uploaded:

```js
new S3Plugin({
  // Only upload CSS and JS files
  include: /.*\.(css|js)$/,
  // Exclude HTML files
  exclude: /.*\.html$/,
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
}),
```

Invalidate a CloudFront distribution after uploading:

```js
new S3Plugin({
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    sessionToken: process.env.AWS_SESSION_TOKEN, // optional
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
  cloudfrontInvalidateOptions: {
    DistributionId: process.env.CLOUDFRONT_DISTRIBUTION_ID,
    Items: ['/*'],
  },
}),
```

Prefix uploaded file keys with a base path:

```js
new S3Plugin({
  basePath: 'assets/',
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
}),
```

Transform the base path at upload time (for example, to version uploads by git SHA):

```js
new S3Plugin({
  basePathTransform: async () => {
    // Return a promise or string
    const gitSha = require('child_process')
      .execSync('git rev-parse --short HEAD')
      .toString()
      .trim();
    return `v${gitSha}/`;
  },
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
}),
```

Rewrite asset URLs to a CDN with cdnizer:

```js
new S3Plugin({
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
  cdnizerOptions: {
    defaultCDNBase: 'https://cdn.example.com',
  },
}),
```

Compute upload options per file:

```js
new S3Plugin({
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
    ContentEncoding(fileName) {
      if (/\.gz$/.test(fileName)) {
        return 'gzip';
      }
    },
    ContentType(fileName) {
      if (/\.js$/.test(fileName)) {
        return 'application/javascript';
      }
      return 'text/plain';
    },
  },
}),
```

Control the upload order with `priority`:

```js
new S3Plugin({
  priority: [
    /.*\.html$/, // Upload HTML files last
    /.*\.css$/,  // Upload CSS files second to last
  ],
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
}),
```

You can use an AWS credentials file instead of environment variables:
```js
const { fromIni } = require('@aws-sdk/credential-providers');

new S3Plugin({
  s3Options: {
    credentials: fromIni({ profile: 'my-profile' }),
    region: 'us-west-1',
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
}),
```

| Option | Type | Default | Description |
|---|---|---|---|
| `s3Options` | Object | `{}` | AWS S3 client configuration (required) |
| `s3UploadOptions` | Object | `{}` | S3 upload options. Must include `Bucket` (required) |
| `include` | RegExp/Function/Array/String | `undefined` | Pattern to match files to include |
| `exclude` | RegExp/Function/Array/String | `undefined` | Pattern to match files to exclude |
| `basePath` | String | `''` | Base path prefix for uploaded files |
| `basePathTransform` | Function | `(path) => path` | Transform function for base path (can return a Promise) |
| `directory` | String | `output.path` | Directory to upload (if not set, uses compilation assets) |
| `htmlFiles` | String/Array | `[]` | HTML files to process with cdnizer |
| `cdnizerOptions` | Object | `{}` | Options for cdnizer (if empty, cdnizer is disabled) |
| `cloudfrontInvalidateOptions` | Object | `{}` | CloudFront invalidation options |
| `progress` | Boolean | `true` | Show upload progress |
| `priority` | Array | `undefined` | Priority order for file uploads |
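
As a sketch of how `directory` and `htmlFiles` from the table combine with cdnizer: upload a prebuilt output folder and rewrite asset URLs in its HTML entry point. The folder name, HTML file, and CDN base below are assumptions for illustration, not defaults of the plugin:

```js
new S3Plugin({
  // Upload everything in this folder instead of the compilation assets
  directory: 'dist',         // hypothetical output folder
  // HTML files that cdnizer should process
  htmlFiles: ['index.html'], // hypothetical entry page
  cdnizerOptions: {
    defaultCDNBase: 'https://cdn.example.com',
  },
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
}),
```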
`s3Options`: AWS SDK v3 S3 client configuration. See the AWS SDK v3 S3 Client documentation.

`s3UploadOptions`: S3 upload parameters. See the AWS SDK v3 PutObjectCommand documentation.

Required:

- `Bucket`: The S3 bucket name

Optional:

- `Key`: Can be a function `(fileName, filePath) => string` (example below)
- `ContentType`: Can be a function `(fileName, filePath) => string`
- `ContentEncoding`: Can be a function `(fileName, filePath) => string`
- Any other S3 PutObject parameters
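
For example, `Key` can be computed per file using the function form documented above. A minimal sketch, assuming you want every asset under a `static/` prefix (the prefix is illustrative, not something the plugin requires):

```js
new S3Plugin({
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
    // Receives the file name and path, returns the S3 object key.
    // The 'static/' prefix is just an illustrative naming scheme.
    Key(fileName, filePath) {
      return `static/${fileName}`;
    },
  },
}),
```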
`cloudfrontInvalidateOptions`:

- `DistributionId`: String or Array of CloudFront distribution IDs (example below)
- `Items`: Array of paths to invalidate (e.g., `['/*']`)
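
If several distributions serve the same bucket, `DistributionId` can be given as an array, per the option above. A minimal sketch; the environment variable names are placeholders:

```js
new S3Plugin({
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
  s3UploadOptions: {
    Bucket: 'my-bucket-name',
  },
  cloudfrontInvalidateOptions: {
    // DistributionId also accepts an array of distribution IDs.
    // These env var names are placeholders for illustration.
    DistributionId: [
      process.env.CLOUDFRONT_DISTRIBUTION_ID_WEB,
      process.env.CLOUDFRONT_DISTRIBUTION_ID_ASSETS,
    ],
    Items: ['/*'],
  },
}),
```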
The include and exclude options support multiple rule types:
- RegExp: Regular expression to test against file path
- Function: Function that receives the file path and returns boolean
- Array: Array of rules (all must match for include, any match for exclude)
- String: String pattern converted to RegExp
Example:
```js
new S3Plugin({
  include: [
    /.*\.(css|js)$/,
    (filePath) => filePath.includes('assets'),
  ],
  exclude: /.*\.map$/,
  // ... other options
}),
```

Differences from webpack-s3-plugin:

- Uses Rspack's `getAssets()` API instead of `compilation.assets`
- Uses native `fs.promises` instead of `recursive-readdir`
- Replaced `lodash` with native JavaScript methods
- Replaced the `progress` package with a lightweight custom implementation
- Updated to the latest AWS SDK v3
- Updated `mime` to v4
- Built specifically for Rspack (not compatible with Webpack)
The API is identical to webpack-s3-plugin, so you can simply replace the import:
```js
// Before
const S3Plugin = require("webpack-s3-plugin");

// After
const S3Plugin = require("rspack-s3-plugin");
```

Licensed under the MIT License.
Contributions are welcome! Please feel free to submit a Pull Request.
This plugin is based on webpack-s3-plugin by Mika Kalathil, adapted for Rspack.