This repository was archived by the owner on Feb 1, 2022. It is now read-only.

Commit 9638294

feat: enable pulling from multiple buckets (#14)
1 parent 0c946a4 commit 9638294

2 files changed (+62, -27 lines)

README.md

Lines changed: 23 additions & 3 deletions
@@ -27,20 +27,40 @@ require("dotenv").config({
 module.exports = {
   plugins: [
     {
-      resolve: "gatsby-source-s3",
+      resolve: `@robinmetral/gatsby-source-s3`,
       options: {
         aws: {
           accessKeyId: process.env.AWS_ACCESS_KEY_ID,
           secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-          region: "eu-central-1"
+          region: process.env.AWS_REGION
         },
-        buckets: ["my-bucket-name", "other-bucket"]
+        buckets: ["my-bucket", "my-second-bucket"]
       }
     }
   ]
 };
 ```
 
+Currently, your buckets will need to be configured for public access with this
+access policy (add your bucket name under `Statement.Resource`):
+
+```json
+{
+  "Version": "2008-10-17",
+  "Statement": [
+    {
+      "Sid": "AllowPublicRead",
+      "Effect": "Allow",
+      "Principal": {
+        "AWS": "*"
+      },
+      "Action": "s3:GetObject",
+      "Resource": "arn:aws:s3:::my-bucket/*"
+    }
+  ]
+}
+```
+
 ## Query
 
 S3 objects can be queried in GraphQL as "s3Object" or "allS3Object":
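
For reference, a page query against these nodes might look like the sketch below. The `Key`, `Bucket`, and `Url` fields correspond to the node fields created in `gatsby-node.ts` by this commit; the component itself and the page it lives on are hypothetical illustrations, not taken from the README.

```js
// Hypothetical Gatsby page component querying the S3Object nodes this plugin creates.
// Key, Bucket, and Url mirror the fields set in gatsby-node.ts; everything else is illustrative.
import React from "react";
import { graphql } from "gatsby";

export default function S3ObjectsPage({ data }) {
  return (
    <ul>
      {data.allS3Object.nodes.map(node => (
        <li key={node.Key}>
          <a href={node.Url}>
            {node.Bucket}/{node.Key}
          </a>
        </li>
      ))}
    </ul>
  );
}

export const query = graphql`
  {
    allS3Object {
      nodes {
        Key
        Bucket
        Url
      }
    }
  }
`;
```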

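As a side note on the access-policy step added to the README above: the same policy could in principle be attached programmatically with the aws-sdk client the plugin already depends on, roughly as sketched below. The helper name, credentials setup, and bucket argument are assumptions for illustration, not part of this commit.

```ts
// Hypothetical helper (not part of this commit): attaches the public-read policy
// from the README to a bucket using the aws-sdk v2 client this plugin depends on.
import AWS from "aws-sdk";

async function applyPublicReadPolicy(bucket: string) {
  const s3 = new AWS.S3(); // assumes credentials and region come from the environment

  const policy = {
    Version: "2008-10-17",
    Statement: [
      {
        Sid: "AllowPublicRead",
        Effect: "Allow",
        Principal: { AWS: "*" },
        Action: "s3:GetObject",
        Resource: `arn:aws:s3:::${bucket}/*`
      }
    ]
  };

  // putBucketPolicy expects the policy document as a JSON string
  await s3
    .putBucketPolicy({ Bucket: bucket, Policy: JSON.stringify(policy) })
    .promise();
}
```
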
src/gatsby-node.ts

Lines changed: 39 additions & 24 deletions
@@ -13,6 +13,8 @@ type pluginOptionsType = {
   buckets: string[];
 };
 
+type ObjectType = AWS.S3.Object & { Bucket: string };
+
 // source all objects from s3
 export async function sourceNodes(
   { actions: { createNode }, createNodeId, createContentDigest, reporter },
@@ -26,42 +28,55 @@ export async function sourceNodes(
   // get objects
   const s3 = new AWS.S3();
 
-  try {
+  const listObjects = async bucket => {
     // todo improve this call
     // see https://stackoverflow.com/a/49888947
     const response = await s3
       .listObjectsV2({
-        // todo handle several buckets
-        Bucket: buckets[0]
+        Bucket: bucket
         // todo handle continuation token
         // ContinuationToken: token,
       })
       .promise();
 
+    // add bucket key
+    const objects = response.Contents?.reduce((acc: ObjectType[], cur) => {
+      const object: ObjectType = { ...cur, Bucket: bucket };
+      acc.push(object);
+      return acc;
+    }, []);
+
+    return objects;
+  };
+
+  try {
+    const objects = await Promise.all(
+      buckets.map(bucket => listObjects(bucket))
+    );
+
     // create file nodes
     // todo touch nodes if they exist already
-    response.Contents &&
-      response.Contents.forEach(async object => {
-        const { Key } = object;
-        const { region } = awsConfig;
-        const node = {
-          // node meta
-          id: createNodeId(`s3-object-${object.Key}`),
-          parent: null,
-          children: [],
-          internal: {
-            type: "S3Object",
-            content: JSON.stringify(object),
-            contentDigest: createContentDigest(object)
-          },
-          // s3 object data
-          Url: `https://s3.${region ? `${region}.` : ""}amazonaws.com/${
-            buckets[0]
-          }/${Key}`,
-          ...object
-        };
-        createNode(node);
+    objects?.flat().forEach(async object => {
+      const { Key, Bucket } = object;
+      const { region } = awsConfig;
+
+      createNode({
+        ...object,
+        // construct url
+        Url: `https://s3.${
+          region ? `${region}.` : ""
+        }amazonaws.com/${Bucket}/${Key}`,
+        // node meta
+        id: createNodeId(`s3-object-${Key}`),
+        parent: null,
+        children: [],
+        internal: {
+          type: "S3Object",
+          content: JSON.stringify(object),
+          contentDigest: createContentDigest(object)
+        }
       });
+    });
   } catch (error) {
     reporter.error(error);
   }
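
The `// todo handle continuation token` comment carried through this diff points at listing more than 1,000 objects per bucket, which `listObjectsV2` returns in pages. A minimal sketch of what that pagination loop could look like with the same aws-sdk v2 client is shown below; the helper name and how it would slot into `listObjects` are assumptions, not part of this commit.

```ts
// Hypothetical pagination sketch (not part of this commit): keeps calling
// listObjectsV2 with the returned continuation token until every page is fetched.
import AWS from "aws-sdk";

async function listAllObjects(
  s3: AWS.S3,
  bucket: string
): Promise<AWS.S3.Object[]> {
  const objects: AWS.S3.Object[] = [];
  let token: string | undefined;

  do {
    const response = await s3
      .listObjectsV2({ Bucket: bucket, ContinuationToken: token })
      .promise();

    objects.push(...(response.Contents ?? []));
    // IsTruncated signals another page; NextContinuationToken feeds the next call
    token = response.IsTruncated ? response.NextContinuationToken : undefined;
  } while (token);

  return objects;
}
```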
