Skip to content

Commit

Permalink
Add bucket serving url
Browse files Browse the repository at this point in the history
  • Loading branch information
mhuebert committed Jul 25, 2024
1 parent 6167caf commit b08cbb2
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 10 deletions.
13 changes: 10 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,7 @@ We will now find in our GCP account:
4. When the service account is added to the identity pool, it also has an attribute mapping specified to restrict usage.
- `attribute.repository` - `probcomp/gen-website-private`

There is also a second identity pool, `gen-website-private-publishers`, which grants all probcomp repositories access to the private bucket within GitHub Actions.

Using this identity pool, a GitHub action in any probcomp website can modify the `gen-website-private` bucket without restriction.

Expand All @@ -73,4 +72,12 @@ The certificate was free and expires in 15 years; it's only useful for use betwe

### CORS

CORS support is handled by `cors-config.json` which was added to the bucket via `gsutil cors set cors-config.json gs://gen-website-private` ([details](https://stackoverflow.com/questions/45273514/google-cloud-storage-gcs-cors-wildcard))

### Accessing Files from Other Buckets

This server now supports accessing files from any Google Cloud Storage bucket that grants read access to the `gen-website-private-admin@probcomp-caliban.iam.gserviceaccount.com` service account. You can access these files using the following URL pattern:

```
https://probcomp-caliban.uc.r.appspot.com/bucket/<BUCKET_NAME>/<FILE_PATH>
```
32 changes: 25 additions & 7 deletions src/server.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -22,15 +22,15 @@ const HTML_MAX_AGE = 60;

export const app = express();
const storage = new Storage();
const bucket = storage.bucket(BUCKET_NAME);
const default_bucket = storage.bucket(BUCKET_NAME);

const generateSignedUrl = memoizee(async (bucketPath) => {
const generateSignedUrl = memoizee(async (bucketName, bucketPath) => {
const options = {
version: 'v4',
action: 'read',
expires: Date.now() + 60 * 60 * 1000, // Signed URL is valid for 60 minutes
};
const [url] = await bucket.file(bucketPath).getSignedUrl(options);
const [url] = await storage.bucket(bucketName).file(bucketPath).getSignedUrl(options);
return [url, options.expires];
}, { maxAge: 50 * 60 * 1000 }); // Cache for 50 minutes

Expand All @@ -51,7 +51,7 @@ const handleResponseError = (res, error) => {

const pipeFile = async (res, path) => {
try {
const bucketStream = bucket.file(path).createReadStream()
const bucketStream = default_bucket.file(path).createReadStream()
bucketStream.on('error', (error) => handleResponseError(res, error))
const { mime, stream } = await getMimeType(bucketStream, { filename: path });
res.setHeader('Content-Type', mime);
Expand All @@ -71,15 +71,15 @@ const redirectFile = async (res, path) => {
// caching of private content. These headers *should* only control the redirect itself,
// but they cause Safari to refuse to cache the destination as well.

const [signedUrl, expires] = await generateSignedUrl(path);
const [signedUrl, expires] = await generateSignedUrl(BUCKET_NAME, path);
const maxAge = (expires - Date.now()) / 1000; // Calculate max-age in seconds
res.setHeader('Expires', new Date(expires).toUTCString());
res.setHeader('Cache-Control', `private, max-age=${maxAge}`);
res.redirect(302, signedUrl);
};

const serveHtml = async (res, path) => {
const htmlFile = bucket.file(path);
const htmlFile = default_bucket.file(path);
res.setHeader('Content-Type', 'text/html');
res.setHeader('Cache-Control', `private, max-age=${HTML_MAX_AGE}`);
return new Promise((resolve, reject) => {
Expand Down Expand Up @@ -181,6 +181,24 @@ const handleRequest = async (parentDomain, subDomain, filePath, req, res) => {
}
};


// Serve files from arbitrary GCS buckets by redirecting to a short-lived signed URL.
// GET /bucket/<BUCKET_NAME>/<FILE_PATH> — the service account must have read access
// to the target bucket (see README, "Accessing Files from Other Buckets").
app.get('/bucket/:bucketName/*', async (req, res) => {
  const { bucketName } = req.params;
  const filePath = req.params[0];

  // The `*` wildcard matches an empty trailing path; reject it up front with a
  // 400 rather than attempting to sign "" and surfacing a 500.
  if (!filePath) {
    return res.status(400).send('Missing file path');
  }

  try {
    const [signedUrl, expires] = await generateSignedUrl(bucketName, filePath);
    // Cache-Control max-age must be a non-negative integer (delta-seconds,
    // RFC 7234 §5.2.2.8) — floor the remaining lifetime and clamp at zero.
    const maxAge = Math.max(0, Math.floor((expires - Date.now()) / 1000));
    res.setHeader('Expires', new Date(expires).toUTCString());
    res.setHeader('Cache-Control', `private, max-age=${maxAge}`);
    res.redirect(302, signedUrl);
  } catch (error) {
    // NOTE(review): signing can fail for missing buckets/permissions as well as
    // transient errors; everything currently maps to 500 — confirm this is intended.
    console.error(`Error generating signed URL for ${bucketName}/${filePath}:`, error);
    res.status(500).send('Internal Server Error');
  }
});

if (process.env.ENV == 'dev') {
app.get('/:parentDomain/:subDomain/*', async (req, res) => {
await handleRequest(req.params.parentDomain, req.params.subDomain, req.params[0], req, res);
Expand Down Expand Up @@ -209,4 +227,4 @@ export const serve = (PORT) => {
app.listen(PORT, () => {
console.log(`Server is running on port ${PORT}`);
});
}
}

0 comments on commit b08cbb2

Please sign in to comment.