Skip to content

Commit f9404ed

Browse files
updating GCP doco
1 parent b351a46 commit f9404ed

3 files changed

Lines changed: 57 additions & 77 deletions

File tree

src/bluemix/experiments/get.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ let storage = new Storage.S3(s3Config);
2121
function getURL(fileName, bucketName) {
2222
let params = { Bucket: bucketName, Key: fileName, Expires: 600 };
2323
return new Promise((resolve, reject) => {
24-
storage.getSignedUrl('getObject', params, function(err, url) {
24+
storage.getSignedUrl('getObject', params, function(err, url) { // Doesn't work with SigV4 :(
2525
if (err) reject(err);
2626
else resolve(url);
2727
});
Lines changed: 34 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,9 @@
22

33
// Initial template modified from https://console.bluemix.net/docs/services/cloud-object-storage/hmac/presigned-urls.html#create-a-presigned-url
44

5+
// I've been searching around for a server-side presigning service similar to GCP's,
6+
// at the moment it appears you need to use this client-side code base, or an older version of signing (i.e. not SigV4)
7+
58
const crypto = require('crypto');
69
const moment = require('moment');
710
const https = require('https');
@@ -12,48 +15,50 @@ const key = require('./../keys/ibm_storage.json');
1215
const accessKey = key.cos_hmac_keys.access_key_id;
1316
const secretKey = key.cos_hmac_keys.secret_access_key;
1417
const httpMethod = 'GET';
15-
const host = '{endpoint}';
16-
const region = '';
18+
const host = 's3.au-syd.cloud-object-storage.appdomain.cloud';
19+
const region = 'au-syd';
1720
const endpoint = 'https://' + host;
21+
1822
const bucket = 'example-bucket';
19-
const objectKey = 'example-object'
20-
const expiration = 86400 // time in seconds
23+
const objectKey = 'example-object';
24+
25+
const expiration = 86400; // time in seconds
2126

2227
// hashing and signing methods
2328
function hash(key, msg) {
24-
var hmac = crypto.createHmac('sha256', key);
25-
hmac.update(msg, 'utf8');
26-
return hmac.digest();
29+
let hmac = crypto.createHmac('sha256', key);
30+
hmac.update(msg, 'utf8');
31+
return hmac.digest();
2732
}
2833

2934
function hmacHex(key, msg) {
30-
var hmac = crypto.createHmac('sha256', key);
31-
hmac.update(msg, 'utf8');
32-
return hmac.digest('hex');
35+
let hmac = crypto.createHmac('sha256', key);
36+
hmac.update(msg, 'utf8');
37+
return hmac.digest('hex');
3338
}
3439

3540
function hashHex(msg) {
36-
var hash = crypto.createHash('sha256');
37-
hash.update(msg);
38-
return hash.digest('hex');
41+
let hash = crypto.createHash('sha256');
42+
hash.update(msg);
43+
return hash.digest('hex');
3944
}
4045

4146
// region is a wildcard value that takes the place of the AWS region value
4247
// as COS doesn't use the same conventions for regions, this parameter can accept any string
4348
function createSignatureKey(key, datestamp, region, service) {
44-
keyDate = hash(('AWS4' + key), datestamp);
45-
keyString = hash(keyDate, region);
46-
keyService = hash(keyString, service);
47-
keySigning = hash(keyService, 'aws4_request');
48-
return keySigning;
49+
keyDate = hash(('AWS4' + key), datestamp);
50+
keyString = hash(keyDate, region);
51+
keyService = hash(keyString, service);
52+
keySigning = hash(keyService, 'aws4_request');
53+
return keySigning;
4954
}
5055

5156
function createHexSignatureKey(key, datestamp, region, service) {
52-
keyDate = hashHex(('AWS4' + key), datestamp);
53-
keyString = hashHex(keyDate, region);
54-
keyService = hashHex(keyString, service);
55-
keySigning = hashHex(keyService, 'aws4_request');
56-
return keySigning;
57+
keyDate = hashHex(('AWS4' + key), datestamp);
58+
keyString = hashHex(keyDate, region);
59+
keyService = hashHex(keyString, service);
60+
keySigning = hashHex(keyService, 'aws4_request');
61+
return keySigning;
5762
}
5863

5964
function printDebug() {
@@ -152,13 +157,13 @@ console.log(`\nSending ${httpMethod} request to IBM COS -----------------------`
152157
console.log('Request URL = ' + requestUrl);
153158

154159
var request = https.get(requestUrl, function (response) {
155-
console.log('\nResponse from IBM COS ----------------------------------');
156-
console.log(`Response code: ${response.statusCode}\n`);
160+
console.log('\nResponse from IBM COS ----------------------------------');
161+
console.log(`Response code: ${response.statusCode}\n`);
157162

158-
response.on('data', function (chunk) {
159-
console.log('Response: ' + chunk);
160-
printDebug();
161-
});
163+
response.on('data', function (chunk) {
164+
console.log('Response: ' + chunk);
165+
printDebug();
166+
});
162167
});
163168

164169
request.end();

src/gcp/README.MD

Lines changed: 22 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -1,76 +1,51 @@
1-
# ImageAPI
2-
This project is experimenting with different providers of cloud based web hooks and storage.
3-
[Note: This is in the very early stages of the experimentation and is not currently functional. The specs and approach may change without any notice.]
1+
# ImageAPI-GCP
2+
This sub-project implements a basic Image management API in GCP. (index.js)
43

5-
The base use case is the ability to upload remote images into a cloud storage bucket as well as to retrieve and resize them.
6-
(Note: some providers have resizing capabilities inbuilt [eg. GCP], this project will examine other methods to implement the same)
4+
Please note: `index.js` contains the latest functioning code. `experiments/` is from my initial experimentation and may not be functional.
75

8-
## API Requirements
9-
- Upload Images via URL
10-
- Download Images / provide Signed URL
11-
- Resize Images
12-
- 80 day expiry
13-
14-
### Additional Considerations
15-
- Real time updates for large uploads
16-
- Performance [Optimization for write heavy load]
17-
- Caching
18-
- Security [Who can access which images]
19-
- Resilience []
6+
## Current State
7+
The first phase of this module has been completed.
8+
This encompasses the basic functionality of uploading a remote image (via stream), and providing a signed URL by which to access that image. It consists of one REST endpoint.
209

2110
## Setup
2211
1. Create Project and add Project_ID to config.json
2312
2. Create a service account with access to upload Google Functions and save the key in keys/gcp_functions.json
2413
3. Create Bucket in Google Cloud Storage and update bucket_name in config.json
2514
4. Create a service account with write permissions to your google storage bucket and save the key in keys/gcp_storage.json
26-
TODO: add script to deploy.sh to add Project_ID to bucket name
27-
28-
29-
### Service Accounts
30-
31-
## Leads
32-
Consider spinning up app in Firebase
33-
Resumable file uploads: https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload
34-
https://github.com/mkahn5/gcloud-resumable-uploads/blob/master/views/index.ejs
35-
Google Websocket demo: https://github.com/GoogleCloudPlatform/nodejs-docs-samples/tree/master/appengine/websockets
36-
Firebase approach to uploads: https://firebase.google.com/docs/storage/web/upload-files#monitor_upload_progress
37-
Pub/Sub: https://github.com/googleapis/nodejs-pubsub/
38-
3915

4016
## TODO
4117
- Filetype verification
4218
- Partitioned file uploads
4319
- Caching
44-
- Match Catch expiry to Bucket item expiration
45-
46-
## Architecture
47-
The API endpoint footprint is small and doesn't need to maintain any state. This makes it a prime candidate for Lamba-style webhooks. (Benefits: Auto-scaling, minimal upkeep, smaller code footprint etc.)
48-
User will interact with a single REST endpoint (/images) through GET and PUT requests.
49-
50-
![Image API GET sequence](/doc_assets/GET.png)
20+
- Match Cache expiry to Bucket item expiration
21+
- Add script to deploy.sh to add Project_ID to bucket name
22+
- Security and signing
5123

52-
![Image API PUT sequence](/doc_assets/PUT.png)
53-
54-
55-
### Provider
56-
Note: GCP's Node 8 driver is still Beta.
57-
58-
### Implementation
24+
## Implementation Notes
5925
#### Item Expiry
6026
The item expiry can be handled directly in the data bucket policy.
6127
`gsutil mb --retention 80d gs://imageapi` (note: GCS bucket names must be lowercase)
6228
(For a more nuanced/extensible approach we could specify a more detailed lifecycle policy or have a lambda function launched via scheduled CRON job - eg. via cloud scheduler)
6329

30+
### GCP's Node 8 driver is still Beta.
6431

65-
## Implementation Notes
66-
### Code/Folder Structure - gcloud deploy limitations
32+
### Code/Folder Structure - gcloud deploy limitations
6733
The `gcloud deploy` command, unfortunately, appears to only work on a singular local index.js file (or remote repositories).
6834

69-
### (Prepackaged solutions)
35+
### (Prepackaged solutions)
7036
Google's AppEngine provides ready made image servicing, including resizing and cropping.
7137
https://cloud.google.com/appengine/docs/standard/python/refdocs/google.appengine.api.images#google.appengine.api.images.get_serving_url
7238
https://medium.com/google-cloud/uploading-resizing-and-serving-images-with-google-cloud-platform-ca9631a2c556
7339

7440
### Performance considerations
7541
#### HTTP Requests to Google Cloud
7642
We're currently utilizing Google's prebuilt node module for access. For a smaller footprint, to aid with speed of load for the cloud functions, we could use a streamlined request library to interact directly with their REST API.
43+
44+
45+
## Leads for future research
46+
Consider spinning up app in Firebase
47+
Resumable file uploads: https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload
48+
https://github.com/mkahn5/gcloud-resumable-uploads/blob/master/views/index.ejs
49+
Google Websocket demo: https://github.com/GoogleCloudPlatform/nodejs-docs-samples/tree/master/appengine/websockets
50+
Firebase approach to uploads: https://firebase.google.com/docs/storage/web/upload-files#monitor_upload_progress
51+
Pub/Sub: https://github.com/googleapis/nodejs-pubsub/

0 commit comments

Comments
 (0)