Ecosyste.ms: Awesome
An open API service indexing awesome lists of open source software.
https://github.com/ankushtechdev/transfer-files-from-google-cloud-storage-to-aws-s3-bucket
- Host: GitHub
- URL: https://github.com/ankushtechdev/transfer-files-from-google-cloud-storage-to-aws-s3-bucket
- Owner: AnkushTechDev
- Created: 2021-04-14T07:40:41.000Z (over 3 years ago)
- Default Branch: main
- Last Pushed: 2021-05-01T06:48:59.000Z (over 3 years ago)
- Last Synced: 2024-04-05T18:48:27.022Z (7 months ago)
- Language: JavaScript
- Size: 3.91 KB
- Stars: 0
- Watchers: 2
- Forks: 0
- Open Issues: 0
Metadata Files:
- Readme: README.md
Awesome Lists containing this project
README
# Transfer files from Google Cloud Storage to an AWS S3 bucket
```js
// Imports assumed by the original snippet (it uses these clients without declaring them).
const {BigQuery} = require('@google-cloud/bigquery');
const {Storage} = require('@google-cloud/storage');
const AWS = require('aws-sdk');
const axios = require('axios');
const fileType = require('file-type'); // CommonJS build (v16 or earlier)

const bigquery = new BigQuery();
const storageTwo = new Storage();

exports.helloWorld = async (req, res) => {
  const message = req.query.message || req.body.message || 'Hello World!';
  const datasetId = ''; // fill in the BigQuery dataset ID
  const bucketName = BUCKET_NAME; // placeholder for the GCS bucket name

  // Build a YYYYMMDD suffix for the daily table and export file.
  const now = new Date();
  const date = ('0' + now.getDate()).slice(-2);
  const month = ('0' + (now.getMonth() + 1)).slice(-2); // getMonth() is zero-based
  const year = now.getFullYear();
  const tableId = 'events_' + year + month + date;
  const filename = 'events-' + year + month + date + '.json';

  // Export the BigQuery table to Cloud Storage.
  // Location must match that of the source table.
  const options = {
    location: 'US',
    gzip: true,
  };
  const [job] = await bigquery
    .dataset(datasetId)
    .table(tableId)
    .extract(storageTwo.bucket(bucketName).file(filename), options);
  console.log(`Job ${job.id} created.`);

  // Find the exported file and copy it to S3 via a V4 signed URL.
  const [files] = await storageTwo.bucket(bucketName).getFiles();
  files.forEach(file => {
    if (file.name !== filename) {
      return;
    }
    file.getSignedUrl({
      version: 'v4',
      action: 'read',
      expires: Date.now() + 60 * 60 * 1000, // 60 minutes
    }, (err, url) => {
      if (err) {
        console.error(err);
        return;
      }
      // Download the exported file through the signed URL.
      axios.get(url, {responseType: 'arraybuffer'})
        .then(response => {
          const buffer = Buffer.from(response.data);
          return (async () => {
            const type = (await fileType.fromBuffer(buffer)).mime;
            const params = {
              Key: file.name,
              Body: buffer,
              Bucket: '', // fill in the destination S3 bucket
              ContentType: type,
              ACL: 'private', // keep the object private in S3
            };
            AWS.config.update({
              accessKeyId: '',
              secretAccessKey: '',
            });
            const s3 = new AWS.S3();
            s3.upload(params).promise().then(response => {
              console.log('response', response);
            }, err => {
              console.log('err 2', err);
            });
          })();
        })
        .catch(err => {
          console.log('err 3', err);
        });
    });
  });

  res.status(200).send(message);
};
```
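
Since the snippet is exported as an HTTP Cloud Function handler, it can be smoke-tested locally by calling it with stubbed request and response objects. The sketch below is an assumption, not part of the repository: the file name `index.js` and the mock `req`/`res` objects are made up for illustration, and the placeholders above (dataset ID, bucket names, AWS keys) still have to be filled in for the transfer itself to run.

```js
// Hypothetical local smoke test; assumes the handler above is saved as index.js.
const {helloWorld} = require('./index');

const req = {query: {message: 'transfer started'}, body: {}};
const res = {
  status(code) { console.log('status:', code); return this; }, // mimic Express-style chaining
  send(body) { console.log('response:', body); },
};

helloWorld(req, res).catch(console.error);
```

In production the handler would typically be deployed as an HTTP-triggered Cloud Function (for example with `gcloud functions deploy`), with the AWS credentials supplied through environment variables or a secret manager rather than hard-coded in the source.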