6184a70ba4
The upstream GCP aggressively closes the connection once it has received Content-Length bytes. However, the @google-cloud/storage module doesn't handle this well and emits no event in this case. We were setting Content-Length because it's slightly more efficient and was needed for our download progress bar (no longer the case). The download works fine without Content-Length, and omitting it allows the storage stream to finish before the upstream socket is closed.
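As a rough sketch of what the change implies on the download path (this assumes an Express-style route in front of the GCSStorage module shown below; the route, bucket name, and variable names are illustrative, not taken from the commit):

const express = require('express');
const GCSStorage = require('./gcs'); // hypothetical path to the module below

const app = express();
const storage = new GCSStorage({ gcs_bucket: 'my-bucket' }, console);

app.get('/download/:id', (req, res) => {
  // Previously a Content-Length header was derived from storage.length(id).
  // The upstream then closed the connection as soon as that many bytes had
  // passed through, and @google-cloud/storage emitted no event for it.
  // Leaving Content-Length off lets the storage stream emit 'end' before
  // the socket goes away.
  const fileStream = storage.getStream(req.params.id);
  fileStream.on('error', () => res.destroy());
  fileStream.pipe(res);
});

app.listen(3000);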
37 lines · 762 B · JavaScript
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();

class GCSStorage {
  constructor(config, log) {
    this.bucket = storage.bucket(config.gcs_bucket);
    this.log = log;
  }

  async length(id) {
    // getMetadata() resolves to an array; the first element holds the
    // object metadata, including its size in bytes.
    const data = await this.bucket.file(id).getMetadata();
    return data[0].size;
  }

  getStream(id) {
    // validation: false disables checksum verification so the object can
    // be streamed straight through to the caller.
    return this.bucket.file(id).createReadStream({ validation: false });
  }

  set(id, file) {
    // Pipe the incoming stream into GCS and resolve once the upload finishes.
    return new Promise((resolve, reject) => {
      file
        .pipe(this.bucket.file(id).createWriteStream())
        .on('error', reject)
        .on('finish', resolve);
    });
  }

  del(id) {
    return this.bucket.file(id).delete();
  }

  ping() {
    // Health check: confirm the configured bucket is reachable.
    return this.bucket.exists();
  }
}

module.exports = GCSStorage;
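A minimal usage sketch of the class above (the bucket name, object id, and local paths are placeholders, and the logger is simply console):

const fs = require('fs');
const GCSStorage = require('./gcs'); // assumes the module above is saved as gcs.js

const store = new GCSStorage({ gcs_bucket: 'my-bucket' }, console);

async function roundTrip() {
  // Upload a local file under an id, then read back its stored size.
  await store.set('example-id', fs.createReadStream('./upload.bin'));
  const size = await store.length('example-id');
  console.log(`stored ${size} bytes`);

  // Stream the object back out; nothing here needs to know its length up front.
  store.getStream('example-id').pipe(fs.createWriteStream('./download.bin'));
}

roundTrip().catch(console.error);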