Mirror of https://github.com/openstf/stf

Make ESLint happy and use the existing style where possible.

Simo Kinnunen 2015-11-06 02:31:28 +09:00
parent 3d6ddf79dd
commit 467ed01903
2 changed files with 55 additions and 44 deletions


@@ -764,19 +764,24 @@ program
     , String)
   .option('--profile <name>'
     , 'your aws credentials profile name'
-    , String
-    , 'stf-storage')
+    , String)
   .option('--endpoint <endpoint>'
     , 'your buckets endpoint'
-    , String
-    , 's3-ap-northeast-1.amazonaws.com')
+    , String)
   .action(function(options) {
-    require('./units/storage/amazons3')({
+    if (!options.profile) {
+      this.missingArgument('--profile')
+    }
+    if (!options.endpoint) {
+      this.missingArgument('--endpoint')
+    }
+    require('./units/storage/s3')({
       port: options.port
     , profile: options.profile
     , bucket: options.bucket
     , endpoint: options.endpoint
-    , expires: options.expires
     })
   })
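
The change above drops the baked-in option defaults and instead validates inside the .action() handler using commander's missingArgument(), which prints an error and exits the process. A minimal standalone sketch of the same pattern (the command name and the final log line are illustrative, not stf's actual CLI wiring):

var program = require('commander')

program
  .command('storage-s3-demo')  // hypothetical command name
  .option('--profile <name>', 'your aws credentials profile name', String)
  .option('--endpoint <endpoint>', 'your buckets endpoint', String)
  .action(function(options) {
    // with no defaults, the handler must reject missing options itself
    if (!options.profile) {
      this.missingArgument('--profile')
    }
    if (!options.endpoint) {
      this.missingArgument('--endpoint')
    }
    console.log('would start the storage unit with profile "%s"', options.profile)
  })

program.parse(process.argv)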


@@ -11,21 +11,19 @@ var Promise = require('bluebird')
 var uuid = require('node-uuid')
 var AWS = require('aws-sdk')
 
-var lifecycle = require('../../util/lifecycle')
 var logger = require('../../util/logger')
-var requtil = require('../../util/requtil')
 
 module.exports = function(options) {
   var log = logger.createLogger('storage:s3')
     , app = express()
     , server = http.createServer(app)
-    , credentials = new AWS.SharedIniFileCredentials({
-        profile: options.profile
-      })
 
-  AWS.config.credentials = credentials;
-  var s3 = new AWS.S3(options)
+  var s3 = new AWS.S3({
+    credentials: new AWS.SharedIniFileCredentials({
+      profile: options.profile
+    })
+  , endpoint: options.endpoint
+  })
 
   app.set('strict routing', true)
   app.set('case sensitive routing', true)
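
Passing the credentials and endpoint to the AWS.S3 constructor scopes them to this one client instead of mutating the process-wide AWS.config. A minimal sketch of the same construction plus a callback-style smoke test (the profile, endpoint and bucket values are placeholders):

var AWS = require('aws-sdk')

var s3 = new AWS.S3({
  credentials: new AWS.SharedIniFileCredentials({profile: 'default'})
, endpoint: 's3-ap-northeast-1.amazonaws.com'
})

// quick check that the bucket is reachable with these credentials
s3.listObjects({Bucket: 'stf-storage', MaxKeys: 1}, function(err, data) {
  if (err) {
    console.error('S3 access failed', err.stack)
  }
  else {
    console.log('S3 reachable, listed %d object(s)', data.Contents.length)
  }
})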
@@ -37,23 +35,27 @@ module.exports = function(options) {
   function putObject(plugin, file) {
     return new Promise(function(resolve, reject) {
       var id = uuid.v4()
-      var rs = fs.createReadStream(file.path)
       s3.putObject({
         Key: id
-      , Body: rs
+      , Body: fs.createReadStream(file.path)
       , Bucket: options.bucket
       , Metadata: {
           plugin: plugin
         , name: file.name
         }
-      }, function(err, data) {
+      }, function(err) {
         if (err) {
-          log.error('failed to store "%s" bucket:"%s"', id, options.bucket)
-          log.error(err);
-          reject(err);
-        } else {
-          log.info('Stored "%s" to %s/%s', file.name, options.bucket, id)
+          log.error(
+            'Unable to store "%s" as "%s/%s"'
+          , file.temppath
+          , options.bucket
+          , id
+          , err.stack
+          )
+          reject(err)
+        }
+        else {
+          log.info('Stored "%s" as "%s/%s"', file.name, options.bucket, id)
           resolve(id)
         }
       })
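
Since putObject() now resolves with the generated id and rejects on failure, multiple uploads compose naturally with bluebird. A sketch of storing several parsed files in parallel (files is assumed to be a formidable "files" object and storeAll is a hypothetical helper, similar in spirit to what the upload route further down does):

var Promise = require('bluebird')

function storeAll(plugin, files) {
  return Promise.all(Object.keys(files).map(function(field) {
    return putObject(plugin, files[field]).then(function(id) {
      return {field: field, id: id}
    })
  }))
}

// storeAll('apk', files).then(function(storedFiles) { ... })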
@@ -69,7 +71,7 @@ module.exports = function(options) {
     )
   }
 
-  app.post('/s/upload/:plugin', function(req, res, next) {
+  app.post('/s/upload/:plugin', function(req, res) {
     var form = new formidable.IncomingForm()
     var plugin = req.params.plugin
     Promise.promisify(form.parse, form)(req)
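
Promise.promisify(form.parse, form) is bluebird 2.x style: the second argument is the receiver ("this") for the wrapped node-style function, the same pattern the cleanup below applies to fs.unlink. A tiny sketch (the path is a placeholder; bluebird 3.x would spell the receiver as {context: fs}):

var Promise = require('bluebird')
var fs = require('fs')

var unlinkAsync = Promise.promisify(fs.unlink, fs)

unlinkAsync('/tmp/upload_1234')  // hypothetical temp file
  .catch(function(err) {
    console.warn('cleanup failed:', err.message)
  })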
@@ -91,11 +93,10 @@ module.exports = function(options) {
       })
       .then(function(storedFiles) {
         res.status(201).json({
-          success: true,
-          resources: (function() {
+          success: true
+        , resources: (function() {
             var mapped = Object.create(null)
             storedFiles.forEach(function(file) {
-              var plugin = req.params.plugin
               mapped[file.field] = {
                 date: new Date()
               , plugin: plugin
@@ -109,41 +110,46 @@ module.exports = function(options) {
         })
         return storedFiles
       })
-      .then(function (storedFiles){
-        storedFiles.forEach(function (file){
-          fs.unlink(file.temppath)
-        })
+      .then(function(storedFiles) {
+        return Promise.all(storedFiles.map(function(file) {
+          return Promise.promisify(fs.unlink, fs)(file.temppath)
+            .catch(function(err) {
+              log.warn('Unable to clean up "%s"', file.temppath, err.stack)
+              return true
+            })
+        }))
       })
       .catch(function(err) {
         log.error('Error storing resource', err.stack)
         res.status(500)
           .json({
-            success: false,
-            error: 'ServerError'
+            success: false
+          , error: 'ServerError'
           })
       })
   })
 
   app.get('/s/blob/:id/:name', function(req, res) {
     var params = {
-      Key: req.params.id,
-      Bucket: options.bucket
+      Key: req.params.id
+    , Bucket: options.bucket
     }
     s3.getObject(params, function(err, data) {
       if (err) {
-        log.error('failed to retreive[' + path + ']')
-        log.error(err, err.stack);
+        log.error('Unable to retrieve "%s"', path, err.stack)
         res.sendStatus(404)
-      } else {
-        res.set({
-          'Content-type': data.ContentType
-        })
-        res.send(data.Body)
+        return
       }
+      res.set({
+        'Content-Type': data.ContentType
+      })
+      res.send(data.Body)
     })
   })
 
-  // initialize
   server.listen(options.port)
   console.log('Listening on port %d', options.port)
 }
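
For reference, once the unit is listening a stored blob can be fetched back through the GET /s/blob/:id/:name route. A tiny sketch (host, port, id and filename are placeholders):

var http = require('http')

http.get('http://localhost:7100/s/blob/SOME-UUID/app.apk', function(res) {
  console.log('status %d, content-type %s', res.statusCode, res.headers['content-type'])
  res.resume()  // discard the body in this sketch
})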