Manuel Romero
2019-12-11 11:00:47 +01:00
parent 06ebe40aaf
commit d5edf32c04
12 changed files with 60 additions and 69 deletions

.gitignore
View File

@@ -37,11 +37,10 @@ testem.log
 .DS_Store
 Thumbs.db
-**/*/proc-logs/**/*.log
+/logs/**/*.log
 **/*/.terraform*
 **/*/terraform.tfstate*
-scenarios_templates/azqmi-qseok/bin
 .vscode/
-server/scenarios_templates*
-server/az-tf-templates
+/scenarios_templates*
+/az-tf-templates

View File

@@ -59,6 +59,7 @@ services:
       - npm install & npm run dev
     volumes:
       - .:/var/www/app
+      - ./logs:/logs
     depends_on:
       - mongo
       - redis
@@ -85,6 +86,7 @@ services:
command: "sh -c 'npm install && npm run worker:dev'" command: "sh -c 'npm install && npm run worker:dev'"
volumes: volumes:
- .:/var/www/app - .:/var/www/app
- ./logs:/logs
- /var/run/docker.sock:/home/docker.sock - /var/run/docker.sock:/home/docker.sock
depends_on: depends_on:
- mongo - mongo
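
Note: both the app service and the worker now mount ./logs from the host at /logs inside their containers, so a log file written by the worker is readable by the API container (and vice versa) through the shared directory. A minimal sketch of how a process could open such a log in append mode, assuming the /logs mount shown above; openProvisionLog is a hypothetical helper, not code from this repo:

// Hypothetical helper: open an append-mode stream under the shared /logs volume
const fs = require('fs');
const path = require('path');

function openProvisionLog(id) {
  // e.g. /logs/provision/5df0abc123.log on every container that mounts ./logs:/logs
  const logFile = path.join('/logs', 'provision', `${id}.log`);
  return fs.createWriteStream(logFile, { flags: 'a' }); // 'a' = append, same flag tf.js uses
}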

View File

@@ -1,5 +1,5 @@
 #!/bin/bash
 branch=master
-rm -fr ./server/az-tf-templates
-git clone -b $branch git@gitlab.com:qmi/qmi-cloud-scenarios.git ./server/az-tf-templates
+rm -fr ./az-tf-templates
+git clone -b $branch git@gitlab.com:qmi/qmi-cloud-scenarios.git ./az-tf-templates

View File

@@ -5,25 +5,22 @@ const docker = new Docker({
 const path = require('path');
 const fs = require('fs');
 const PROJECT_PATH = process.env.PROJECT_PATH;
-const scenarioPathOrig = path.join(PROJECT_PATH, '..', 'qmi-cloud-provisions');
 const DOCKERIMAGE = "hashicorp/terraform:0.12.14";
-const init = function( scenario, provisionId ) {
-  const scenarioPath = path.join(scenarioPathOrig, `${scenario}_${provisionId}`);
-  const templatePath = path.join(PROJECT_PATH, 'server', 'az-tf-templates', scenario);
-  const name = `qmi-tf-init-${provisionId}`;
+const init = function( provMongo ) {
+  const templatePath = path.join(PROJECT_PATH, 'az-tf-templates', provMongo.scenario);
+  const name = `qmi-tf-init-${provMongo._id}`;
   console.log(`Provision: will spin up container: ${name}`);
-  const logFile = path.resolve(__dirname, '..', 'proc-logs', 'provision', `${provisionId}.log`);
-  var processStream = fs.createWriteStream(logFile, {flags:'a'});
+  var processStream = fs.createWriteStream(provMongo.logFile, {flags:'a'});
   return docker.run(DOCKERIMAGE, ['init', '-no-color', '-from-module=/template'], processStream, {
-    "Env": ["NODE_ENV=development"],
+    //"Env": ["NODE_ENV=development"],
     "name": name,
     "WorkingDir": "/app",
     "HostConfig": {
       "Binds": [
-        `${scenarioPath}:/app`,
+        `${provMongo.path}:/app`,
         `${templatePath}:/template`
       ]
     }
@@ -34,25 +31,24 @@ const init = function( scenario, provisionId ) {
     return container.remove();
   }).then(function() {
     console.log(`Provision: ${name} removed!`);
-    return "ok";
+    return provMongo;
   });
 }
-const apply = function( scenario, provisionId, userId ) {
-  const scenarioPath = path.join(scenarioPathOrig, `${scenario}_${provisionId}`);
-  const name = `qmi-tf-apply-${provisionId}`;
-  const logFile = path.resolve(__dirname, '..', 'proc-logs', 'provision', `${provisionId}.log`);
+const apply = function( provMongo, user ) {
+  const name = `qmi-tf-apply-${provMongo._id}`;
   console.log(`Provision: will spin up container: ${name}`);
-  var processStream = fs.createWriteStream(logFile, {flags:'a'});
-  //var processStream = process.stdout;
-  return docker.run(DOCKERIMAGE, ['apply', '-no-color', '-var-file=scenario.tfvars', '-var', `provision_id=${provisionId}`, '-var', `user_id=${userId}`, '-auto-approve'], processStream, {
-    "Env": ["NODE_ENV=development"],
+  var processStream = fs.createWriteStream(provMongo.logFile, {flags:'a'});
+  return docker.run(DOCKERIMAGE, ['apply', '-no-color', '-var-file=scenario.tfvars', '-var', `provision_id=${provMongo._id}`, '-var', `user_id=${user}`, '-auto-approve'], processStream, {
+    //"Env": ["NODE_ENV=development"],
     "name": name,
     "WorkingDir": "/app",
     "HostConfig": {
       "Binds": [
-        `${scenarioPath}:/app`,
+        `${provMongo.path}:/app`,
         "/Users/aor/.ssh:/root/.ssh"
       ],
       "NetworkMode": "host"
@@ -64,27 +60,23 @@ const apply = function( scenario, provisionId, userId ) {
     return container.remove();
   }).then(function() {
     console.log(`Provision: ${name} removed!`);
-    const output = fs.readFileSync(logFile);
-    return output;
+    return fs.readFileSync(provMongo.logFile);
   })
 }
-const destroy = function(scenario, provisionId, destroyId) {
-  const scenarioPath = path.join(scenarioPathOrig, `${scenario}_${provisionId}`);
-  console.log("DESTROY PATH", scenarioPath);
-  const name = `qmi-tf-destroy-${destroyId}`;
-  const logFile = path.resolve(__dirname, '..', 'proc-logs', 'destroy', `${destroyId}.log`);
+const destroy = function(destroyMongo, provMongo) {
+  const name = `qmi-tf-destroy-${destroyMongo._id}`;
   console.log(`Destroy Provision: will spin up container: ${name}`);
-  var processStream = fs.createWriteStream(logFile, {flags:'a'});
-  return docker.run(DOCKERIMAGE, ['destroy', '-no-color', '-var-file=scenario.tfvars', "-var", `provision_id=${provisionId}`, '-auto-approve'], processStream, {
-    "Env": ["NODE_ENV=development"],
+  var processStream = fs.createWriteStream(destroyMongo.logFile, {flags:'a'});
+  return docker.run(DOCKERIMAGE, ['destroy', '-no-color', '-var-file=scenario.tfvars', "-var", `provision_id=${destroyMongo.provId}`, '-auto-approve'], processStream, {
+    //"Env": ["NODE_ENV=development"],
     "name": name,
     "WorkingDir": "/app",
     "HostConfig": {
       "Binds": [
-        `${scenarioPath}:/app`,
+        `${provMongo.path}:/app`,
         "/Users/aor/.ssh:/root/.ssh"
       ]
     }
@@ -95,7 +87,7 @@ const destroy = function(scenario, provisionId, destroyId) {
     return container.remove();
   }).then(async function(data) {
     console.log(`Processor Destroy: '${name}' removed!`);
-    return fs.readFileSync(logFile);
+    return fs.readFileSync(destroyMongo.logFile);
   });
 }
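
The runners in tf.js now receive the Mongo documents themselves instead of loose (scenario, id) arguments, and take the bind-mount path and log destination from those documents. A rough sketch of the new call shape, assuming a provision document with the fields used above (_id, scenario, path, logFile); the values are made up for illustration:

// Illustrative provision document; field names match the diff above, values are placeholders
const provMongo = {
  _id: 'abc123',
  scenario: 'my-scenario',
  path: '/host/qmi-cloud-provisions/my-scenario_abc123', // mounted at /app in the container
  logFile: '/logs/provision/abc123.log'
};

tf.init(provMongo)                               // terraform init -from-module=/template
  .then((prov) => tf.apply(prov, 'Some User'))   // terraform apply; resolves with the log contents
  .then((output) => console.log(output.toString()));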

View File

@@ -17,6 +17,7 @@ const destroySchema = new mongoose.Schema({
     type: String,
     default: "queued"
   },
+  logFile: String,
   jobId: String,
   provId: String
 });
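
With logFile persisted on the destroy document, downstream code (the routes below) can serve the log without rebuilding a path from __dirname. A sketch of what such a document might look like after the worker's first update; all values are illustrative:

// Illustrative destroy document after the worker sets status, jobId and logFile
const destroyDoc = {
  _id: 'd001',
  status: 'destroying',
  jobId: '42',
  provId: 'abc123',
  logFile: '/logs/destroy/d001.log'
};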

View File

@@ -3,39 +3,36 @@ const path = require('path');
 const PROJECT_PATH = process.env.PROJECT_PATH;
 const tf = require("./docker/tf.js");
-module.exports = function(job){
-  const scenarioPath = path.join(PROJECT_PATH, '..', 'qmi-cloud-provisions', `${job.data.scenario}_${job.data.id}`);
-  db.provision.update(job.data.id, {
+module.exports = async function(job){
+  const provJob = await db.provision.update(job.data.id, {
     "status": "initializing",
     "jobId": job.id,
-    "logFile": `/proc-logs/${job.data.id}.log`,
-    "path": scenarioPath
+    "logFile": path.join('/logs', 'provision', `${job.data.id}.log`),
+    "path": path.join(PROJECT_PATH, '..', 'qmi-cloud-provisions', `${job.data.scenario}_${job.data.id}`)
   });
   // TERRAFORM INIT
-  return tf.init(job.data.scenario, job.data.id)
-  .then(function() {
-    let update = db.provision.update(job.data.id,{"status": "provisioning"});
-    return update;
-  }).then(function() {
+  return tf.init(provJob)
+  .then(async function() {
+    return await db.provision.update(provJob._id,{"status": "provisioning"});
+  }).then(function(provJobUpdated) {
     // TERRAFORM APPLY
-    return tf.apply(job.data.scenario, job.data.id, job.data.user.displayName);
+    return tf.apply(provJobUpdated, job.data.user.displayName);
   }).then(function(output) {
     let update;
     if ( output.indexOf("Error") !== -1 ) {
-      update = db.provision.update(job.data.id, {"status": "error"});
+      update = db.provision.update(provJob._id, {"status": "error"});
     } else {
-      update = db.provision.update(job.data.id, {"status": "provisioned"});
+      update = db.provision.update(provJob._id, {"status": "provisioned"});
     }
     return update;
   }).then(function(mongoUpdated){
-    return Promise.resolve({"success": true, job: mongoUpdated});
+    return Promise.resolve({"success": true, provMongo: mongoUpdated});
   }).catch(function(err) {
-    console.log("Provision: err", err);
-    db.provision.update(job.data.id, {"status": "error"});
-    return Promise.reject({"success": false, "err": err});
+    console.log("Provision: error", err);
+    db.provision.update(provJob._id, {"status": "error"});
+    return Promise.reject({"success": false, "error": err});
   });
 }
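
One detail worth noting: tf.apply resolves with the raw result of fs.readFileSync, i.e. a Buffer, and the processor scans it with indexOf("Error"). Buffer#indexOf accepts a string needle, so the check works without an explicit toString(); a tiny standalone illustration (the log text is made up):

// Buffer.indexOf accepts a string, so scanning the Terraform log works as written
const log = Buffer.from('Error: building AzureRM client...'); // made-up log line
console.log(log.indexOf('Error') !== -1); // true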

View File

@@ -1,31 +1,33 @@
 const tf = require('./docker/tf.js');
 const db = require('./mongo.js');
+const path = require('path');
 module.exports = async function(job){
-  var destroyJob = await db.destroy.update(job.data.id, {
+  var destroyMongo = await db.destroy.update(job.data.id, {
     "status": "destroying",
-    "jobId": job.id
+    "jobId": job.id,
+    "logFile": path.join('/logs', 'destroy', `${job.data.id}.log`)
   });
-  //var processStream = process.stdout;
-  return tf.destroy(job.data.scenario, job.data.provId, destroyJob._id)
+  var provMongo = await db.provision.getSingle(job.data.provId);
+  return tf.destroy(destroyMongo, provMongo)
   .then(async function(output) {
     let update, update2;
     if ( output.indexOf("Error") !== -1 ) {
-      update = await db.destroy.update(destroyJob._id,{"status": "error"});
-      update2 = await db.provision.update(job.data.provId, {"isDestroyed": false});
+      update = await db.destroy.update(destroyMongo._id,{"status": "error"});
+      update2 = await db.provision.update(destroyMongo.provId, {"isDestroyed": false});
     } else {
-      update = await db.destroy.update(destroyJob._id, {"status": "destroyed"});
-      update2 = await db.provision.update(job.data.provId, {"isDestroyed": true});
+      update = await db.destroy.update(destroyMongo._id, {"status": "destroyed"});
+      update2 = await db.provision.update(destroyMongo.provId, {"isDestroyed": true});
     }
     return { destroy: update, provision: update2 };
   }).then(async function(res) {
     return Promise.resolve({"success": true, job: res});
   }).catch(function(err) {
     console.log("Processor Destroy: err", err);
-    db.destroy.update(destroyJob._id, {"status": "error", "isDestroyed": false});
+    db.destroy.update(destroyMongo._id, {"status": "error", "isDestroyed": false});
     return Promise.reject({"success": false, "err": err});
   });

View File

@@ -1,7 +1,6 @@
 const express = require('express')
 const router = express.Router()
 const db = require('../mongo.js');
-const path = require("path");
 const passport = require('../passport');
@@ -84,7 +83,7 @@ router.get('/:id/logs', passport.ensureAuthenticated ,async (req, res, next) =>
     if (!mongoJob){
       return res.status(404).json({"msg": "Not found"});
     }
-    return res.sendFile(path.resolve(__dirname, '..', 'proc-logs', 'destroy', mongoJob._id +".log"));
+    return res.sendFile(mongoJob.logFile);
   } catch (error) {
     next(error);
   }

View File

@@ -1,7 +1,6 @@
 const express = require('express')
 const router = express.Router()
 const db = require('../mongo.js');
-const path = require("path");
 const passport = require('../passport');
@@ -119,7 +118,7 @@ router.get('/:id/logs', passport.ensureAuthenticated, async (req, res, next) =>
     if (!mongoJob){
       return res.status(404).json({"msg": "Not found"});
     }
-    return res.sendFile(path.resolve(__dirname, '..', 'proc-logs', 'provision', mongoJob._id +".log"));
+    return res.sendFile(mongoJob.logFile);
   } catch (error) {
     next(error);
   }
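
Express's res.sendFile requires an absolute path unless a root option is supplied; since logFile is now stored as an absolute container path under /logs (set by the processors above), passing it straight through is valid. A hedged sketch of the route with an extra guard for documents that predate the logFile field; the getSingle lookup is assumed from this repo's mongo.js (it appears in processor-destroy.js above):

// Sketch only: guard against provisions stored before logFile existed
router.get('/:id/logs', passport.ensureAuthenticated, async (req, res, next) => {
  try {
    const mongoJob = await db.provision.getSingle(req.params.id);
    if (!mongoJob || !mongoJob.logFile) {
      return res.status(404).json({ "msg": "Not found" });
    }
    return res.sendFile(mongoJob.logFile); // absolute path, e.g. /logs/provision/<id>.log
  } catch (error) {
    next(error);
  }
});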

View File

@@ -1,7 +1,7 @@
 #!/bin/bash
 branch=master
-cd ./server/az-tf-templates
+cd ./az-tf-templates
 git checkout master
 git checkout .
 git pull origin $branch