Manuel Romero
2019-12-11 11:00:47 +01:00
parent 06ebe40aaf
commit d5edf32c04
12 changed files with 60 additions and 69 deletions

.gitignore

@@ -37,11 +37,10 @@ testem.log
.DS_Store
Thumbs.db
**/*/proc-logs/**/*.log
/logs/**/*.log
**/*/.terraform*
**/*/terraform.tfstate*
scenarios_templates/azqmi-qseok/bin
.vscode/
server/scenarios_templates*
server/az-tf-templates
/scenarios_templates*
/az-tf-templates

@@ -59,6 +59,7 @@ services:
- npm install & npm run dev
volumes:
- .:/var/www/app
- ./logs:/logs
depends_on:
- mongo
- redis
@@ -85,6 +86,7 @@ services:
command: "sh -c 'npm install && npm run worker:dev'"
volumes:
- .:/var/www/app
- ./logs:/logs
- /var/run/docker.sock:/home/docker.sock
depends_on:
- mongo
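
The two hunks above (apparently from the compose file) bind-mount a host ./logs directory into both the app and worker containers at /logs, so the absolute log paths stored in Mongo, e.g. /logs/provision/<id>.log, point at the same files from either container. A minimal sketch, assuming only that shared bind mount, of a worker appending to such a path and another process reading it back:

// Sketch only: assumes both containers bind-mount ./logs at /logs.
const fs = require('fs');
const path = require('path');

const logFile = path.join('/logs', 'provision', 'example-id.log'); // illustrative id

// Worker side: make sure the directory exists, then append container output.
fs.mkdirSync(path.dirname(logFile), { recursive: true });
fs.appendFileSync(logFile, 'terraform init started\n');

// API side: the same absolute path resolves inside the other container too.
console.log(fs.readFileSync(logFile, 'utf8'));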

@@ -1,5 +1,5 @@
#!/bin/bash
branch=master
rm -fr ./server/az-tf-templates
git clone -b $branch git@gitlab.com:qmi/qmi-cloud-scenarios.git ./server/az-tf-templates
rm -fr ./az-tf-templates
git clone -b $branch git@gitlab.com:qmi/qmi-cloud-scenarios.git ./az-tf-templates

@@ -5,25 +5,22 @@ const docker = new Docker({
const path = require('path');
const fs = require('fs');
const PROJECT_PATH = process.env.PROJECT_PATH;
const scenarioPathOrig = path.join(PROJECT_PATH, '..', 'qmi-cloud-provisions');
const DOCKERIMAGE = "hashicorp/terraform:0.12.14";
const init = function( scenario, provisionId ) {
const init = function( provMongo ) {
const scenarioPath = path.join(scenarioPathOrig, `${scenario}_${provisionId}`);
const templatePath = path.join(PROJECT_PATH, 'server', 'az-tf-templates', scenario);
const name = `qmi-tf-init-${provisionId}`;
const templatePath = path.join(PROJECT_PATH, 'az-tf-templates', provMongo.scenario);
const name = `qmi-tf-init-${provMongo._id}`;
console.log(`Provision: will spin up container: ${name}`);
const logFile = path.resolve(__dirname, '..', 'proc-logs', 'provision', `${provisionId}.log`);
var processStream = fs.createWriteStream(logFile, {flags:'a'});
var processStream = fs.createWriteStream(provMongo.logFile, {flags:'a'});
return docker.run(DOCKERIMAGE, ['init', '-no-color', '-from-module=/template'], processStream, {
"Env": ["NODE_ENV=development"],
//"Env": ["NODE_ENV=development"],
"name": name,
"WorkingDir": "/app",
"HostConfig": {
"Binds": [
`${scenarioPath}:/app`,
`${provMongo.path}:/app`,
`${templatePath}:/template`
]
}
@@ -34,25 +31,24 @@ const init = function( scenario, provisionId ) {
return container.remove();
}).then(function() {
console.log(`Provision: ${name} removed!`);
return "ok";
return provMongo;
});
}
const apply = function( scenario, provisionId, userId ) {
const apply = function( provMongo, user ) {
const scenarioPath = path.join(scenarioPathOrig, `${scenario}_${provisionId}`);
const name = `qmi-tf-apply-${provisionId}`;
const logFile = path.resolve(__dirname, '..', 'proc-logs', 'provision', `${provisionId}.log`);
const name = `qmi-tf-apply-${provMongo._id}`;
console.log(`Provision: will spin up container: ${name}`);
var processStream = fs.createWriteStream(logFile, {flags:'a'});
return docker.run(DOCKERIMAGE, ['apply', '-no-color', '-var-file=scenario.tfvars', '-var', `provision_id=${provisionId}`, '-var', `user_id=${userId}`, '-auto-approve'], processStream, {
"Env": ["NODE_ENV=development"],
var processStream = fs.createWriteStream(provMongo.logFile, {flags:'a'});
//var processStream = process.stdout;
return docker.run(DOCKERIMAGE, ['apply', '-no-color', '-var-file=scenario.tfvars', '-var', `provision_id=${provMongo._id}`, '-var', `user_id=${user}`, '-auto-approve'], processStream, {
//"Env": ["NODE_ENV=development"],
"name": name,
"WorkingDir": "/app",
"HostConfig": {
"Binds": [
`${scenarioPath}:/app`,
`${provMongo.path}:/app`,
"/Users/aor/.ssh:/root/.ssh"
],
"NetworkMode": "host"
@@ -64,27 +60,23 @@ const apply = function( scenario, provisionId, userId ) {
return container.remove();
}).then(function() {
console.log(`Provision: ${name} removed!`);
const output = fs.readFileSync(logFile);
return output;
return fs.readFileSync(provMongo.logFile);
})
}
const destroy = function(scenario, provisionId, destroyId) {
const destroy = function(destroyMongo, provMongo) {
const scenarioPath = path.join(scenarioPathOrig, `${scenario}_${provisionId}`);
console.log("DESTROY PATH", scenarioPath);
const name = `qmi-tf-destroy-${destroyId}`;
const logFile = path.resolve(__dirname, '..', 'proc-logs', 'destroy', `${destroyId}.log`);
const name = `qmi-tf-destroy-${destroyMongo._id}`;
console.log(`Destroy Provision: will spin up container: ${name}`);
var processStream = fs.createWriteStream(logFile, {flags:'a'});
var processStream = fs.createWriteStream(destroyMongo.logFile, {flags:'a'});
return docker.run(DOCKERIMAGE, ['destroy', '-no-color', '-var-file=scenario.tfvars', "-var", `provision_id=${provisionId}`, '-auto-approve'], processStream, {
"Env": ["NODE_ENV=development"],
return docker.run(DOCKERIMAGE, ['destroy', '-no-color', '-var-file=scenario.tfvars', "-var", `provision_id=${destroyMongo.provId}`, '-auto-approve'], processStream, {
//"Env": ["NODE_ENV=development"],
"name": name,
"WorkingDir": "/app",
"HostConfig": {
"Binds": [
`${scenarioPath}:/app`,
`${provMongo.path}:/app`,
"/Users/aor/.ssh:/root/.ssh"
]
}
@@ -95,7 +87,7 @@ const destroy = function(scenario, provisionId, destroyId) {
return container.remove();
}).then(async function(data) {
console.log(`Processor Destroy: '${name}' removed!`);
return fs.readFileSync(logFile);
return fs.readFileSync(destroyMongo.logFile);
});
}
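
The refactor above changes tf.init, tf.apply and tf.destroy to take the Mongo documents directly instead of loose scenario/provisionId arguments; container names, bind mounts and the log stream are all derived from fields on the document (_id, scenario, path, logFile). A minimal sketch of how a caller might drive init and apply after this change, assuming a provision document with those fields (the values are illustrative, not taken from the repo):

// Illustrative only: field names mirror what tf.js reads; the real document comes from Mongo.
const tf = require('./docker/tf.js');

const provMongo = {
  _id: '5df0c0ffee',
  scenario: 'azqmi-qseok',
  path: '/var/www/qmi-cloud-provisions/azqmi-qseok_5df0c0ffee',
  logFile: '/logs/provision/5df0c0ffee.log'
};

tf.init(provMongo)                                  // resolves with the same document
  .then(doc => tf.apply(doc, 'some-user'))
  .then(output => console.log(output.toString()))   // apply resolves with the log file contents
  .catch(err => console.error('terraform run failed', err));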

@@ -17,6 +17,7 @@ const destroySchema = new mongoose.Schema({
type: String,
default: "queued"
},
logFile: String,
jobId: String,
provId: String
});
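
The destroy schema gains a logFile field so each destroy job records where its container output is streamed. A rough sketch of the shape the schema appears to take after this change; the fields outside the visible hunk and the model name are assumptions:

// Assumed shape; only status, logFile, jobId and provId are visible in the hunk above.
const mongoose = require('mongoose');

const destroySchema = new mongoose.Schema({
  status: {
    type: String,
    default: "queued"
  },
  logFile: String, // absolute path such as /logs/destroy/<id>.log
  jobId: String,
  provId: String
});

module.exports = mongoose.model('Destroy', destroySchema); // model name is a guess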

@@ -3,39 +3,36 @@ const path = require('path');
const PROJECT_PATH = process.env.PROJECT_PATH;
const tf = require("./docker/tf.js");
module.exports = function(job){
module.exports = async function(job){
const scenarioPath = path.join(PROJECT_PATH, '..', 'qmi-cloud-provisions', `${job.data.scenario}_${job.data.id}`);
db.provision.update(job.data.id, {
const provJob = await db.provision.update(job.data.id, {
"status": "initializing",
"jobId": job.id,
"logFile": `/proc-logs/${job.data.id}.log`,
"path": scenarioPath
"logFile": path.join('/logs', 'provision', `${job.data.id}.log`),
"path": path.join(PROJECT_PATH, '..', 'qmi-cloud-provisions', `${job.data.scenario}_${job.data.id}`)
});
// TERRAFORM INIT
return tf.init(job.data.scenario, job.data.id)
.then(function() {
let update = db.provision.update(job.data.id,{"status": "provisioning"});
return update;
}).then(function() {
return tf.init(provJob)
.then(async function() {
return await db.provision.update(provJob._id,{"status": "provisioning"});
}).then(function(provJobUpdated) {
// TERRAFORM APPLY
return tf.apply(job.data.scenario, job.data.id, job.data.user.displayName);
return tf.apply(provJobUpdated, job.data.user.displayName);
}).then(function(output) {
let update;
if ( output.indexOf("Error") !== -1 ) {
update = db.provision.update(job.data.id, {"status": "error"});
update = db.provision.update(provJob._id, {"status": "error"});
} else {
update = db.provision.update(job.data.id, {"status": "provisioned"});
update = db.provision.update(provJob._id, {"status": "provisioned"});
}
return update;
}).then(function(mongoUpdated){
return Promise.resolve({"success": true, job: mongoUpdated});
return Promise.resolve({"success": true, provMongo: mongoUpdated});
}).catch(function(err) {
console.log("Provision: err", err);
db.provision.update(job.data.id, {"status": "error"});
return Promise.reject({"success": false, "err": err});
console.log("Provision: error", err);
db.provision.update(provJob._id, {"status": "error"});
return Promise.reject({"success": false, "error": err});
});
}
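
The provision processor is now async: it stores the log-file path and working directory on the provision document up front, then threads that document through tf.init and tf.apply and inspects the returned log output for "Error". A hedged sketch of how such a processor is typically registered and fed (a Bull-style queue is an assumption; the actual queue wiring is not part of this diff):

// Assumption: a Bull-like queue handles these jobs; names and values are illustrative.
const Queue = require('bull');
const provisionProcessor = require('./processor-provision.js'); // hypothetical filename

const provisionQueue = new Queue('provision', process.env.REDIS_URL);
provisionQueue.process(provisionProcessor);

// The processor reads job.data.id, job.data.scenario and job.data.user.displayName.
provisionQueue.add({
  id: '5df0c0ffee',
  scenario: 'azqmi-qseok',
  user: { displayName: 'some-user' }
});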

@@ -1,31 +1,33 @@
const tf = require('./docker/tf.js');
const db = require('./mongo.js');
const path = require('path');
module.exports = async function(job){
var destroyJob = await db.destroy.update(job.data.id, {
var destroyMongo = await db.destroy.update(job.data.id, {
"status": "destroying",
"jobId": job.id
"jobId": job.id,
"logFile": path.join('/logs', 'destroy', `${job.data.id}.log`)
});
//var processStream = process.stdout;
var provMongo = await db.provision.getSingle(job.data.provId);
return tf.destroy(job.data.scenario, job.data.provId, destroyJob._id)
return tf.destroy(destroyMongo, provMongo)
.then(async function(output) {
let update, update2;
if ( output.indexOf("Error") !== -1 ) {
update = await db.destroy.update(destroyJob._id,{"status": "error"});
update2 = await db.provision.update(job.data.provId, {"isDestroyed": false});
update = await db.destroy.update(destroyMongo._id,{"status": "error"});
update2 = await db.provision.update(destroyMongo.provId, {"isDestroyed": false});
} else {
update = await db.destroy.update(destroyJob._id, {"status": "destroyed"});
update2 = await db.provision.update(job.data.provId, {"isDestroyed": true});
update = await db.destroy.update(destroyMongo._id, {"status": "destroyed"});
update2 = await db.provision.update(destroyMongo.provId, {"isDestroyed": true});
}
return { destroy: update, provision: update2 };
}).then(async function(res) {
return Promise.resolve({"success": true, job: res});
}).catch(function(err) {
console.log("Processor Destroy: err", err);
db.destroy.update(destroyJob._id, {"status": "error", "isDestroyed": false});
db.destroy.update(destroyMongo._id, {"status": "error", "isDestroyed": false});
return Promise.reject({"success": false, "err": err});
});
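
The destroy processor now works from two documents: the destroy job it updates (id, logFile) and the provision being torn down, whose path still holds the Terraform working directory and state that the destroy container must bind-mount. A minimal sketch of calling tf.destroy with that pair, using illustrative field values (in the processor both documents come from Mongo):

// Illustrative values; provMongo.path must be the original provision working directory.
const tf = require('./docker/tf.js');

const provMongo = {
  _id: '5df0c0ffee',
  path: '/var/www/qmi-cloud-provisions/azqmi-qseok_5df0c0ffee'
};
const destroyMongo = {
  _id: '5df0dead01',
  provId: provMongo._id,
  logFile: '/logs/destroy/5df0dead01.log'
};

tf.destroy(destroyMongo, provMongo)
  .then(output => console.log(output.toString())) // resolves with the destroy log contents
  .catch(err => console.error('terraform destroy failed', err));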

@@ -1,7 +1,6 @@
const express = require('express')
const router = express.Router()
const db = require('../mongo.js');
const path = require("path");
const passport = require('../passport');
@@ -84,7 +83,7 @@ router.get('/:id/logs', passport.ensureAuthenticated ,async (req, res, next) =>
if (!mongoJob){
return res.status(404).json({"msg": "Not found"});
}
return res.sendFile(path.resolve(__dirname, '..', 'proc-logs', 'destroy', mongoJob._id +".log"));
return res.sendFile(mongoJob.logFile);
} catch (error) {
next(error);
}

@@ -1,7 +1,6 @@
const express = require('express')
const router = express.Router()
const db = require('../mongo.js');
const path = require("path");
const passport = require('../passport');
@@ -119,7 +118,7 @@ router.get('/:id/logs', passport.ensureAuthenticated, async (req, res, next) =>
if (!mongoJob){
return res.status(404).json({"msg": "Not found"});
}
return res.sendFile(path.resolve(__dirname, '..', 'proc-logs', 'provision', mongoJob._id +".log"));
return res.sendFile(mongoJob.logFile);
} catch (error) {
next(error);
}
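
Both log routes above (destroy and provision) now serve the path stored on the Mongo document instead of rebuilding it from __dirname. Note that Express's res.sendFile requires an absolute path unless a root option is passed, which the stored /logs/... paths satisfy. A sketch of the route with an extra existence guard; the guard and the db.destroy.getSingle lookup are assumptions, not part of the commit:

// Sketch only; the commit itself simply calls res.sendFile(mongoJob.logFile).
const express = require('express');
const fs = require('fs');
const router = express.Router();
const db = require('../mongo.js');        // app's own modules, as required in the file above
const passport = require('../passport');

router.get('/:id/logs', passport.ensureAuthenticated, async (req, res, next) => {
  try {
    const mongoJob = await db.destroy.getSingle(req.params.id); // lookup helper is assumed
    if (!mongoJob) {
      return res.status(404).json({ "msg": "Not found" });
    }
    // logFile is stored as an absolute path (/logs/destroy/<id>.log), as res.sendFile expects.
    if (!mongoJob.logFile || !fs.existsSync(mongoJob.logFile)) {
      return res.status(404).json({ "msg": "Log not available yet" });
    }
    return res.sendFile(mongoJob.logFile);
  } catch (error) {
    next(error);
  }
});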

@@ -1,7 +1,7 @@
#!/bin/bash
branch=master
cd ./server/az-tf-templates
cd ./az-tf-templates
git checkout master
git checkout .
git pull origin $branch