Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions .eslintrc.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
{
"env": {
"commonjs": true,
"es6": true,
"node": true
},
"extends": ["prettier", "plugin:sonarjs/recommended"],
"plugins": ["prettier", "sonarjs"],
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018
},
"rules": {
"prettier/prettier": "error",
"sonarjs/cognitive-complexity": "error",
"sonarjs/no-identical-expressions": "error",
"sonarjs/no-duplicate-string": "error",
"sonarjs/no-identical-functions": "error",
"sonarjs/prefer-immediate-return": "error"
}
}
76 changes: 76 additions & 0 deletions .github/workflows/gitops.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
name: Gitops

# Run this workflow every time a new commit is pushed to your repository
on:
workflow_dispatch:
inputs:
environmentValuesFile:
description: 'Gitops environment values file'
required: true
default: 'dev'
push:
branches:
- master
- development
- release/*

jobs:
gitops:
environment: gitops
env:
HELM_REPO_NAME: openstad-kubernetes
HELM_CHART_FOLDER: k8s/openstad
GIT_USER_EMAIL: github@ci.push
GIT_USER_NAME: GitHub
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
HELM_REPO: ${{ secrets.HELM_REPO }}
HELM_REPO_WITH_TOKEN: ${{ secrets.HELM_REPO_WITH_TOKEN }}
GITOPS_RELEASE_BRANCH: ${{ secrets.GITOPS_RELEASE_BRANCH }}
BRANCH_REF: ${{ github.ref }}
GITOPS_VALUES_FILE: k8s/openstad/environments/dev.values.yaml

name: gitops commit
runs-on: ubuntu-latest

services:
docker:
image: docker

steps:
- name: Checkout code
uses: actions/checkout@v2

- name: Set environment to acc
id: acc_values_file
shell: bash
if: contains(github.ref, 'release')
run: echo "GITOPS_VALUES_FILE=k8s/openstad/environments/acc.values.yaml" >> $GITHUB_ENV

- name: Set environment to production
id: prod_values_file
shell: bash
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/master'
run: echo "GITOPS_VALUES_FILE=k8s/openstad/environments/prod.values.yaml" >> $GITHUB_ENV

- name: Set commit SHA & current branch
id: vars
shell: bash
run: |
echo "::set-output name=sha_short::$(git rev-parse --short HEAD)"
echo "::set-output name=current_branch::$(git branch --show-current | sed "s/\//-/g")"

- name: Install yq
run: sudo snap install yq --channel=v3/stable

- name: Run build script
run: docker build -t ${{ secrets.DOCKER_PUBLIC_USERNAME }}/${{ secrets.DOCKER_IMAGE_NAME }}:${{ steps.vars.outputs.current_branch }}-${{ steps.vars.outputs.sha_short }}-${{ github.run_id }} .
shell: bash

- name: Run docker push script
run: |
sudo chmod a+x ./gitops_push
./gitops_push
shell: bash
env:
IMAGE_TAG: ${{ secrets.DOCKER_PUBLIC_USERNAME }}/${{ secrets.DOCKER_IMAGE_NAME }}:${{ steps.vars.outputs.current_branch }}-${{ steps.vars.outputs.sha_short }}-${{ github.run_id }}
11 changes: 11 additions & 0 deletions .github/workflows/lint.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
name: CI
on: push
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install modules
run: npm i
- name: Run ESLint
run: npm run lint
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ typings/
.node_repl_history

images
files

# Output of 'npm pack'
*.tgz
Expand Down
152 changes: 152 additions & 0 deletions app.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
require('dotenv').config();
const express = require('express');
const app = express();
const passport = require('./lib/passport');
const upload = require('./lib/multer');
const ImageServer = require('./lib/imageServer').getImageServer();
const s3 = require('./lib/s3');
const fs = require('fs');
const mime = require('mime-types');

/**
 * Request handler of the image-steam server; requests to /image/* are
 * proxied to it below.
 */
const imageHandler = ImageServer.getHandler();

/**
 * Most errors emitted here are "not found" lookups, so they are ignored.
 * @TODO: requires debugging to confirm whether non-404 errors also end up
 * here and should be logged instead of swallowed.
 */
ImageServer.on('error', (err) => {
// Intentionally empty: don't log 404 errors.
});

/**
 * Set the JSON Content-Type header unless the response headers have
 * already been flushed to the client.
 *
 * @param {object} res - Express/Node response object.
 */
function sendJsonHeaders(res) {
  // Fix: the Express/Node property is `headersSent`; the original checked
  // the nonexistent `headerSent`, so the guard never actually fired.
  if (!res.headersSent) {
    res.setHeader('Content-Type', 'application/json');
  }
}

// Global error-handling middleware (the 4-argument signature is what marks
// it as an error handler for Express).
// NOTE(review): this is registered BEFORE the routes; Express only invokes
// error middleware registered after the route that raised the error —
// confirm this is intentional or move it below the route definitions.
app.use(function (err, req, res, next) {
  // Default to 500 when the error carries no explicit HTTP status.
  const status = err.status ? err.status : 500;
  // Fix: log all server-side failures (5xx). The original `status > 500`
  // silently skipped plain 500 errors — exactly the ones worth logging.
  if (status >= 500) {
    console.error(err);
  }

  sendJsonHeaders(res);
  res.status(status).send(
    JSON.stringify({
      error: err.message,
    })
  );
});

/**
 * Serve an image: strip the `/image` route prefix and delegate the
 * request/response pair to the image-steam handler.
 */
app.get('/image/*', function (req, res, next) {
  // Anchored regex so only the leading route prefix is stripped, never a
  // later occurrence of "/image" inside the file path itself.
  req.url = req.url.replace(/^\/image/, '');

  imageHandler(req, res);
});

/**
 * Create one image. Requires a bearer token and a multipart upload with
 * the file in the `image` field; responds with the public image URL.
 */
app.post(
  '/image',
  passport.authenticate('bearer', { session: false }),
  upload.single('image'),
  (req, res, next) => {
    // req.file is the uploaded `image`; req.body holds any text fields.
    sendJsonHeaders(res);

    // Guard: multer leaves req.file undefined when no `image` field was
    // sent — the original crashed with a TypeError here.
    if (!req.file) {
      return res.status(400).send(JSON.stringify({ error: 'No image uploaded' }));
    }

    // `key` is set by S3-backed storage engines, `filename` by disk storage.
    const filename = req.file.key || req.file.filename;
    res.send(
      JSON.stringify({
        url: process.env.APP_URL + '/image/' + filename,
      })
    );
  }
);

/**
 * Create up to 30 images in one request. Requires a bearer token and a
 * multipart upload with the files in the `images` field; responds with an
 * array of public image URLs.
 */
app.post(
  '/images',
  passport.authenticate('bearer', { session: false }),
  upload.array('images', 30),
  (req, res, next) => {
    // req.files is the array of uploads; req.body holds any text fields.
    sendJsonHeaders(res);

    // Guard: multer leaves req.files undefined when no `images` field was
    // sent — the original crashed calling .map on undefined.
    if (!req.files) {
      return res.status(400).send(JSON.stringify({ error: 'No images uploaded' }));
    }

    res.send(
      JSON.stringify(
        req.files.map((file) => {
          // `key` is set by S3-backed storage engines, `filename` by disk storage.
          const filename = file.key || file.filename;
          return {
            url: process.env.APP_URL + '/image/' + filename,
          };
        })
      )
    );
  }
);

/**
 * Create one (non-image) file. Requires a bearer token and a multipart
 * upload with the file in the `file` field; responds with the public
 * download URL under /files/.
 */
app.post(
  '/file',
  passport.authenticate('bearer', { session: false }),
  upload.single('file'),
  (req, res, next) => {
    // req.file is the uploaded `file`; req.body holds any text fields.
    sendJsonHeaders(res);

    // Guard: multer leaves req.file undefined when no `file` field was
    // sent — the original crashed with a TypeError here.
    if (!req.file) {
      return res.status(400).send(JSON.stringify({ error: 'No file uploaded' }));
    }

    // `key` is set by S3-backed storage engines, `filename` by disk storage.
    const filename = req.file.key || req.file.filename;
    res.send(
      JSON.stringify({
        url: process.env.APP_URL + '/files/' + filename,
      })
    );
  }
);

/**
 * Stream a file to the client as a download attachment, with a
 * Content-Type derived from the file extension.
 *
 * @param {string} filePath - path the file was requested under; only the
 *   part after the last '/' is used for the filename/MIME lookup.
 * @param {stream.Readable} readStream - source stream piped into the response.
 * @param {object} res - Node HTTP response.
 */
function handleFileResponse(filePath, readStream, res) {
  // Extract the bare filename for the MIME lookup and the download name.
  const filename = filePath.substring(filePath.lastIndexOf('/') + 1);
  // Fix: mime.lookup() returns `false` for unknown extensions, which is an
  // invalid header value — fall back to a generic binary type.
  const mimeType = mime.lookup(filename) || 'application/octet-stream';

  res.writeHead(200, {
    'Content-Type': mimeType,
    // Quote the filename and strip quote/CR/LF characters so a crafted
    // file name cannot inject extra response headers.
    'Content-Disposition':
      'attachment; filename="' + filename.replace(/["\r\n]/g, '') + '"',
  });

  readStream.pipe(res);
}

/**
 * Download a stored (non-image) file, from S3 when configured, otherwise
 * from the local filesystem.
 */
app.get('/files/*', async function (req, res, next) {
  const filePath = decodeURI(req.url.replace(/^\/+/, '').replace('files/', ''));

  // Security fix: the remainder of the URL is used directly as a file
  // path, so reject any '..' segment to block path traversal
  // (e.g. /files/../../etc/passwd).
  if (filePath.split('/').includes('..')) {
    res.writeHead(400, { 'Content-Type': 'text/plain' });
    return res.end('ERROR Invalid file path');
  }

  if (s3.isEnabled()) {
    const readStream = s3.getFile(filePath).createReadStream();
    return handleFileResponse(filePath, readStream, res);
  }

  // fs.exists is deprecated; fs.access reports readability instead.
  fs.access(filePath, fs.constants.R_OK, function (err) {
    if (err) {
      // NOTE(review): 404 would be the more accurate status; 400 kept to
      // preserve the existing client-visible contract.
      res.writeHead(400, { 'Content-Type': 'text/plain' });
      res.end('ERROR File does not exist');
    } else {
      const readStream = fs.createReadStream(filePath);
      handleFileResponse(filePath, readStream, res);
    }
  });
});

module.exports = app;
29 changes: 14 additions & 15 deletions db/clients.js
Original file line number Diff line number Diff line change
@@ -1,28 +1,27 @@
const knex = require('../knex/knex.js');
const bookshelf = require('bookshelf')(knex);

// Bookshelf model for the `clients` table. Passing the column names to
// hasTimestamps makes Bookshelf maintain created_at/updated_at itself.
// (The scraped diff showed a duplicated `hasTimestamps` key — `true` then
// the array form; only the array form, which won as the last key, is kept.)
const clients = bookshelf.Model.extend({
  tableName: 'clients',
  hasTimestamps: ['created_at', 'updated_at'],
});

exports.clients = clients;

exports.findByToken = function(token, cb) {
process.nextTick(function() {
clients
.fetchAll()
.then(function (records) {
records = records.serialize();

for (var i = 0, len = records.length; i < len; i++) {
var record = records[i];
if (record.token === token) {
return cb(null, record);
}
exports.findByToken = function (token, cb) {
process.nextTick(function () {
clients.fetchAll().then(function (records) {
records = records.serialize();

for (var i = 0, len = records.length; i < len; i++) {
var record = records[i];
if (record.token === token) {
return cb(null, record);
}
}

return cb(null, null);
return cb(null, null);
});
});
}
};
24 changes: 24 additions & 0 deletions gitops_push
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#!/bin/bash
# Push the freshly built Docker image and bump the image tag in the Helm
# chart repository (GitOps flow). Expects DOCKER_*, GIT_*, HELM_* and
# GITOPS_* variables from the CI environment.
#
# Fix: the original relied on stray `&& \` continuations across blank
# lines and continued past failures; abort on any error instead, and quote
# all expansions so tags/branches with unusual characters don't word-split.
set -euo pipefail

echo "DOCKER LOGIN"
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin

echo "DOCKER PUSH TAG"
echo "${IMAGE_TAG}"
docker push "${IMAGE_TAG}"

# Identify the CI bot for the release commit below.
git config --global user.email "${GIT_USER_EMAIL}"
git config --global user.name "${GIT_USER_NAME}"

# Clone the Helm chart repo that holds the deployment values.
git clone "${HELM_REPO}"
cd "${HELM_REPO_NAME}"

# Token-authenticated remote for pushing; output suppressed so the token
# never leaks into CI logs. `|| true` keeps a pre-existing remote harmless.
git remote add origin-ci "${HELM_REPO_WITH_TOKEN}" > /dev/null 2>&1 || true

git checkout "${GITOPS_RELEASE_BRANCH}"

# Point the deployment at the image that was just pushed.
/snap/bin/yq write -i "${GITOPS_VALUES_FILE}" api.deploymentContainer.image "${IMAGE_TAG}"

git add "${GITOPS_VALUES_FILE}"
git commit -am "Release ${IMAGE_TAG}"
git push --quiet --set-upstream origin-ci "${GITOPS_RELEASE_BRANCH}"
42 changes: 42 additions & 0 deletions lib/imageServer.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
require('dotenv').config();
const s3 = require('./s3');

/**
 * Build and return an image-steam HTTP server configured from environment
 * variables, storing originals on local disk or (when configured) S3.
 *
 * @returns {object} an image-steam http.Connect instance.
 */
module.exports.getImageServer = () => {
  const imgSteam = require('image-steam');

  // Env vars are strings; parse numeric settings, falling back when the
  // variable is unset or not a number.
  const envInt = (value, fallback) => {
    const parsed = Number.parseInt(value, 10);
    return Number.isNaN(parsed) ? fallback : parsed;
  };

  const imageSteamConfig = {
    storage: {
      defaults: {
        driver: 'fs',
        path: './files',
      },
      // Cache lifetimes in seconds; 86400 * 14 = 14 days (the original
      // comment said "24 * 14 hrs", which was wrong).
      cacheTTS: envInt(process.env.CACHE_TTS, 86400 * 14),
      cacheOptimizedTTS: envInt(process.env.CACHE_OPTIMIZED_TTS, 86400 * 14),
      // Fix: the original `process.env.CACHE_ARTIFACTS || true` could never
      // disable caching, because the env string 'false' is truthy.
      cacheArtifacts: process.env.CACHE_ARTIFACTS
        ? process.env.CACHE_ARTIFACTS !== 'false'
        : true,
    },
    throttle: {
      ccProcessors: envInt(process.env.THROTTLE_CC_PROCESSORS, 4),
      ccPrefetchers: envInt(process.env.THROTTLE_CC_PREFETCHER, 20),
      ccRequests: envInt(process.env.THROTTLE_CC_REQUESTS, 100),
    },
    log: {
      errors: false,
    },
  };

  // When S3 is configured, store originals there instead of local disk.
  if (s3.isEnabled()) {
    imageSteamConfig.storage.defaults = {
      driverPath: 'image-steam-s3',
      endpoint: process.env.S3_ENDPOINT,
      bucket: process.env.S3_BUCKET,
      accessKey: process.env.S3_KEY,
      secretKey: process.env.S3_SECRET,
    };
  }

  // The Connect handler performs the actual resizing and serving.
  return new imgSteam.http.Connect(imageSteamConfig);
};
Loading