Skip to content

Commit

Permalink
secure access to datasource from hosted code
Browse files Browse the repository at this point in the history
  • Loading branch information
tysonrm committed Nov 12, 2022
1 parent 560b5c4 commit b72334c
Show file tree
Hide file tree
Showing 33 changed files with 674 additions and 559 deletions.
5 changes: 4 additions & 1 deletion .prettierrc
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
{
"tabWidth": 2,
"useTabs": false
"useTabs": false,
"arrowParens": "avoid",
"singleQuote": true,
"semi": false
}
Binary file added forkrun
Binary file not shown.
31 changes: 31 additions & 0 deletions forkrun.c
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>

#define BUFSIZE 256

/* Environment variables naming the command to exec and its single argument. */
char* envvar_cmd = "FORKRUN_CMD";
char* envvar_arg = "FORKRUN_ARG";
char cmd[BUFSIZE];
char arg[BUFSIZE];

/*
 * Fork a child that execs the command named by FORKRUN_CMD, passing the
 * optional argument named by FORKRUN_ARG. The parent returns immediately
 * with EXIT_SUCCESS (it does not wait for the child); the child returns
 * EXIT_FAILURE only if exec itself fails.
 */
int main (void) {
  pid_t pid;

  /* getenv() returns NULL when a variable is unset. The original code fed
   * that NULL straight into snprintf("%s", ...), which is undefined
   * behavior — validate before forking and fail fast with a clear message. */
  const char *cmd_env = getenv(envvar_cmd);
  const char *arg_env = getenv(envvar_arg);

  if (cmd_env == NULL || cmd_env[0] == '\0') {
    fprintf(stderr, "error: %s is not set\n", envvar_cmd);
    return EXIT_FAILURE;
  }

  if ((pid = fork()) == 0) {
    snprintf(cmd, BUFSIZE, "%s", cmd_env);

    /* Build argv with or without the argument depending on whether
     * FORKRUN_ARG was provided; argv must be NULL-terminated either way. */
    if (arg_env != NULL) {
      snprintf(arg, BUFSIZE, "%s", arg_env);
      char* const argv_with[] = { cmd, arg, NULL };
      if (execv(cmd, argv_with) < 0) {
        perror("execv error");
      }
    } else {
      char* const argv_cmd_only[] = { cmd, NULL };
      if (execv(cmd, argv_cmd_only) < 0) {
        perror("execv error");
      }
    }
  } else if (pid < 0) {
    perror("fork error");
  } else {
    return EXIT_SUCCESS;
  }

  /* Reached only in the child after a failed exec (or after fork error). */
  return EXIT_FAILURE;
}
4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@
"format": "yarn pretty -- --write",
"prepublish": "yarn build",
"stats": "git ls-files | xargs wc -l",
"test": "jest"
"test": "jest",
"buildc": "gcc forkrun.c -o forkrun"
},
"peerDependencies": {
"core-js": "^3"
Expand Down Expand Up @@ -69,6 +70,7 @@
"jsdoc-to-markdown": "7.1.1",
"mocha": "10.0.0",
"mocha-esm": "1.1.1",
"prettier": "2.7.1",
"split": "1.0.1",
"standard": "^17.0.0",
"yarn": "1.22.19"
Expand Down
7 changes: 4 additions & 3 deletions src/adapters/controllers/get-model-by-id.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@ export default function getModelByIdFactory (findModel) {
try {
httpRequest.log(getModelById.name)

const id = httpRequest.params.id
const query = httpRequest.query
const model = await findModel({ id, query })
const model = await findModel({
id: httpRequest.params.id,
query: httpRequest.query
})

const { content, contentType } = getContent(
httpRequest,
Expand Down
1 change: 1 addition & 0 deletions src/adapters/controllers/get-models.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ export default function getModelsFactory (listModels) {
httpRequest.stream = true
return
}

const { content, contentType } = getContent(httpRequest, models)

return {
Expand Down
2 changes: 1 addition & 1 deletion src/adapters/controllers/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ export const initCache = () => {

async function loadModelInstances () {
console.time(label)
//await Promise.allSettled(specs.map(async m => m && m.fn ? m.fn() : false))
await Promise.allSettled(specs.map(async m => (m && m.fn ? m.fn() : false)))
console.timeEnd(label)
}

Expand Down
4 changes: 2 additions & 2 deletions src/adapters/datasources/datasource-file.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ export class DataSourceFile extends DataSourceMemory {
/**
* @param {Set} map
*/
constructor(map, factory, name) {
super(map, factory, name)
constructor(map, name, options) {
super(map, name, options)
this.file = this.getFilePath()
this.className = DataSourceFile.name
}
Expand Down
4 changes: 2 additions & 2 deletions src/adapters/datasources/datasource-memory.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ import DataSource from '../../domain/datasource'
* Temporary in-memory storage.
*/
export class DataSourceMemory extends DataSource {
constructor (map, factory, name, options) {
super(map, factory, name, options)
constructor (map, name, options) {
super(map, name, options)
this.className = DataSourceMemory.name
}

Expand Down
108 changes: 53 additions & 55 deletions src/adapters/datasources/datasource-mongodb.js
Original file line number Diff line number Diff line change
@@ -1,26 +1,22 @@
"use strict"

import { EventBrokerFactory } from "../../domain"

const broker = EventBrokerFactory.getInstance()
'use strict'

const HIGHWATERMARK = 50

const mongodb = require("mongodb")
const mongodb = require('mongodb')
const { MongoClient } = mongodb
const { DataSourceMemory } = require("./datasource-memory")
const { Transform, Writable } = require("stream")
const qpm = require("query-params-mongo")
const { DataSourceMemory } = require('./datasource-memory')
const { Transform, Writable } = require('stream')
const qpm = require('query-params-mongo')
const processQuery = qpm({
autoDetect: [{ fieldPattern: /_id$/, dataType: "objectId" }],
converters: { objectId: mongodb.ObjectId },
autoDetect: [{ fieldPattern: /_id$/, dataType: 'objectId' }],
converters: { objectId: mongodb.ObjectId }
})

const url = process.env.MONGODB_URL || "mongodb://localhost:27017"
const configRoot = require("../../config").hostConfig
const url = process.env.MONGODB_URL || 'mongodb://localhost:27017'
const configRoot = require('../../config').hostConfig
const dsOptions = configRoot.adapters.datasources.DataSourceMongoDb.options || {
runOffline: true,
numConns: 2,
numConns: 2
}
const cacheSize = configRoot.adapters.cacheSize || 3000

Expand All @@ -31,7 +27,7 @@ const connections = []

const mongoOpts = {
//useNewUrlParser: true,
useUnifiedTopology: true,
useUnifiedTopology: true
}

/**
Expand All @@ -40,8 +36,8 @@ const mongoOpts = {
* even when the database is offline.
*/
export class DataSourceMongoDb extends DataSourceMemory {
constructor (map, factory, name, options = {}) {
super(map, factory, name, options)
constructor (map, name, options = {}) {
super(map, name, options)
this.cacheSize = cacheSize
this.mongoOpts = mongoOpts
this.className = this.constructor.name
Expand All @@ -57,7 +53,7 @@ export class DataSourceMongoDb extends DataSourceMemory {
const client = new MongoClient(this.url, this.mongoOpts)
await client.connect()
connections.push(client)
client.on("connectionClosed", () =>
client.on('connectionClosed', () =>
connections.splice(connections.indexOf(client), 1)
)
}
Expand Down Expand Up @@ -93,17 +89,15 @@ export class DataSourceMongoDb extends DataSourceMemory {
async loadModels () {
try {
const cursor = (await this.collection()).find().limit(this.cacheSize)
cursor.forEach((model) => super.saveSync(model.id, model))
cursor.forEach(model => super.saveSync(model.id, model))
} catch (error) {
console.error({ fn: this.loadModels.name, error })
}
}

async findDb (id) {
try {
const model = await (await this.collection()).findOne({ _id: id })
// save it to the cache
return super.saveSync(id, model) || model // saveSync fails on fresh start
return (await this.collection()).findOne({ _id: id })
} catch (error) {
console.error({ fn: this.findDb.name, error })
}
Expand All @@ -124,7 +118,7 @@ export class DataSourceMongoDb extends DataSourceMemory {
) {
// cached can be empty object, save after finding in db
const data = await this.findDb(id)
super.saveSync(id, data)
if (data) super.saveSync(id, data)
return data
}
return cached
Expand All @@ -143,9 +137,11 @@ export class DataSourceMongoDb extends DataSourceMemory {
async saveDb (id, data) {
try {
const clone = JSON.parse(this.serialize(data))
await (
await this.collection()
).replaceOne({ _id: id }, { ...clone, _id: id }, { upsert: true })
await (await this.collection()).replaceOne(
{ _id: id },
{ ...clone, _id: id },
{ upsert: true }
)
return data
} catch (error) {
console.error({ fn: this.saveDb.name, error })
Expand All @@ -164,22 +160,21 @@ export class DataSourceMongoDb extends DataSourceMemory {
*/
async save (id, data) {
try {
const cache = super.saveSync(id, data)
super.saveSync(id, data)
try {
await this.saveDb(id, data)
} catch (error) {
// default is true
if (!this.runOffline) {
this.deleteSync(id)
super.deleteSync(id)
// after delete mem and db are sync'd
console.error("db trans failed, rolled back")
console.error('db trans failed, rolled back')
return
}
// run while db is down - cache will be ahead
console.error("db trans failed, sync it later")
return data
console.error('db trans failed, sync it later')
}
return cache
return data
} catch (e) {
console.error(e)
}
Expand All @@ -199,14 +194,13 @@ export class DataSourceMongoDb extends DataSourceMemory {
const ctx = this

async function upsert () {
const operations = objects.map((str) => {
const obj = JSON.parse(str)
const operations = objects.map(obj => {
return {
replaceOne: {
filter: { ...filter, _id: obj.id },
replacement: { ...obj, _id: obj.id },
upsert: true,
},
upsert: true
}
}
})

Expand All @@ -216,7 +210,7 @@ export class DataSourceMongoDb extends DataSourceMemory {
const result = await col.bulkWrite(operations)
console.log(result.getRawResponse())
objects = []
} catch (error) { }
} catch (error) {}
}
}

Expand All @@ -233,13 +227,13 @@ export class DataSourceMongoDb extends DataSourceMemory {
end (chunk, _, done) {
objects.push(chunk)
done()
},
}
})

writable.on("finish", async () => await upsert())
writable.on('finish', async () => await upsert())

return writable
} catch (error) { }
} catch (error) {}
}

/**
Expand All @@ -253,7 +247,6 @@ export class DataSourceMongoDb extends DataSourceMemory {
* @returns
*/
async mongoFind ({ filter, sort, limit, aggregate, skip, offset } = {}) {
console.log({ filter })
let cursor = (await this.collection()).find(filter)
if (sort) cursor = cursor.sort(sort)
if (limit) cursor = cursor.limit(limit)
Expand All @@ -275,20 +268,21 @@ export class DataSourceMongoDb extends DataSourceMemory {
streamList ({ writable, serialize, transform, options }) {
try {
let first = true

const serializer = new Transform({
writableObjectMode: true,

// start of array
construct (callback) {
this.push("[")
this.push('[')
callback()
},

// each chunk is a record
transform (chunk, _encoding, callback) {
// comma-separate
if (first) first = false
else this.push(",")
else this.push(',')

// serialize record
this.push(JSON.stringify(chunk))
Expand All @@ -297,25 +291,28 @@ export class DataSourceMongoDb extends DataSourceMemory {

// end of array
flush (callback) {
this.push("]")
this.push(']')
callback()
},
}
})

return new Promise(async (resolve, reject) => {
const readable = (await this.mongoFind(options)).stream()

readable.on("error", reject)
readable.on("end", resolve)
readable.on('error', reject)
readable.on('end', resolve)

// optionally transform db stream then pipe to output
if (serialize && transform)
readable.pipe(transform).pipe(serializer).pipe(writable)
else if (serialize) readable.pipe(serializer).pipe(writable)
if (transform && serialize)
readable
.pipe(transform)
.pipe(serializer)
.pipe(writable)
else if (transform) readable.pipe(transform).pipe(writable)
else if (serialize) readable.pipe(serializer).pipe(writable)
else readable.pipe(writable)
})
} catch (error) { }
} catch (error) {}
}

processOptions (param) {
Expand Down Expand Up @@ -351,8 +348,9 @@ export class DataSourceMongoDb extends DataSourceMemory {
const {
writable = null,
transform = null,
serialize = true,
query = null,
serialize = false,
options = null,
query = null
} = param

try {
Expand All @@ -367,7 +365,7 @@ export class DataSourceMongoDb extends DataSourceMemory {
writable,
serialize,
transform,
options: processedOptions,
options: processedOptions
})
}

Expand All @@ -381,7 +379,7 @@ export class DataSourceMongoDb extends DataSourceMemory {
return {
total: await this.countDb(),
cached: this.getCacheSize(),
bytes: this.getCacheSizeBytes(),
bytes: this.getCacheSizeBytes()
}
}

Expand Down
Loading

0 comments on commit b72334c

Please sign in to comment.