about summary refs log tree commit diff
diff options
context:
space:
mode:
authorTom Harley2018-04-26 05:05:49 +0100
committerTom Harley2018-04-26 05:05:49 +0100
commitf0331e27eab9e699529183b0dc2dd1bc9599aea5 (patch)
treec7c09b1d77006bf7aceb7c3bf1c23910c9a55168
parentMiscellaneous minor fixes (diff)
parentpushing for tom to get changes (diff)
downloadmedicine-f0331e27eab9e699529183b0dc2dd1bc9599aea5.tar.gz
medicine-f0331e27eab9e699529183b0dc2dd1bc9599aea5.zip

Merge branch 'trh/gets' of gitlab.cs.st-andrews.ac.uk:cs3099group-be-4/project-code into trh/gets

-rw-r--r--src/conversion/czi/czi.ts120
-rw-r--r--src/conversion/czi/pyramidWorker.ts12
-rw-r--r--src/conversion/leica/scn.ts30
-rw-r--r--src/files.ts3
-rw-r--r--src/logger.ts13
-rw-r--r--src/ppq/index.js34
6 files changed, 123 insertions(+), 89 deletions(-)
diff --git a/src/conversion/czi/czi.ts b/src/conversion/czi/czi.ts
index 448d81c..b611b8b 100644
--- a/src/conversion/czi/czi.ts
+++ b/src/conversion/czi/czi.ts
@@ -13,9 +13,10 @@ import { uuid } from '../../uuid'
import { queue as jobQueue, Promiser as Job } from '../../ppq'
import { profiler } from '../../profiler';
import { exec } from '../types/exec'
-sharp.concurrency(2);
let log: Logger;
+const THREAD_LIMIT: number = 2;
+sharp.concurrency(THREAD_LIMIT);
const readFile = require('util').promisify(fs.readFile);
const readdir = require('util').promisify(fs.readdir);
const exists = require('util').promisify(fs.exists);
@@ -538,51 +539,64 @@ const zoomTier: (
quadrents += `${outputImageDirectory}${previousPlane[ys + 1][xs + 1].file} `;
}
- let outputFileName: string =`${outputImageData}/tmp/${uuid.generate()}.png`;
- await jobQueue.enqueue(4, exec, null, `${execpaths} vips arrayjoin "${quadrents}" ${outputFileName} --across ${across}`);
+ let outputFileName: string =`${outputImageData}/tmp/${uuid.generate()}.png`;
- // Rescale and push to file
- if (quadrentCount !== 3) {
-
- let extRight: number = 0, extBottom: number = 0;
- if (moreToRight) {
- extBottom = tileSize;
- } else if (moreToBottom){
- extRight = tileSize;
- }
-
- let fullTileQuadRef: SharpInstance = sharp(outputFileName)
- .background({r: 0, g: 0, b: 0, alpha: 1})
- .extend({
- top: 0, left: 0,
- bottom: extBottom, right: extRight
- });
- let fullTileQuad: Buffer = await jobQueue.enqueue(5, fullTileQuadRef.toBuffer as Job<Buffer>, fullTileQuadRef);
-
- let rescaledTileQuadRef: SharpInstance = sharp(fullTileQuad)
- .resize(tileSize, tileSize);
-
- await jobQueue.enqueue(5, rescaledTileQuadRef.toFile as Job<sharp.OutputInfo>, rescaledTileQuadRef,
- `${outputImageDirectory}img-c${c}-p${p}-y${ys * tileSize}-x${xs * tileSize}.png`);
- } else {
- let fullTileQuadRef: SharpInstance = sharp(outputFileName)
- .resize(tileSize, tileSize)
-
- await jobQueue.enqueue(5, fullTileQuadRef.toFile as Job<sharp.OutputInfo>, fullTileQuadRef,
- `${outputImageDirectory}img-c${c}-p${p}-y${ys * tileSize}-x${xs * tileSize}.png`);
- }
- fs.unlink(`${outputFileName}`, (err) => {
- log.warn(fileName + ' Error when deleting redundant resource: \n' + err.stack)
- });
+ try {
+ await new Promise((res) => {
+ shell.exec(`${execpaths} vips arrayjoin "${quadrents}" ${outputFileName} --across ${across} --vips-concurrency=${THREAD_LIMIT}`);
+ res();
+ });
+ } catch (err) {
+ if (err) {
+ log.fatal(err);
+ }
+ }
- finalCZIJson.total_files++;
- return {
- x_offset: previousPlane[ys][xs].x_offset,
- y_offset: previousPlane[ys][xs].y_offset,
- width: previousPlane[ys][xs].width * 2,
- height: previousPlane[ys][xs].height * 2,
- file: `img-c${c}-p${p}-y${ys * tileSize}-x${xs * tileSize}.png`
- };
+ // Rescale and push to file
+ if (quadrentCount !== 3) {
+
+ let extRight: number = 0, extBottom: number = 0;
+ if (moreToRight) {
+ extBottom = tileSize;
+ } else if (moreToBottom){
+ extRight = tileSize;
+ }
+
+ let fullTileQuadRef: SharpInstance = sharp(outputFileName)
+ .background({r: 0, g: 0, b: 0, alpha: 1})
+ .extend({
+ top: 0, left: 0,
+ bottom: extBottom, right: extRight
+ });
+ let fullTileQuad: Buffer = await jobQueue.enqueue(5, fullTileQuadRef.toBuffer as Job<Buffer>, fullTileQuadRef);
+
+ let rescaledTileQuadRef: SharpInstance = sharp(fullTileQuad)
+ .resize(tileSize, tileSize);
+
+ await jobQueue.enqueue(5, rescaledTileQuadRef.toFile as Job<sharp.OutputInfo>, rescaledTileQuadRef,
+ `${outputImageDirectory}img-c${c}-p${p}-y${ys * tileSize}-x${xs * tileSize}.png`);
+ } else {
+ let fullTileQuadRef: SharpInstance = sharp(outputFileName)
+ .resize(tileSize, tileSize)
+
+ await jobQueue.enqueue(5, fullTileQuadRef.toFile as Job<sharp.OutputInfo>, fullTileQuadRef,
+ `${outputImageDirectory}img-c${c}-p${p}-y${ys * tileSize}-x${xs * tileSize}.png`);
+ }
+
+ fs.unlink(`${outputFileName}`, (err) => {
+ if (err) {
+ log.warn(fileName + ' Error when deleting redundant resource: \n' + err.stack)
+ }
+ });
+
+ finalCZIJson.total_files++;
+ return {
+ x_offset: previousPlane[ys][xs].x_offset,
+ y_offset: previousPlane[ys][xs].y_offset,
+ width: previousPlane[ys][xs].width * 2,
+ height: previousPlane[ys][xs].height * 2,
+ file: `img-c${c}-p${p}-y${ys * tileSize}-x${xs * tileSize}.png`
+ };
}
const cziRow: Promise<CZITile>[] = [];
@@ -730,7 +744,7 @@ const extrapolateDimension: (
writeJSONToFile(`${outputImageData}/intermediate-stage-map.json`, retHeightMap);
}
- log.notify(`${fileName} > Completed Extrapolation for dimension, \'C\': ${cVal}\n\n`);
+ log.notify(`${fileName} > Completed Extrapolation for dimension, \'C\': ${cVal}`);
return retHeightMap;
};
@@ -814,7 +828,7 @@ const buildCustomPyramids: () => Promise<boolean> = async(): Promise<boolean> =>
// On error, write the error to console and set an error true.
if (err) {
console.error(err.message);
- console.log(err);
+ console.log(err.stack);
}
successfulBuild = false;
}
@@ -838,8 +852,8 @@ const initialExtractAndConvert: (absFilePath: string, space: string) => Promise<
checkForOutputDirectories([outputImageData, extractDirectory]);
log.silly(`${fileName} > This will complete at roughly 2GB/min`);
- exec(`${execpaths} CZICrunch "${absFilePath}" "${extractDirectory}"`);
- exec(`${execpaths} python3 ./ext/bin/convertJxrs.py "${extractDirectory}"`);
+ await exec(`${execpaths} CZICrunch "${absFilePath}" "${extractDirectory}"`);
+ await exec(`${execpaths} python3 ./ext/bin/convertJxrs.py "${extractDirectory}"`);
// let totalFiles: number = 0, counter: number = 0;
// console.log("=========== BEGIN JXR CONVERSION ===========")
@@ -892,16 +906,16 @@ log.notice("CZI Convertor Received new file: " + fileName);
outputImageDirectory = outputImageData + "/data/";
if (!(await exists(`${outputImageData}/supported_views.json`))) {
- log.info(`${fileName} > Begin Extracting and Converting to PNG`);
+ log.information(`${fileName} > Begin Extracting and Converting to PNG`);
await initialExtractAndConvert(original, space);
- log.info(`${fileName} > Checking/Creating output directories...`);
+ log.information(`${fileName} > Checking/Creating output directories...`);
checkForOutputDirectories([outputImageDirectory, `${outputImageData}/tmp/`]);
- log.info(`${fileName} > Creating Supported Views and writing files...`);
+ log.information(`${fileName} > Creating Supported Views and writing files...`);
await createSupportedViewsObject(false);
} else {
- log.alert("It appears as though this CZI has already been extracted, loading files...");
+ log.notify("It appears as though this CZI has already been extracted, loading files...");
await createSupportedViewsObject(true);
supportedViews = require(`${outputImageData}/supported_views.json`);
}
@@ -932,7 +946,7 @@ log.notice("CZI Convertor Received new file: " + fileName);
// console.log("REMEMBER TO REMOVE THE MAIN CALL AGAIN MR GOOSEMUN!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
-// main("/cs/scratch/cjd24/0701.czi", "/cs/scratch/cjd24/0701-extract");
+//main("/cs/scratch/cjd24/0702.czi", "/cs/scratch/cjd24/0702-extract");
// /* tslint:disable */
// async function main2() {
diff --git a/src/conversion/czi/pyramidWorker.ts b/src/conversion/czi/pyramidWorker.ts
index c52aac2..9851c1f 100644
--- a/src/conversion/czi/pyramidWorker.ts
+++ b/src/conversion/czi/pyramidWorker.ts
@@ -9,6 +9,7 @@ import { RequestError } from '../../errors'
import { logger, Logger } from '../../logger'
import { exec } from '../types/exec'
import { queue as jobQueue, Promiser as Job } from '../../ppq'
+const shell = require('shelljs');
const readFile = require('util').promisify(fs.readFile);
const log: Logger = logger.for({component: "CZI Live Server"});
@@ -78,7 +79,16 @@ const getFinalTile: Function = async function(imageDir: string, imageTier: CZIHe
let id: string = uuid.generate();
let intermediateFileName: string = `${imageDir}tmp/${id}.png`
let outputFileName: string = `${imageDir}tmp/${id}-out.png`
- await jobQueue.enqueue(2, exec, null, `${execpaths} vips arrayjoin "${involvedTiles}" ${intermediateFileName} --across ${imageTier.plane[0].length}`);
+ try {
+ await new Promise((res) => {
+ shell.exec(`${execpaths} vips arrayjoin "${involvedTiles}" ${intermediateFileName} --across ${imageTier.plane[0].length} --vips-concurrency=2`);
+ res();
+ });
+ } catch (err) {
+ if (err) {
+ log.fatal(err);
+ }
+ }
desiredRegion.scaleDown(imageTier.zoom_level);
diff --git a/src/conversion/leica/scn.ts b/src/conversion/leica/scn.ts
index ce9d36c..a7d4573 100644
--- a/src/conversion/leica/scn.ts
+++ b/src/conversion/leica/scn.ts
@@ -4,7 +4,8 @@ import * as sharp from 'sharp';
import { SharpInstance } from 'sharp';
import { SupportedViews, TileBounds, writeJSONToFile, checkForOutputDirectories, execpaths } from '../types/helpers';
import { uuid } from '../../uuid'
-import { logger } from '../../logger';
+import { logger, Logger } from '../../logger';
+let log: Logger;
// CHANGE ME FOR TESTING!!!!!
const input: string = "diseessse1.scn"
@@ -19,15 +20,13 @@ const tileOverlap: number = 0; // Overlap is only half implemented
const tileSize: number = 512;
-const crunchLeica: () => Promise<string> = async (): Promise<string> => {
-
+export const crunchLeica: (original: string, space:string) => Promise<void> = async (original: string, space:string): Promise<void> => {
+ log = logger.for({component: "SCN Crunch", targetFile: original});
try {
- let output: string =`${baseDirname}${uuid.generate()}/`;
- let leica: SharpInstance = sharp(baseDirname + input).png();
- logger.debug("Read new sharp instance for lecia: " + input);
+ let leica: SharpInstance = sharp(original).png();
- checkForOutputDirectories([output]);
- logger.info("Creating new output directory for LECIA > DZI @ " + output);
+ checkForOutputDirectories([space]);
+ log.info("Creating new output directory for LECIA > DZI @ " + space);
let meta: sharp.Metadata = await leica.metadata();
@@ -69,13 +68,14 @@ const crunchLeica: () => Promise<string> = async (): Promise<string> => {
})
}
if (!supportedViews.scalable_image) {
+ log.error(`Target File is not a scalable_image`)
throw new Error("Not a scalable image");
}
supportedViews.scalable_image.channels = channels;
}
- writeJSONToFile(`${output}supported_views.json`, supportedViews)
- logger.info("Wrote supported_views object for: " + output);
+ writeJSONToFile(`${space}/supported_views.json`, supportedViews)
+ log.info("Wrote supported_views object in: " + space);
await sharp(baseDirname + input)
.png()
@@ -84,15 +84,13 @@ const crunchLeica: () => Promise<string> = async (): Promise<string> => {
overlap: tileOverlap,
layout: "dz"
})
- .toFile(`${output}output`);
- logger.success(`Finalised output of ${input} as .dzi`);
- return `${output}output.dzi`;
+ .toFile(`${space}/output`);
+ log.success(`Finalised output of ${input} as .dzi`);
}
catch (err) {
- logger.failure(`Lecia conversion failed with error: ${err}`)
- return `Lecia conversion failed with error: ${err}`;
+ log.failure(`Lecia conversion failed with error: ${err}`)
}
};
-crunchLeica().then((ret:string) => console.log(ret));
+// crunchLeica().then((ret:string) => console.log(ret));
diff --git a/src/files.ts b/src/files.ts
index 1c70d8a..d29d7e5 100644
--- a/src/files.ts
+++ b/src/files.ts
@@ -138,12 +138,11 @@ export const saveFile: (
if (!!query.truncate) {
await truncateFile(fileId, projectName, query.offset || 0);
}
- const fuckingOffset: number = query.offset || 0;
// Open file
const fd: number = await fs.open(path(fileId, projectName), 'r+');
// Write data to file
try {
- await fs.write(fd, data, 0, data.length, Number(fuckingOffset));
+ await fs.write(fd, data, 0, data.length, Number(query.offset || 0));
} catch (err) {
logger.failure("Error while writing to file: " + err);
}
diff --git a/src/logger.ts b/src/logger.ts
index aa8192c..bed8f72 100644
--- a/src/logger.ts
+++ b/src/logger.ts
@@ -2,9 +2,20 @@
import * as colors from 'colors/safe';
import * as winston from 'winston';
+import { profiler } from './profiler'
import { logPath } from './files';
import { QueryOptions } from 'sequelize';
+const dbQuery = profiler.meter({
+ name: "Database Requests Per Minute",
+ samples: 60,
+ timeframe: 60
+})
+const dbTime = profiler.histogram({
+ name: "Time to Serve DB requests (ms)",
+ measurement: "mean",
+ agg_type: "avg"
+})
type Colour = (s: string) => string;
@@ -388,6 +399,8 @@ export const logger: Logger = setDefaults({ user: '_BE4_system', component: 'cor
export const logQuery: (query: string, duration: string) => void = (
query: string, duration: string
): void => {
+ dbQuery.mark();
+ dbTime.update(Number(duration));
queryLogger.info(query, {
duration
});
diff --git a/src/ppq/index.js b/src/ppq/index.js
index ec2f188..84d2b7e 100644
--- a/src/ppq/index.js
+++ b/src/ppq/index.js
@@ -19,23 +19,23 @@ class PromisePriorityQueue {
if (config.concurrent) this.concurrent = config.concurrent;
}
- // enqueue(i, p, t, ...args) {
- // return p.bind(t)(...args);
- // }
-
- enqueue(i, p, t, ...argv) {
- this.size++;
- return new Promise((res, rej) => {
- this.queues[i-1].push(
- [
- p.bind(t, ...argv),
- res,
- rej
- ]
- );
- this.run();
- });
- };
+ enqueue(i, p, t, ...args) {
+ return p.bind(t)(...args);
+ }
+
+ // enqueue(i, p, t, ...argv) {
+ // this.size++;
+ // return new Promise((res, rej) => {
+ // this.queues[i-1].push(
+ // [
+ // p.bind(t, ...argv),
+ // res,
+ // rej
+ // ]
+ // );
+ // this.run();
+ // });
+ // };
dequeue() {
for (let i = 0; i < this.least; ++i) {